- def __repr__(self):
- return '<PendingContentAssociation %s>' % self.pca_id
-
-__all__.append('PendingContentAssociation')
-
def insert_pending_content_paths(package, fullpaths, session=None):
    """
    Make sure given paths are temporarily associated with given
    package.

    Any previously recorded paths for the same (package, version,
    architecture) triple are deleted first, then one
    PendingContentAssociation row is added per distinct path.

    @type package: dict
    @param package: the package to associate with should have been read in
    from the binary control file
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with
    the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session. If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code. If not passed, a commit
    will be performed at the end of the function

    @return: True upon success, False if there is a problem
    """

    # Track whether we own the session so we only commit/close our own.
    privatetrans = False
    if session is None:
        session = DBConn().session()
        privatetrans = True

    try:
        arch = get_architecture(package['Architecture'], session)
        arch_id = arch.arch_id

        # Remove any already existing recorded files for this package
        q = session.query(PendingContentAssociation)
        q = q.filter_by(package=package['Package'])
        q = q.filter_by(version=package['Version'])
        q = q.filter_by(architecture=arch_id)
        q.delete()

        # Resolve each path into (dir id, file id) pairs; the dict also
        # de-duplicates repeated fullpaths so we insert each only once.
        # (renamed 'file' -> 'filename' to stop shadowing the builtin)
        pathcache = {}
        for fullpath in fullpaths:
            (path, filename) = os.path.split(fullpath)

            # Strip a leading "./" as produced by e.g. dpkg -c output.
            if path.startswith("./"):
                path = path[2:]

            filepath_id = get_or_set_contents_path_id(path, session)
            filename_id = get_or_set_contents_file_id(filename, session)

            pathcache[fullpath] = (filepath_id, filename_id)

        for fullpath, dat in pathcache.items():
            pca = PendingContentAssociation()
            pca.package = package['Package']
            pca.version = package['Version']
            pca.filepath_id = dat[0]
            pca.filename_id = dat[1]
            pca.architecture = arch_id
            session.add(pca)

        # Only commit if we set up the session ourself
        if privatetrans:
            session.commit()
            session.close()
        else:
            session.flush()

        return True
    except Exception:
        # Deliberate best-effort: log the failure and report it via the
        # return code instead of propagating.  (Unused 'e' binding removed.)
        traceback.print_exc()

        # Only rollback if we set up the session ourself
        if privatetrans:
            session.rollback()
            session.close()

        return False
-
-__all__.append('insert_pending_content_paths')
-
-################################################################################
-
class Priority(object):
    """ORM-mapped row of the priority table.

    Attributes (C{priority}, C{priority_id}) are filled in by the mapper,
    not by C{__init__}.  Instances compare equal/unequal directly against
    priority name strings.
    """

    def __init__(self, *args, **kwargs):
        pass

    def __eq__(self, val):
        # Only string comparison is special-cased; anything else falls
        # back to the normal comparison machinery.
        if not isinstance(val, str):
            return NotImplemented
        return self.priority == val

    def __ne__(self, val):
        if not isinstance(val, str):
            return NotImplemented
        return self.priority != val

    def __repr__(self):
        return '<Priority %s (%s)>' % (self.priority, self.priority_id)
-
-__all__.append('Priority')
-
@session_wrapper
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority
    """

    try:
        # .one() raises NoResultFound when no row matches; map that to
        # None.  (MultipleResultsFound still propagates, as before.)
        return session.query(Priority).filter_by(priority=priority).one()
    except NoResultFound:
        return None
-
-__all__.append('get_priority')
-
@session_wrapper
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """

    # Build the name -> id mapping in a single pass over the table.
    return dict((p.priority, p.priority_id)
                for p in session.query(Priority).all())
-
-__all__.append('get_priorities')
-
-################################################################################
-
-class Queue(object):
    def __init__(self, *args, **kwargs):
        # Intentionally empty: attributes (queue_id, queue_name, ...) are
        # presumably populated by the SQLAlchemy mapper — verify against
        # the mapper setup elsewhere in this module.
        pass
-
- def __repr__(self):
- return '<Queue %s>' % self.queue_name
-
- def autobuild_upload(self, changes, srcpath, session=None):
- """
- Update queue_build database table used for incoming autobuild support.
-
- @type changes: Changes
- @param changes: changes object for the upload to process
-
- @type srcpath: string
- @param srcpath: path for the queue file entries/link destinations
-
- @type session: SQLAlchemy session
- @param session: Optional SQLAlchemy session. If this is passed, the
- caller is responsible for ensuring a transaction has begun and
- committing the results or rolling back based on the result code. If
- not passed, a commit will be performed at the end of the function,
- otherwise the caller is responsible for commiting.
-
- @rtype: NoneType or string
- @return: None if the operation failed, a string describing the error if not
- """
-
- privatetrans = False
- if session is None:
- session = DBConn().session()
- privatetrans = True
-
- # TODO: Remove by moving queue config into the database
- conf = Config()
-
- for suitename in changes.changes["distribution"].keys():
- # TODO: Move into database as:
- # buildqueuedir TEXT DEFAULT NULL (i.e. NULL is no build)
- # buildqueuecopy BOOLEAN NOT NULL DEFAULT FALSE (i.e. default is symlink)
- # This also gets rid of the SecurityQueueBuild hack below
- if suitename not in conf.ValueList("Dinstall::QueueBuildSuites"):
- continue
-
- # Find suite object
- s = get_suite(suitename, session)
- if s is None:
- return "INTERNAL ERROR: Could not find suite %s" % suitename
-
- # TODO: Get from database as above
- dest_dir = conf["Dir::QueueBuild"]
-
- # TODO: Move into database as above
- if conf.FindB("Dinstall::SecurityQueueBuild"):
- dest_dir = os.path.join(dest_dir, suitename)
-
- for file_entry in changes.files.keys():
- src = os.path.join(srcpath, file_entry)
- dest = os.path.join(dest_dir, file_entry)
-
- # TODO: Move into database as above
- if conf.FindB("Dinstall::SecurityQueueBuild"):
- # Copy it since the original won't be readable by www-data
- import utils
- utils.copy(src, dest)
- else:
- # Create a symlink to it
- os.symlink(src, dest)
-
- qb = QueueBuild()
- qb.suite_id = s.suite_id
- qb.queue_id = self.queue_id
- qb.filename = dest
- qb.in_queue = True
-
- session.add(qb)
-
- # If the .orig.tar.gz is in the pool, create a symlink to
- # it (if one doesn't already exist)
- if changes.orig_tar_id:
- # Determine the .orig.tar.gz file name
- for dsc_file in changes.dsc_files.keys():
- if dsc_file.endswith(".orig.tar.gz"):
- filename = dsc_file
-
- dest = os.path.join(dest_dir, filename)
-
- # If it doesn't exist, create a symlink
- if not os.path.exists(dest):
- q = session.execute("SELECT l.path, f.filename FROM location l, files f WHERE f.id = :id and f.location = l.id",
- {'id': changes.orig_tar_id})
- res = q.fetchone()
- if not res:
- return "[INTERNAL ERROR] Couldn't find id %s in files table." % (changes.orig_tar_id)
-
- src = os.path.join(res[0], res[1])
- os.symlink(src, dest)
-
- # Add it to the list of packages for later processing by apt-ftparchive
- qb = QueueBuild()
- qb.suite_id = s.suite_id
- qb.queue_id = self.queue_id
- qb.filename = dest
- qb.in_queue = True
- session.add(qb)
-
- # If it does, update things to ensure it's not removed prematurely
- else:
- qb = get_queue_build(dest, s.suite_id, session)
- if qb is None:
- qb.in_queue = True
- qb.last_used = None
- session.add(qb)
-
- if privatetrans:
- session.commit()
- session.close()
-
- return None