X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=daklib%2Fdbconn.py;h=8879185175d6e1123e9acf19dc43f7a62efae89f;hb=9a14adff2ca4e7a5346c0c3b5723d9959022291a;hp=5b30fce99612469c74991d42fe3fbc5871ff532e;hpb=945c33a367afa0e69f3aa8b4d087deb6356d8931;p=dak.git

diff --git a/daklib/dbconn.py b/daklib/dbconn.py
old mode 100755
new mode 100644
index 5b30fce9..88791851
--- a/daklib/dbconn.py
+++ b/daklib/dbconn.py
@@ -51,7 +51,6 @@ from sqlalchemy.exc import *
 from sqlalchemy.orm.exc import NoResultFound
 
 from config import Config
-from singleton import Singleton
 from textutils import fix_maintainer
 
 ################################################################################
@@ -435,7 +434,7 @@ class BuildQueue(object):
         pass
 
     def __repr__(self):
-        return '<Queue %s>' % self.queue_name
+        return '<BuildQueue %s>' % self.queue_name
 
     def add_file_from_pool(self, poolfile):
         """Copies a file into the pool.  Assumes that the PoolFile object is
@@ -448,30 +447,30 @@ class BuildQueue(object):
         for f in self.queuefiles:
             if f.fileid is not None and f.fileid == poolfile.file_id or \
                f.poolfile.filename == poolfile_basename:
-                # In this case, update the QueueFile entry so we
+                # In this case, update the BuildQueueFile entry so we
                 # don't remove it too early
                 f.lastused = datetime.now()
-                DBConn().session().object_session(pf).add(f)
+                DBConn().session().object_session(poolfile).add(f)
                 return f
 
-        # Prepare QueueFile object
-        qf = QueueFile()
-        qf.queue_id = self.queue_id
+        # Prepare BuildQueueFile object
+        qf = BuildQueueFile()
+        qf.build_queue_id = self.queue_id
         qf.lastused = datetime.now()
-        qf.filename = dest
+        qf.filename = poolfile_basename
 
-        targetpath = qf.fullpath
+        targetpath = poolfile.fullpath
         queuepath = os.path.join(self.path, poolfile_basename)
 
         try:
-            if self.copy_pool_files:
+            if self.copy_files:
                 # We need to copy instead of symlink
                 import utils
-                utils.copy(targetfile, queuepath)
+                utils.copy(targetpath, queuepath)
                 # NULL in the fileid field implies a copy
                 qf.fileid = None
             else:
-                os.symlink(targetfile, queuepath)
+                os.symlink(targetpath, queuepath)
                 qf.fileid = poolfile.file_id
         except OSError:
             return None
@@ -485,9 +484,9 @@ __all__.append('BuildQueue')
 
 @session_wrapper
-def get_queue(queuename, session=None):
+def get_build_queue(queuename, session=None):
     """
-    Returns Queue object for given C{queue name}, creating it if it does not
+    Returns BuildQueue object for given C{queue name}, creating it if it does not
    exist.
 
     @type queuename: string
     @param queuename: The name of the queue
@@ -497,18 +496,18 @@ def get_queue(queuename, session=None):
     @param session: Optional SQLA session object (a temporary one will be
     generated if not supplied)
 
-    @rtype: Queue
-    @return: Queue object for the given queue
+    @rtype: BuildQueue
+    @return: BuildQueue object for the given queue
     """
 
-    q = session.query(Queue).filter_by(queue_name=queuename)
+    q = session.query(BuildQueue).filter_by(queue_name=queuename)
 
     try:
         return q.one()
     except NoResultFound:
         return None
 
-__all__.append('get_queue')
+__all__.append('get_build_queue')
 
 ################################################################################
 
@@ -517,7 +516,7 @@ class BuildQueueFile(object):
         pass
 
     def __repr__(self):
-        return '<BuildQueueFile %s (%s)>' % (self.filename, self.queue_id)
+        return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
 
 __all__.append('BuildQueueFile')
 
@@ -1248,19 +1247,19 @@ __all__.append('KeyringACLMap')
 
 ################################################################################
 
-class KnownChange(object):
+class DBChange(object):
     def __init__(self, *args, **kwargs):
         pass
 
     def __repr__(self):
-        return '<KnownChange %s>' % self.changesname
+        return '<DBChange %s>' % self.changesname
 
-__all__.append('KnownChange')
+__all__.append('DBChange')
 
 @session_wrapper
-def get_knownchange(filename, session=None):
+def get_dbchange(filename, session=None):
     """
-    returns knownchange object for given C{filename}.
+    returns DBChange object for given C{filename}.
 
     @type archive: string
     @param archive: the name of the arhive
@@ -1273,14 +1272,14 @@ def get_knownchange(filename, session=None):
     @return: Archive object for the given name (None if not present)
 
     """
-    q = session.query(KnownChange).filter_by(changesname=filename)
+    q = session.query(DBChange).filter_by(changesname=filename)
 
     try:
         return q.one()
     except NoResultFound:
         return None
 
-__all__.append('get_knownchange')
+__all__.append('get_dbchange')
 
 ################################################################################
 
@@ -1659,6 +1658,31 @@ class PolicyQueue(object):
 
 __all__.append('PolicyQueue')
 
+@session_wrapper
+def get_policy_queue(queuename, session=None):
+    """
+    Returns PolicyQueue object for given C{queue name}
+
+    @type queuename: string
+    @param queuename: The name of the queue
+
+    @type session: Session
+    @param session: Optional SQLA session object (a temporary one will be
+    generated if not supplied)
+
+    @rtype: PolicyQueue
+    @return: PolicyQueue object for the given queue
+    """
+
+    q = session.query(PolicyQueue).filter_by(queue_name=queuename)
+
+    try:
+        return q.one()
+    except NoResultFound:
+        return None
+
+__all__.append('get_policy_queue')
+
 ################################################################################
 
 class Priority(object):
@@ -1965,6 +1989,7 @@ __all__.append('get_source_in_suite')
 def add_dsc_to_db(u, filename, session=None):
     entry = u.pkg.files[filename]
     source = DBSource()
+    pfs = []
 
     source.source = u.pkg.dsc["source"]
     source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
@@ -1983,6 +2008,7 @@ def add_dsc_to_db(u, filename, session=None):
         filename = entry["pool name"] + filename
         poolfile = add_poolfile(filename, entry, dsc_location_id, session)
         session.flush()
+        pfs.append(poolfile)
         entry["files id"] = poolfile.file_id
 
     source.poolfile_id = entry["files id"]
@@ -2026,6 +2052,7 @@ def add_dsc_to_db(u, filename, session=None):
         # FIXME: needs to check for -1/-2 and or handle exception
         if found and obj is not None:
             files_id = obj.file_id
+            pfs.append(obj)
 
         # If still not found, add it
         if files_id is None:
@@ -2033,7 +2060,13 @@ def add_dsc_to_db(u, filename, session=None):
             dentry["sha1sum"] = dfentry["sha1sum"]
             dentry["sha256sum"] = dfentry["sha256sum"]
             poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
+            pfs.append(poolfile)
             files_id = poolfile.file_id
+        else:
+            poolfile = get_poolfile_by_id(files_id, session)
+            if poolfile is None:
+                utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
+            pfs.append(poolfile)
 
         df.poolfile_id = files_id
         session.add(df)
@@ -2062,7 +2095,7 @@ def add_dsc_to_db(u, filename, session=None):
 
     session.flush()
 
-    return dsc_component, dsc_location_id
+    return dsc_component, dsc_location_id, pfs
 
 __all__.append('add_dsc_to_db')
 
@@ -2088,13 +2121,14 @@ def add_deb_to_db(u, filename, session=None):
     filename = entry["pool name"] + filename
     fullpath = os.path.join(cnf["Dir::Pool"], filename)
     if not entry.get("location id", None):
-        entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], utils.where_am_i(), session).location_id
+        entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id
 
-    if not entry.get("files id", None):
+    if entry.get("files id", None):
+        poolfile = get_poolfile_by_id(bin.poolfile_id)
+        bin.poolfile_id = entry["files id"]
+    else:
         poolfile = add_poolfile(filename, entry, entry["location id"], session)
-        entry["files id"] = poolfile.file_id
-
-    bin.poolfile_id = entry["files id"]
+        bin.poolfile_id = entry["files id"] = poolfile.file_id
 
     # Find source id
     bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
@@ -2125,6 +2159,8 @@ def add_deb_to_db(u, filename, session=None):
     #        session.rollback()
     #        raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
 
+    return poolfile
+
 __all__.append('add_deb_to_db')
 
 ################################################################################
@@ -2470,64 +2506,72 @@ __all__.append('UploadBlock')
 
 ################################################################################
 
-class DBConn(Singleton):
+class DBConn(object):
     """
     database module init.
     """
+    __shared_state = {}
+
     def __init__(self, *args, **kwargs):
-        super(DBConn, self).__init__(*args, **kwargs)
+        self.__dict__ = self.__shared_state
 
-    def _startup(self, *args, **kwargs):
-        self.debug = False
-        if kwargs.has_key('debug'):
-            self.debug = True
-        self.__createconn()
+        if not getattr(self, 'initialised', False):
+            self.initialised = True
+            self.debug = kwargs.has_key('debug')
+            self.__createconn()
 
     def __setuptables(self):
-        self.tbl_architecture = Table('architecture', self.db_meta, autoload=True)
-        self.tbl_archive = Table('archive', self.db_meta, autoload=True)
-        self.tbl_bin_associations = Table('bin_associations', self.db_meta, autoload=True)
-        self.tbl_binaries = Table('binaries', self.db_meta, autoload=True)
-        self.tbl_binary_acl = Table('binary_acl', self.db_meta, autoload=True)
-        self.tbl_binary_acl_map = Table('binary_acl_map', self.db_meta, autoload=True)
-        self.tbl_build_queue = Table('build_queue', self.db_meta, autoload=True)
-        self.tbl_build_queue_files = Table('build_queue_files', self.db_meta, autoload=True)
-        self.tbl_component = Table('component', self.db_meta, autoload=True)
-        self.tbl_config = Table('config', self.db_meta, autoload=True)
-        self.tbl_content_associations = Table('content_associations', self.db_meta, autoload=True)
-        self.tbl_content_file_names = Table('content_file_names', self.db_meta, autoload=True)
-        self.tbl_content_file_paths = Table('content_file_paths', self.db_meta, autoload=True)
-        self.tbl_changes_pending_binary = Table('changes_pending_binaries', self.db_meta, autoload=True)
-        self.tbl_changes_pending_files = Table('changes_pending_files', self.db_meta, autoload=True)
-        self.tbl_changes_pending_source = Table('changes_pending_source', self.db_meta, autoload=True)
-        self.tbl_changes_pending_source_files = Table('changes_pending_source_files', self.db_meta, autoload=True)
-        self.tbl_changes_pool_files = Table('changes_pool_files', self.db_meta, autoload=True)
-        self.tbl_dsc_files = Table('dsc_files', self.db_meta, autoload=True)
-        self.tbl_files = Table('files', self.db_meta, autoload=True)
-        self.tbl_fingerprint = Table('fingerprint', self.db_meta, autoload=True)
-        self.tbl_keyrings = Table('keyrings', self.db_meta, autoload=True)
-        self.tbl_known_changes = Table('known_changes', self.db_meta, autoload=True)
-        self.tbl_keyring_acl_map = Table('keyring_acl_map', self.db_meta, autoload=True)
-        self.tbl_location = Table('location', self.db_meta, autoload=True)
-        self.tbl_maintainer = Table('maintainer', self.db_meta, autoload=True)
-        self.tbl_new_comments = Table('new_comments', self.db_meta, autoload=True)
-        self.tbl_override = Table('override', self.db_meta, autoload=True)
-        self.tbl_override_type = Table('override_type', self.db_meta, autoload=True)
-        self.tbl_pending_content_associations = Table('pending_content_associations', self.db_meta, autoload=True)
-        self.tbl_policy_queue = Table('policy_queue', self.db_meta, autoload=True)
-        self.tbl_priority = Table('priority', self.db_meta, autoload=True)
-        self.tbl_section = Table('section', self.db_meta, autoload=True)
-        self.tbl_source = Table('source', self.db_meta, autoload=True)
-        self.tbl_source_acl = Table('source_acl', self.db_meta, autoload=True)
-        self.tbl_src_associations = Table('src_associations', self.db_meta, autoload=True)
-        self.tbl_src_format = Table('src_format', self.db_meta, autoload=True)
-        self.tbl_src_uploaders = Table('src_uploaders', self.db_meta, autoload=True)
-        self.tbl_suite = Table('suite', self.db_meta, autoload=True)
-        self.tbl_suite_architectures = Table('suite_architectures', self.db_meta, autoload=True)
-        self.tbl_suite_src_formats = Table('suite_src_formats', self.db_meta, autoload=True)
-        self.tbl_suite_build_queue_copy = Table('suite_build_queue_copy', self.db_meta, autoload=True)
-        self.tbl_uid = Table('uid', self.db_meta, autoload=True)
-        self.tbl_upload_blocks = Table('upload_blocks', self.db_meta, autoload=True)
+        tables = (
+            'architecture',
+            'archive',
+            'bin_associations',
+            'binaries',
+            'binary_acl',
+            'binary_acl_map',
+            'build_queue',
+            'build_queue_files',
+            'component',
+            'config',
+            'content_associations',
+            'content_file_names',
+            'content_file_paths',
+            'changes_pending_binaries',
+            'changes_pending_files',
+            'changes_pending_files_map',
+            'changes_pending_source',
+            'changes_pending_source_files',
+            'changes_pool_files',
+            'dsc_files',
+            'files',
+            'fingerprint',
+            'keyrings',
+            'changes',
+            'keyring_acl_map',
+            'location',
+            'maintainer',
+            'new_comments',
+            'override',
+            'override_type',
+            'pending_content_associations',
+            'policy_queue',
+            'priority',
+            'section',
+            'source',
+            'source_acl',
+            'src_associations',
+            'src_format',
+            'src_uploaders',
+            'suite',
+            'suite_architectures',
+            'suite_src_formats',
+            'suite_build_queue_copy',
+            'uid',
+            'upload_blocks',
+        )
+
+        for table_name in tables:
+            table = Table(table_name, self.db_meta, autoload=True)
+            setattr(self, 'tbl_%s' % table_name, table)
 
     def __setupmappers(self):
         mapper(Architecture, self.tbl_architecture,
@@ -2611,22 +2655,28 @@ class DBConn(Singleton):
                properties = dict(keyring_name = self.tbl_keyrings.c.name,
                                  keyring_id = self.tbl_keyrings.c.id))
 
-        mapper(KnownChange, self.tbl_known_changes,
-               properties = dict(known_change_id = self.tbl_known_changes.c.id,
+        mapper(DBChange, self.tbl_changes,
+               properties = dict(change_id = self.tbl_changes.c.id,
                                  poolfiles = relation(PoolFile,
                                                       secondary=self.tbl_changes_pool_files,
                                                       backref="changeslinks"),
-                                 files = relation(ChangePendingFile, backref="changesfile")))
+                                 files = relation(ChangePendingFile,
+                                                  secondary=self.tbl_changes_pending_files_map,
+                                                  backref="changesfile"),
+                                 in_queue_id = self.tbl_changes.c.in_queue,
+                                 in_queue = relation(PolicyQueue,
+                                                     primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
+                                 approved_for_id = self.tbl_changes.c.approved_for))
 
-        mapper(ChangePendingBinary, self.tbl_changes_pending_binary,
-               properties = dict(change_pending_binary_id = self.tbl_changes_pending_binary.c.id))
+        mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
+               properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))
 
         mapper(ChangePendingFile, self.tbl_changes_pending_files,
                properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id))
 
         mapper(ChangePendingSource, self.tbl_changes_pending_source,
                properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
-                                 change = relation(KnownChange),
+                                 change = relation(DBChange),
                                  maintainer = relation(Maintainer,
                                                        primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
                                  changedby = relation(Maintainer,
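
The renamed helpers in this diff (get_build_queue, get_policy_queue, get_dbchange) all follow the module's @session_wrapper convention: they take an optional SQLAlchemy session and return the matching mapped object, or None when no row exists. A rough usage sketch, assuming a configured dak database and daklib on the Python path; the queue and .changes names below are invented for illustration:

    from daklib.dbconn import DBConn, get_build_queue, get_policy_queue, get_dbchange

    session = DBConn().session()

    # Queue lookups by name; both return None if the queue is unknown.
    bq = get_build_queue('buildd-unstable', session=session)   # hypothetical queue name
    pq = get_policy_queue('embargoed', session=session)        # hypothetical queue name

    # .changes files are now DBChange rows backed by the renamed 'changes' table.
    change = get_dbchange('example_1.0-1_amd64.changes', session=session)  # hypothetical filename
    if change is not None and change.in_queue is not None:
        # in_queue is the relation to PolicyQueue added in the DBChange mapper above
        print 'held in policy queue %s' % change.in_queue.queue_name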
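The DBConn rewrite drops the Singleton base class in favour of the Borg (shared-state) idiom: every DBConn() call builds a new object, but each instance rebinds its __dict__ to one class-level dictionary, and the 'initialised' flag ensures __createconn() only runs for the first instance. A minimal, self-contained sketch of the idiom (illustrative only, not dak code):

    class SharedState(object):
        """Borg-style object: many instances, one shared attribute dictionary."""
        __shared_state = {}

        def __init__(self, debug=False):
            # All instances share the same attribute storage.
            self.__dict__ = self.__shared_state

            # One-time setup, guarded the same way as DBConn.__init__ above.
            if not getattr(self, 'initialised', False):
                self.initialised = True
                self.debug = debug

    a = SharedState(debug=True)
    b = SharedState()
    assert a is not b        # distinct objects, unlike a classic singleton
    assert b.debug is True   # but state set up by the first instance is visible to all

Identity is the practical difference from the old Singleton base class: code comparing DBConn() results with 'is' no longer sees a single object, which is why the one-time guard keys off an attribute rather than object identity.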
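The new __setuptables() drives SQLAlchemy reflection from a single tuple of table names instead of ~45 hand-written assignments, binding each reflected Table as a tbl_<name> attribute. The same reflect-and-setattr idiom, shrunk to a standalone sketch against an in-memory SQLite database (the schema here is invented, and engine.execute / autoload=True follow the SQLAlchemy 0.5-era API this module uses; newer releases spell these differently):

    from sqlalchemy import create_engine, MetaData, Table

    engine = create_engine('sqlite:///:memory:')
    engine.execute('CREATE TABLE architecture (id INTEGER PRIMARY KEY, arch_string TEXT)')
    engine.execute('CREATE TABLE suite (id INTEGER PRIMARY KEY, suite_name TEXT)')

    class TableHolder(object):
        def __init__(self, engine):
            self.db_meta = MetaData()
            self.db_meta.bind = engine

            # Reflect each named table from the live database and expose it
            # as self.tbl_<name>, mirroring the loop added to DBConn.__setuptables().
            for table_name in ('architecture', 'suite'):
                table = Table(table_name, self.db_meta, autoload=True)
                setattr(self, 'tbl_%s' % table_name, table)

    holder = TableHolder(engine)
    print holder.tbl_suite.columns.keys()

Adding a table to the schema now only needs a one-line addition to the tuple, at the cost that the individual tbl_* assignments are no longer visible to grep or editors before runtime.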