X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=daklib%2Fdbconn.py;h=85ba3ab3fb90fe25d681b63269cf24831f7ad7b1;hb=be344f7eafb39d2374938fd92de9cb57cab8dfd3;hp=d416efbdf7a86bd52017a508ca856a2451947006;hpb=fadfc481919091c495941775de43798b8c6a6f45;p=dak.git

diff --git a/daklib/dbconn.py b/daklib/dbconn.py
old mode 100755
new mode 100644
index d416efbd..85ba3ab3
--- a/daklib/dbconn.py
+++ b/daklib/dbconn.py
@@ -37,7 +37,9 @@ import os
 import re
 import psycopg2
 import traceback
-from datetime import datetime
+from datetime import datetime, timedelta
+from errno import ENOENT
+from tempfile import mkstemp, mkdtemp
 
 from inspect import getargspec
 
@@ -51,7 +53,6 @@ from sqlalchemy.exc import *
 from sqlalchemy.orm.exc import NoResultFound
 
 from config import Config
-from singleton import Singleton
 from textutils import fix_maintainer
 
 ################################################################################
@@ -376,16 +377,16 @@ def get_binary_from_name_suite(package, suitename, session=None):
 
     sql = """SELECT DISTINCT(b.package), b.version, c.name, su.suite_name
              FROM binaries b, files fi, location l, component c, bin_associations ba, suite su
-            WHERE b.package=:package
+            WHERE b.package='%(package)s'
              AND b.file = fi.id
              AND fi.location = l.id
              AND l.component = c.id
              AND ba.bin=b.id
              AND ba.suite = su.id
-            AND su.suite_name=:suitename
+            AND su.suite_name %(suitename)s
          ORDER BY b.version DESC"""
 
-    return session.execute(sql, {'package': package, 'suitename': suitename})
+    return session.execute(sql % {'package': package, 'suitename': suitename})
 
 __all__.append('get_binary_from_name_suite')
 
@@ -430,12 +431,188 @@ __all__.append('BinaryACLMap')
 
 ################################################################################
 
+MINIMAL_APT_CONF="""
+Dir
+{
+   ArchiveDir "%(archivepath)s";
+   OverrideDir "/srv/ftp.debian.org/scripts/override/";
+   CacheDir "/srv/ftp.debian.org/database/";
+};
+
+Default
+{
+   Packages::Compress ". bzip2 gzip";
+   Sources::Compress ". bzip2 gzip";
+   DeLinkLimit 0;
+   FileMode 0664;
+}
+
+bindirectory "incoming"
+{
+   Packages "Packages";
+   Contents " ";
+
+   BinOverride "override.sid.all3";
+   BinCacheDB "packages-accepted.db";
+
+   FileList "%(filelist)s";
+
+   PathPrefix "";
+   Packages::Extensions ".deb .udeb";
+};
+
+bindirectory "incoming/"
+{
+   Sources "Sources";
+   BinOverride "override.sid.all3";
+   SrcOverride "override.sid.all3.src";
+   FileList "%(filelist)s";
+};
+"""
+
 class BuildQueue(object):
     def __init__(self, *args, **kwargs):
         pass
 
     def __repr__(self):
-        return '<Queue %s>' % self.queue_name
+        return '<BuildQueue %s>' % self.queue_name
+
+    def write_metadata(self, starttime, force=False):
+        # Do we write out metafiles?
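+        # ('force' overrides the per-queue generate_metadata flag.)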
+ if not (force or self.generate_metadata): + return + + session = DBConn().session().object_session(self) + + fl_fd = fl_name = ac_fd = ac_name = None + tempdir = None + arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ]) + startdir = os.getcwd() + + try: + # Grab files we want to include + newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all() + # Write file list with newer files + (fl_fd, fl_name) = mkstemp() + for n in newer: + os.write(fl_fd, '%s\n' % n.fullpath) + os.close(fl_fd) + + # Write minimal apt.conf + # TODO: Remove hardcoding from template + (ac_fd, ac_name) = mkstemp() + os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path, + 'filelist': fl_name}) + os.close(ac_fd) + + # Run apt-ftparchive generate + os.chdir(os.path.dirname(ac_name)) + os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name)) + + # Run apt-ftparchive release + # TODO: Eww - fix this + bname = os.path.basename(self.path) + os.chdir(self.path) + os.chdir('..') + + # We have to remove the Release file otherwise it'll be included in the + # new one + try: + os.unlink(os.path.join(bname, 'Release')) + except OSError: + pass + + os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname)) + + # Sign if necessary + if self.signingkey: + cnf = Config() + keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"] + if cnf.has_key("Dinstall::SigningPubKeyring"): + keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"] + + os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey)) + + # Move the files if we got this far + os.rename('Release', os.path.join(bname, 'Release')) + if self.signingkey: + os.rename('Release.gpg', os.path.join(bname, 'Release.gpg')) + + # Clean up any left behind files + finally: + os.chdir(startdir) + if fl_fd: + try: + os.close(fl_fd) + except OSError: + pass + + if fl_name: + try: + os.unlink(fl_name) + except OSError: + pass + + if ac_fd: + try: + os.close(ac_fd) + except OSError: + pass + + if ac_name: + try: + os.unlink(ac_name) + except OSError: + pass + + def clean_and_update(self, starttime, Logger, dryrun=False): + """WARNING: This routine commits for you""" + session = DBConn().session().object_session(self) + + if self.generate_metadata and not dryrun: + self.write_metadata(starttime) + + # Grab files older than our execution time + older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all() + + for o in older: + killdb = False + try: + if dryrun: + Logger.log(["I: Would have removed %s from the queue" % o.fullpath]) + else: + Logger.log(["I: Removing %s from the queue" % o.fullpath]) + os.unlink(o.fullpath) + killdb = True + except OSError, e: + # If it wasn't there, don't worry + if e.errno == ENOENT: + killdb = True + else: + # TODO: Replace with proper logging call + Logger.log(["E: Could not remove %s" % o.fullpath]) + + if killdb: + session.delete(o) + + session.commit() + + for f in 
os.listdir(self.path):
+            if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release'):
+                continue
+
+            try:
+                r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
+            except NoResultFound:
+                fp = os.path.join(self.path, f)
+                if dryrun:
+                    Logger.log(["I: Would remove unused link %s" % fp])
+                else:
+                    Logger.log(["I: Removing unused link %s" % fp])
+                    try:
+                        os.unlink(fp)
+                    except OSError:
+                        Logger.log(["E: Failed to unlink unreferenced file %s" % fp])
 
     def add_file_from_pool(self, poolfile):
         """Copies a file into the pool.  Assumes that the PoolFile object is
@@ -451,12 +628,12 @@ class BuildQueue(object):
             # In this case, update the BuildQueueFile entry so we
             # don't remove it too early
             f.lastused = datetime.now()
-            DBConn().session().object_session(pf).add(f)
+            DBConn().session().object_session(poolfile).add(f)
             return f
 
         # Prepare BuildQueueFile object
         qf = BuildQueueFile()
-        qf.queue_id = self.queue_id
+        qf.build_queue_id = self.queue_id
         qf.lastused = datetime.now()
         qf.filename = poolfile_basename
 
@@ -464,14 +641,14 @@ class BuildQueue(object):
         queuepath = os.path.join(self.path, poolfile_basename)
         try:
-            if self.copy_pool_files:
+            if self.copy_files:
                 # We need to copy instead of symlink
                 import utils
-                utils.copy(targetfile, queuepath)
+                utils.copy(targetpath, queuepath)
                 # NULL in the fileid field implies a copy
                 qf.fileid = None
             else:
-                os.symlink(targetfile, queuepath)
+                os.symlink(targetpath, queuepath)
                 qf.fileid = poolfile.file_id
         except OSError:
             return None
@@ -517,7 +694,12 @@ class BuildQueueFile(object):
         pass
 
     def __repr__(self):
-        return '<BuildQueueFile %s (%s)>' % (self.filename, self.queue_id)
+        return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
+
+    @property
+    def fullpath(self):
+        return os.path.join(self.buildqueue.path, self.filename)
+
 
 __all__.append('BuildQueueFile')
 
@@ -970,7 +1152,7 @@ def get_poolfile_like_name(filename, session=None):
     """
 
     # TODO: There must be a way of properly using bind parameters with %FOO%
-    q = session.query(PoolFile).filter(PoolFile.filename.like('%%%s%%' % filename))
+    q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
 
     return q.all()
 
@@ -1255,6 +1437,19 @@ class DBChange(object):
     def __repr__(self):
         return '<DBChange %s>' % self.changesname
 
+    def clean_from_queue(self):
+        session = DBConn().session().object_session(self)
+
+        # Remove changes_pool_files entries
+        self.poolfiles = []
+
+        # Remove changes_pending_files references
+        self.files = []
+
+        # Clear out of queue
+        self.in_queue = None
+        self.approved_for_id = None
+
 __all__.append('DBChange')
 
 @session_wrapper
@@ -2063,6 +2258,11 @@ def add_dsc_to_db(u, filename, session=None):
         poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
         pfs.append(poolfile)
         files_id = poolfile.file_id
+    else:
+        poolfile = get_poolfile_by_id(files_id, session)
+        if poolfile is None:
+            utils.fubar("INTERNAL ERROR. 
Found no poolfile with id %d" % files_id) + pfs.append(poolfile) df.poolfile_id = files_id session.add(df) @@ -2077,21 +2277,22 @@ def add_dsc_to_db(u, filename, session=None): uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id) added_ids = {} - for up in uploader_ids: - if added_ids.has_key(up): - utils.warn("Already saw uploader %s for source %s" % (up, source.source)) + for up_id in uploader_ids: + if added_ids.has_key(up_id): + import utils + utils.warn("Already saw uploader %s for source %s" % (up_id, source.source)) continue - added_ids[u]=1 + added_ids[up_id]=1 su = SrcUploader() - su.maintainer_id = up + su.maintainer_id = up_id su.source_id = source.source_id session.add(su) session.flush() - return dsc_component, dsc_location_id, pfs + return source, dsc_component, dsc_location_id, pfs __all__.append('add_dsc_to_db') @@ -2502,65 +2703,72 @@ __all__.append('UploadBlock') ################################################################################ -class DBConn(Singleton): +class DBConn(object): """ database module init. """ + __shared_state = {} + def __init__(self, *args, **kwargs): - super(DBConn, self).__init__(*args, **kwargs) + self.__dict__ = self.__shared_state - def _startup(self, *args, **kwargs): - self.debug = False - if kwargs.has_key('debug'): - self.debug = True - self.__createconn() + if not getattr(self, 'initialised', False): + self.initialised = True + self.debug = kwargs.has_key('debug') + self.__createconn() def __setuptables(self): - self.tbl_architecture = Table('architecture', self.db_meta, autoload=True) - self.tbl_archive = Table('archive', self.db_meta, autoload=True) - self.tbl_bin_associations = Table('bin_associations', self.db_meta, autoload=True) - self.tbl_binaries = Table('binaries', self.db_meta, autoload=True) - self.tbl_binary_acl = Table('binary_acl', self.db_meta, autoload=True) - self.tbl_binary_acl_map = Table('binary_acl_map', self.db_meta, autoload=True) - self.tbl_build_queue = Table('build_queue', self.db_meta, autoload=True) - self.tbl_build_queue_files = Table('build_queue_files', self.db_meta, autoload=True) - self.tbl_component = Table('component', self.db_meta, autoload=True) - self.tbl_config = Table('config', self.db_meta, autoload=True) - self.tbl_content_associations = Table('content_associations', self.db_meta, autoload=True) - self.tbl_content_file_names = Table('content_file_names', self.db_meta, autoload=True) - self.tbl_content_file_paths = Table('content_file_paths', self.db_meta, autoload=True) - self.tbl_changes_pending_binary = Table('changes_pending_binaries', self.db_meta, autoload=True) - self.tbl_changes_pending_files = Table('changes_pending_files', self.db_meta, autoload=True) - self.tbl_changes_pending_files_map = Table('changes_pending_files_map', self.db_meta, autoload=True) - self.tbl_changes_pending_source = Table('changes_pending_source', self.db_meta, autoload=True) - self.tbl_changes_pending_source_files = Table('changes_pending_source_files', self.db_meta, autoload=True) - self.tbl_changes_pool_files = Table('changes_pool_files', self.db_meta, autoload=True) - self.tbl_dsc_files = Table('dsc_files', self.db_meta, autoload=True) - self.tbl_files = Table('files', self.db_meta, autoload=True) - self.tbl_fingerprint = Table('fingerprint', self.db_meta, autoload=True) - self.tbl_keyrings = Table('keyrings', self.db_meta, autoload=True) - self.tbl_changes = Table('changes', self.db_meta, autoload=True) - self.tbl_keyring_acl_map = Table('keyring_acl_map', self.db_meta, autoload=True) - 
self.tbl_location = Table('location', self.db_meta, autoload=True) - self.tbl_maintainer = Table('maintainer', self.db_meta, autoload=True) - self.tbl_new_comments = Table('new_comments', self.db_meta, autoload=True) - self.tbl_override = Table('override', self.db_meta, autoload=True) - self.tbl_override_type = Table('override_type', self.db_meta, autoload=True) - self.tbl_pending_content_associations = Table('pending_content_associations', self.db_meta, autoload=True) - self.tbl_policy_queue = Table('policy_queue', self.db_meta, autoload=True) - self.tbl_priority = Table('priority', self.db_meta, autoload=True) - self.tbl_section = Table('section', self.db_meta, autoload=True) - self.tbl_source = Table('source', self.db_meta, autoload=True) - self.tbl_source_acl = Table('source_acl', self.db_meta, autoload=True) - self.tbl_src_associations = Table('src_associations', self.db_meta, autoload=True) - self.tbl_src_format = Table('src_format', self.db_meta, autoload=True) - self.tbl_src_uploaders = Table('src_uploaders', self.db_meta, autoload=True) - self.tbl_suite = Table('suite', self.db_meta, autoload=True) - self.tbl_suite_architectures = Table('suite_architectures', self.db_meta, autoload=True) - self.tbl_suite_src_formats = Table('suite_src_formats', self.db_meta, autoload=True) - self.tbl_suite_build_queue_copy = Table('suite_build_queue_copy', self.db_meta, autoload=True) - self.tbl_uid = Table('uid', self.db_meta, autoload=True) - self.tbl_upload_blocks = Table('upload_blocks', self.db_meta, autoload=True) + tables = ( + 'architecture', + 'archive', + 'bin_associations', + 'binaries', + 'binary_acl', + 'binary_acl_map', + 'build_queue', + 'build_queue_files', + 'component', + 'config', + 'content_associations', + 'content_file_names', + 'content_file_paths', + 'changes_pending_binaries', + 'changes_pending_files', + 'changes_pending_files_map', + 'changes_pending_source', + 'changes_pending_source_files', + 'changes_pool_files', + 'dsc_files', + 'files', + 'fingerprint', + 'keyrings', + 'changes', + 'keyring_acl_map', + 'location', + 'maintainer', + 'new_comments', + 'override', + 'override_type', + 'pending_content_associations', + 'policy_queue', + 'priority', + 'section', + 'source', + 'source_acl', + 'src_associations', + 'src_format', + 'src_uploaders', + 'suite', + 'suite_architectures', + 'suite_src_formats', + 'suite_build_queue_copy', + 'uid', + 'upload_blocks', + ) + + for table_name in tables: + table = Table(table_name, self.db_meta, autoload=True) + setattr(self, 'tbl_%s' % table_name, table) def __setupmappers(self): mapper(Architecture, self.tbl_architecture, @@ -2649,6 +2857,16 @@ class DBConn(Singleton): poolfiles = relation(PoolFile, secondary=self.tbl_changes_pool_files, backref="changeslinks"), + seen = self.tbl_changes.c.seen, + source = self.tbl_changes.c.source, + binaries = self.tbl_changes.c.binaries, + architecture = self.tbl_changes.c.architecture, + distribution = self.tbl_changes.c.distribution, + urgency = self.tbl_changes.c.urgency, + maintainer = self.tbl_changes.c.maintainer, + changedby = self.tbl_changes.c.changedby, + date = self.tbl_changes.c.date, + version = self.tbl_changes.c.version, files = relation(ChangePendingFile, secondary=self.tbl_changes_pending_files_map, backref="changesfile"), @@ -2657,11 +2875,16 @@ class DBConn(Singleton): primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)), approved_for_id = self.tbl_changes.c.approved_for)) - mapper(ChangePendingBinary, self.tbl_changes_pending_binary, - properties = 
dict(change_pending_binary_id = self.tbl_changes_pending_binary.c.id)) + mapper(ChangePendingBinary, self.tbl_changes_pending_binaries, + properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id)) mapper(ChangePendingFile, self.tbl_changes_pending_files, - properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id)) + properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id, + filename = self.tbl_changes_pending_files.c.filename, + size = self.tbl_changes_pending_files.c.size, + md5sum = self.tbl_changes_pending_files.c.md5sum, + sha1sum = self.tbl_changes_pending_files.c.sha1sum, + sha256sum = self.tbl_changes_pending_files.c.sha256sum)) mapper(ChangePendingSource, self.tbl_changes_pending_source, properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
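
################################################################################

The write_metadata() hunk above builds its apt-ftparchive inputs with mkstemp() and tears them down in a finally block. A standalone sketch of that pattern, with a hypothetical file body and nothing dak-specific assumed:

    import os
    from tempfile import mkstemp

    fd = name = None
    try:
        (fd, name) = mkstemp()
        os.write(fd, 'one line per file\n')
        os.close(fd)
        # ... hand `name` to an external tool such as apt-ftparchive ...
    finally:
        if fd:
            try:
                os.close(fd)    # may already be closed; the guard keeps cleanup safe
            except OSError:
                pass
        if name:
            try:
                os.unlink(name)
            except OSError:
                pass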
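clean_and_update() and write_metadata() split queue files on the same cutoff: a file is kept while lastused plus stay_of_execution is still in the future at starttime. A minimal sketch of that test with made-up values:

    from datetime import datetime, timedelta

    stay_of_execution = 86400              # hypothetical: one day, in seconds
    starttime = datetime.now()

    def expired(lastused):
        # Mirrors the SQLAlchemy filter used for the 'older' query above.
        return lastused + timedelta(seconds=stay_of_execution) <= starttime

    print expired(starttime - timedelta(days=2))    # True: past the grace period
    print expired(starttime - timedelta(hours=1))   # False: still protected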
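DBChange.clean_from_queue() drops many-to-many links simply by assigning empty lists: with a relation mapped over a secondary table (as poolfiles and files are in the DBChange mapper above), SQLAlchemy deletes the association rows on flush and leaves the referenced rows alone. A self-contained sketch with hypothetical tables, using the same classic mapper()/relation() style as the diff:

    from sqlalchemy import create_engine, Table, Column, Integer, ForeignKey, MetaData
    from sqlalchemy.orm import mapper, relation, sessionmaker

    metadata = MetaData()
    parent = Table('parent', metadata, Column('id', Integer, primary_key=True))
    child  = Table('child',  metadata, Column('id', Integer, primary_key=True))
    link   = Table('link',   metadata,
                   Column('parent_id', Integer, ForeignKey('parent.id')),
                   Column('child_id',  Integer, ForeignKey('child.id')))

    class Parent(object):
        pass

    class Child(object):
        pass

    mapper(Child, child)
    mapper(Parent, parent, properties = dict(
        children = relation(Child, secondary=link)))

    engine = create_engine('sqlite://')
    metadata.create_all(engine)
    session = sessionmaker(bind=engine)()

    p = Parent()
    p.children = [Child(), Child()]
    session.add(p)
    session.commit()

    p.children = []          # only the two link rows are deleted on commit
    session.commit()
    assert session.query(Child).count() == 2
    assert len(engine.execute(link.select()).fetchall()) == 0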
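Finally, the DBConn rewrite swaps daklib's Singleton base class for the Borg (shared-state) idiom: every DBConn() call returns a new object, but all of them alias one __dict__, so the engine and mapper setup guarded by 'initialised' runs only once. The idiom in isolation, with illustrative names:

    class Borg(object):
        __shared_state = {'setups': 0}

        def __init__(self):
            self.__dict__ = self.__shared_state   # every instance aliases one dict
            if not getattr(self, 'initialised', False):
                self.initialised = True
                self.setups += 1                  # expensive setup would go here

    a = Borg()
    b = Borg()
    assert a is not b          # distinct objects, unlike a true singleton
    assert a.setups == 1       # but initialisation happened exactly once
    b.shared = 'everywhere'
    assert a.shared == 'everywhere'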