From: Mike O'Connor
Date: Sun, 8 Nov 2009 01:49:40 +0000 (-0500)
Subject: Merge remote branch 'origin/master' into contents
X-Git-Url: https://git.decadent.org.uk/gitweb/?a=commitdiff_plain;h=3fd95d52f5c4a4761a1e332d469c6948e091723c;hp=-c;p=dak.git

Merge remote branch 'origin/master' into contents

* origin/master: (41 commits)
  pass arguments properly to add_known_changes
  One o too much
  re-enable queue-report stuff
  allow setting in_queue
  quick fix up of queue-report for NEW
  if it isn't a dictionary, don't try and use it as one...
  fix variable name
  run on all queues and don't pass useless -v
  remove pointless verbose option
  not everything can be done in parallel...
  add back clean-suites
  Remove lockfile
  dinstall, unchecked functions
  buildd
  For now do not run process-upload in dinstall
  remove accepted references
  re-enable fingerprint importing
  honour No-Action and say what's going on
  move build queue cleaning to manage-build-queues
  ...
---

3fd95d52f5c4a4761a1e332d469c6948e091723c
diff --combined daklib/dbconn.py
index d05dd159,84d5b1c2..d4a1d99e
--- a/daklib/dbconn.py
+++ b/daklib/dbconn.py
@@@ -37,7 -37,9 +37,9 @@@ import os
  import re
  import psycopg2
  import traceback
- from datetime import datetime
+ from datetime import datetime, timedelta
+ from errno import ENOENT
+ from tempfile import mkstemp, mkdtemp
  
  from inspect import getargspec
  
@@@ -50,8 -52,6 +52,8 @@@ from sqlalchemy import types as sqltypes
  from sqlalchemy.exc import *
  from sqlalchemy.orm.exc import NoResultFound
  
 +# Only import Config until Queue stuff is changed to store its config
 +# in the database
  from config import Config
  from textutils import fix_maintainer
  
@@@ -431,6 -431,45 +433,45 @@@ __all__.append('BinaryACLMap')
  
  ################################################################################
  
+ MINIMAL_APT_CONF="""
+ Dir
+ {
+    ArchiveDir "%(archivepath)s";
+    OverrideDir "/srv/ftp.debian.org/scripts/override/";
+    CacheDir "/srv/ftp.debian.org/database/";
+ };
+ 
+ Default
+ {
+    Packages::Compress ". bzip2 gzip";
+    Sources::Compress ". bzip2 gzip";
+    DeLinkLimit 0;
+    FileMode 0664;
+ }
+ 
+ bindirectory "incoming"
+ {
+    Packages "Packages";
+    Contents " ";
+ 
+    BinOverride "override.sid.all3";
+    BinCacheDB "packages-accepted.db";
+ 
+    FileList "%(filelist)s";
+ 
+    PathPrefix "";
+    Packages::Extensions ".deb .udeb";
+ };
+ 
+ bindirectory "incoming/"
+ {
+    Sources "Sources";
+    BinOverride "override.sid.all3";
+    SrcOverride "override.sid.all3.src";
+    FileList "%(filelist)s";
+ };
+ """
+ 
  class BuildQueue(object):
      def __init__(self, *args, **kwargs):
          pass
  
@@@ -438,6 -477,143 +479,143 @@@ class BuildQueue(object)
      def __repr__(self):
          return '<BuildQueue %s>' % self.queue_name
  
+     def write_metadata(self, starttime, force=False):
+         # Do we write out metafiles?
+         if not (force or self.generate_metadata):
+             return
+ 
+         session = DBConn().session().object_session(self)
+ 
+         fl_fd = fl_name = ac_fd = ac_name = None
+         tempdir = None
+         arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
+         startdir = os.getcwd()
+ 
+         try:
+             # Grab files we want to include
+             newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
+             # Write file list with newer files
+             (fl_fd, fl_name) = mkstemp()
+             for n in newer:
+                 os.write(fl_fd, '%s\n' % n.fullpath)
+             os.close(fl_fd)
+ 
+             # Write minimal apt.conf
+             # TODO: Remove hardcoding from template
+             (ac_fd, ac_name) = mkstemp()
+             os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
+                                                 'filelist': fl_name})
+             os.close(ac_fd)
+ 
+             # Run apt-ftparchive generate
+             os.chdir(os.path.dirname(ac_name))
+             os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
+ 
+             # Run apt-ftparchive release
+             # TODO: Eww - fix this
+             bname = os.path.basename(self.path)
+             os.chdir(self.path)
+             os.chdir('..')
+ 
+             # We have to remove the Release file otherwise it'll be included in the
+             # new one
+             try:
+                 os.unlink(os.path.join(bname, 'Release'))
+             except OSError:
+                 pass
+ 
+             os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
+ 
+             # Sign if necessary
+             if self.signingkey:
+                 cnf = Config()
+                 keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
+                 if cnf.has_key("Dinstall::SigningPubKeyring"):
+                     keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
+ 
+                 os.system("""gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
+ 
+             # Move the files if we got this far
+             os.rename('Release', os.path.join(bname, 'Release'))
+             if self.signingkey:
+                 os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
+ 
+         # Clean up any left behind files
+         finally:
+             os.chdir(startdir)
+             if fl_fd:
+                 try:
+                     os.close(fl_fd)
+                 except OSError:
+                     pass
+ 
+             if fl_name:
+                 try:
+                     os.unlink(fl_name)
+                 except OSError:
+                     pass
+ 
+             if ac_fd:
+                 try:
+                     os.close(ac_fd)
+                 except OSError:
+                     pass
+ 
+             if ac_name:
+                 try:
+                     os.unlink(ac_name)
+                 except OSError:
+                     pass
+ 
+     def clean_and_update(self, starttime, Logger, dryrun=False):
+         """WARNING: This routine commits for you"""
+         session = DBConn().session().object_session(self)
+ 
+         if self.generate_metadata and not dryrun:
+             self.write_metadata(starttime)
+ 
+         # Grab files older than our execution time
+         older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
+ 
+         for o in older:
+             killdb = False
+             try:
+                 if dryrun:
+                     Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
+                 else:
+                     Logger.log(["I: Removing %s from the queue" % o.fullpath])
+                     os.unlink(o.fullpath)
+                     killdb = True
+             except OSError, e:
+                 # If it wasn't there, don't worry
+                 if e.errno == ENOENT:
+                     killdb = True
+                 else:
+                     # TODO: Replace with proper logging call
+                     Logger.log(["E: Could not remove %s" % o.fullpath])
+ 
+             if killdb:
+                 session.delete(o)
+ 
+         session.commit()
+ 
+         for f in os.listdir(self.path):
+             if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release'):
+                 continue
+ 
+             try:
+                 r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
+             except NoResultFound:
+                 fp = os.path.join(self.path, f)
+                 if dryrun:
+                     Logger.log(["I: Would remove unused link %s" % fp])
+                 else:
+                     Logger.log(["I: Removing unused link %s" % fp])
+                     try:
+                         os.unlink(fp)
+                     except OSError:
+                         Logger.log(["E: Failed to unlink unreferenced file %s" % fp])
+ 
      def add_file_from_pool(self, poolfile):
          """Copies a file into the pool.  Assumes that the PoolFile object is
          attached to the same SQLAlchemy session as the Queue object is.
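
Note on the hunk above: write_metadata() renders MINIMAL_APT_CONF with the
queue's path and a freshly written file list, then shells out to
apt-ftparchive.  A minimal standalone sketch of that interpolation step
(the paths are illustrative, not taken from the patch):

    # Illustrative only: fill the template the way write_metadata() does,
    # using made-up paths.
    from daklib.dbconn import MINIMAL_APT_CONF
    print MINIMAL_APT_CONF % {'archivepath': '/srv/queue/buildd',
                              'filelist': '/tmp/filelist-example'}
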
@@@ -518,7 -694,12 +696,12 @@@ class BuildQueueFile(object)
          pass
  
      def __repr__(self):
-         return '<BuildQueueFile %s (%s)>' % (self.filename, self.queue_id)
+         return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
+ 
+     @property
+     def fullpath(self):
+         return os.path.join(self.buildqueue.path, self.filename)
+ 
  
  __all__.append('BuildQueueFile')
  
@@@ -780,16 -961,11 +963,16 @@@ def insert_content_paths(binary_id, fullpaths, session=None)
      try:
          # Insert paths
          pathcache = {}
 -        for fullpath in fullpaths:
 -            if fullpath.startswith( './' ):
 -                fullpath = fullpath[2:]
 -            session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )", { 'filename': fullpath, 'id': binary_id} )
 +        def generate_path_dicts():
 +            for fullpath in fullpaths:
 +                if fullpath.startswith( './' ):
 +                    fullpath = fullpath[2:]
 +
 +                yield {'filename':fullpath, 'id': binary_id }
 +
 +        session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
 +                         generate_path_dicts() )
  
          session.commit()
          if privatetrans:
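
Note: the rewritten insert_content_paths() above hands session.execute() a
sequence of bind-parameter dicts, so SQLAlchemy can run the INSERT as a
single executemany instead of one round trip per path.  A self-contained
sketch of the same pattern against a throwaway SQLite table (the table and
values are illustrative, and some SQLAlchemy versions may want a list
rather than a generator):

    # Standalone sketch of the executemany pattern; only the statement
    # shape mirrors the patch, the engine and data are made up.
    from sqlalchemy import create_engine, text

    engine = create_engine('sqlite:///:memory:')
    conn = engine.connect()
    conn.execute(text("CREATE TABLE bin_contents ( file TEXT, binary_id INTEGER )"))
    params = [{'filename': p, 'id': 42}
              for p in ('usr/bin/example', 'usr/share/doc/example/copyright')]
    conn.execute(text("INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )"),
                 params)
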
@@@ -1568,38 -1744,16 +1751,38 @@@ __all__.append('get_override_type')
  
  ################################################################################
  
 -class PendingContentAssociation(object):
 +class DebContents(object):
 +    def __init__(self, *args, **kwargs):
 +        pass
 +
 +    def __repr__(self):
 +        return '<DebContents %s: %s>' % (self.package.package,self.file)
 +
 +__all__.append('DebContents')
 +
 +
 +class UdebContents(object):
      def __init__(self, *args, **kwargs):
          pass
  
      def __repr__(self):
 -        return '<PendingContentAssociation %s>' % self.pca_id
 +        return '<UdebContents %s: %s>' % (self.package.package,self.file)
  
 -__all__.append('PendingContentAssociation')
 +__all__.append('UdebContents')
 +
 +class PendingBinContents(object):
 +    def __init__(self, *args, **kwargs):
 +        pass
  
 -def insert_pending_content_paths(package, fullpaths, session=None):
 +    def __repr__(self):
 +        return '<PendingBinContents %s>' % self.contents_id
 +
 +__all__.append('PendingBinContents')
 +
 +def insert_pending_content_paths(package,
 +                                 is_udeb,
 +                                 fullpaths,
 +                                 session=None):
      """
      Make sure given paths are temporarily associated with given
      package
@@@ -1628,27 -1782,32 +1811,27 @@@
          arch_id = arch.arch_id
  
          # Remove any already existing recorded files for this package
 -        q = session.query(PendingContentAssociation)
 +        q = session.query(PendingBinContents)
          q = q.filter_by(package=package['Package'])
          q = q.filter_by(version=package['Version'])
          q = q.filter_by(architecture=arch_id)
          q.delete()
  
 -        # Insert paths
 -        pathcache = {}
          for fullpath in fullpaths:
 -            (path, filename) = os.path.split(fullpath)
 -
 -            if path.startswith( "./" ):
 -                path = path[2:]
 -
 -            filepath_id = get_or_set_contents_path_id(path, session)
 -            filename_id = get_or_set_contents_file_id(filename, session)
 -            pathcache[fullpath] = (filepath_id, filename_id)
 +            if fullpath.startswith( "./" ):
 +                fullpath = fullpath[2:]
  
 -        for fullpath, dat in pathcache.items():
 -            pca = PendingContentAssociation()
 +            pca = PendingBinContents()
              pca.package = package['Package']
              pca.version = package['Version']
 -            pca.filepath_id = dat[0]
 -            pca.filename_id = dat[1]
 +            pca.file = fullpath
              pca.architecture = arch_id
 +
 +            if is_udeb:
 +                pca.type = 8 # gross
 +            else:
 +                pca.type = 7 # also gross
              session.add(pca)
  
      # Only commit if we set up the session ourself
@@@ -2552,18 -2711,19 +2735,18 @@@ class DBConn(object)
          'binaries',
          'binary_acl',
          'binary_acl_map',
 +        'bin_contents',
          'build_queue',
          'build_queue_files',
          'component',
          'config',
 -        'content_associations',
 -        'content_file_names',
 -        'content_file_paths',
          'changes_pending_binaries',
          'changes_pending_files',
          'changes_pending_files_map',
          'changes_pending_source',
          'changes_pending_source_files',
          'changes_pool_files',
 +        'deb_contents',
          'dsc_files',
          'files',
          'fingerprint',
@@@ -2575,7 -2735,7 +2758,7 @@@
          'new_comments',
          'override',
          'override_type',
 -        'pending_content_associations',
 +        'pending_bin_contents',
          'policy_queue',
          'priority',
          'section',
@@@ -2588,7 -2748,6 +2771,7 @@@
          'suite_architectures',
          'suite_src_formats',
          'suite_build_queue_copy',
 +        'udeb_contents',
          'uid',
          'upload_blocks',
      )
@@@ -2612,29 -2771,12 +2795,29 @@@
                                   binary_id = self.tbl_bin_associations.c.bin,
                                   binary = relation(DBBinary)))
  
 -        mapper(BuildQueue, self.tbl_build_queue,
 -               properties = dict(queue_id = self.tbl_build_queue.c.id))
 -
 -        mapper(BuildQueueFile, self.tbl_build_queue_files,
 -               properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
 -                                 poolfile = relation(PoolFile, backref='buildqueueinstances')))
 +        mapper(PendingBinContents, self.tbl_pending_bin_contents,
 +               properties = dict(contents_id =self.tbl_pending_bin_contents.c.id,
 +                                 filename = self.tbl_pending_bin_contents.c.filename,
 +                                 package = self.tbl_pending_bin_contents.c.package,
 +                                 version = self.tbl_pending_bin_contents.c.version,
 +                                 arch = self.tbl_pending_bin_contents.c.arch,
 +                                 otype = self.tbl_pending_bin_contents.c.type))
 +
 +        mapper(DebContents, self.tbl_deb_contents,
 +               properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
 +                                 package=self.tbl_deb_contents.c.package,
 +                                 component=self.tbl_deb_contents.c.component,
 +                                 arch=self.tbl_deb_contents.c.arch,
 +                                 section=self.tbl_deb_contents.c.section,
 +                                 filename=self.tbl_deb_contents.c.filename))
 +
 +        mapper(UdebContents, self.tbl_udeb_contents,
 +               properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
 +                                 package=self.tbl_udeb_contents.c.package,
 +                                 component=self.tbl_udeb_contents.c.component,
 +                                 arch=self.tbl_udeb_contents.c.arch,
 +                                 section=self.tbl_udeb_contents.c.section,
 +                                 filename=self.tbl_udeb_contents.c.filename))
  
          mapper(DBBinary, self.tbl_binaries,
                 properties = dict(binary_id = self.tbl_binaries.c.id,
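
Note: with the DebContents and UdebContents mappers above in place,
per-package contents become plain ORM queries.  A hedged usage sketch
(the session setup and package name are assumptions, not part of the
patch):

    # Illustrative only: list the files recorded for a binary package via
    # the new DebContents mapper.
    s = DBConn().session()
    for entry in s.query(DebContents).filter_by(package='example-package').limit(10):
        print entry.filename
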
@@@ -2726,11 -2868,6 +2909,11 @@@
                                   source_files = relation(ChangePendingFile,
                                                           secondary=self.tbl_changes_pending_source_files,
                                                           backref="pending_sources"),
 +                                 files = relation(KnownChangePendingFile,
 +                                                  backref="changesfile")))
 +
 +        mapper(KnownChangePendingFile, self.tbl_changes_pending_files,
 +               properties = dict(known_change_pending_file_id = self.tbl_changes_pending_files.c.id))
 +
          mapper(KeyringACLMap, self.tbl_keyring_acl_map,
                 properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
                                   keyring = relation(Keyring, backref="keyring_acl_map"),
@@@ -2753,7 -2890,6 +2936,7 @@@
          mapper(Override, self.tbl_override,
                 properties = dict(suite_id = self.tbl_override.c.suite,
                                   suite = relation(Suite),
 +                                 package = self.tbl_override.c.package,
                                   component_id = self.tbl_override.c.component,
                                   component = relation(Component),
                                   priority_id = self.tbl_override.c.priority,
@@@ -2774,8 -2910,7 +2957,8 @@@
                 properties = dict(priority_id = self.tbl_priority.c.id))
  
          mapper(Section, self.tbl_section,
 -               properties = dict(section_id = self.tbl_section.c.id))
 +               properties = dict(section_id = self.tbl_section.c.id,
 +                                 section=self.tbl_section.c.section))
  
          mapper(DBSource, self.tbl_source,
                 properties = dict(source_id = self.tbl_source.c.id,