From: Mark Hymers
Date: Tue, 3 Nov 2009 17:33:10 +0000 (+0000)
Subject: first pass at manage-build-queues
X-Git-Url: https://git.decadent.org.uk/gitweb/?a=commitdiff_plain;h=c55d086c0c8463fcd2d8dcee2dcc6c414ee36d91;p=dak.git

first pass at manage-build-queues

Signed-off-by: Mark Hymers
---

diff --git a/dak/dakdb/update24.py b/dak/dakdb/update24.py
new file mode 100755
index 00000000..4e8c505d
--- /dev/null
+++ b/dak/dakdb/update24.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+
+"""
+Add some meta info to queues
+
+@contact: Debian FTP Master
+@copyright: 2009 Mark Hymers
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+import psycopg2
+from daklib.dak_exceptions import DBUpdateError
+
+def do_update(self):
+    print "Add meta info columns to queues."
+
+    try:
+        c = self.db.cursor()
+
+        c.execute("ALTER TABLE policy_queue ADD COLUMN generate_metadata BOOL DEFAULT FALSE NOT NULL")
+        c.execute("ALTER TABLE policy_queue ADD COLUMN origin TEXT DEFAULT NULL")
+        c.execute("ALTER TABLE policy_queue ADD COLUMN label TEXT DEFAULT NULL")
+        c.execute("ALTER TABLE policy_queue ADD COLUMN releasedescription TEXT DEFAULT NULL")
+        c.execute("ALTER TABLE policy_queue ADD COLUMN signingkey TEXT DEFAULT NULL")
+        c.execute("ALTER TABLE policy_queue ADD COLUMN stay_of_execution INT4 NOT NULL DEFAULT 86400 CHECK (stay_of_execution >= 0)")
+        c.execute("""ALTER TABLE policy_queue
+                        ADD CONSTRAINT policy_queue_meta_sanity_check
+                            CHECK ( (generate_metadata IS FALSE)
+                                 OR (origin IS NOT NULL AND label IS NOT NULL AND releasedescription IS NOT NULL) )""")
+
+        c.execute("ALTER TABLE build_queue ADD COLUMN generate_metadata BOOL DEFAULT FALSE NOT NULL")
+        c.execute("ALTER TABLE build_queue ADD COLUMN origin TEXT DEFAULT NULL")
+        c.execute("ALTER TABLE build_queue ADD COLUMN label TEXT DEFAULT NULL")
+        c.execute("ALTER TABLE build_queue ADD COLUMN releasedescription TEXT DEFAULT NULL")
+        c.execute("ALTER TABLE build_queue ADD COLUMN signingkey TEXT DEFAULT NULL")
+        c.execute("ALTER TABLE build_queue ADD COLUMN stay_of_execution INT4 NOT NULL DEFAULT 86400 CHECK (stay_of_execution >= 0)")
+        c.execute("""ALTER TABLE build_queue
+                        ADD CONSTRAINT build_queue_meta_sanity_check
+                            CHECK ( (generate_metadata IS FALSE)
+                                 OR (origin IS NOT NULL AND label IS NOT NULL AND releasedescription IS NOT NULL) )""")
+
+        print "Committing"
+        c.execute("UPDATE config SET value = '24' WHERE name = 'db_revision'")
+        self.db.commit()
+
+    except psycopg2.InternalError, msg:
+        self.db.rollback()
+        raise DBUpdateError, "Database error, rollback issued. Error message : %s" % (str(msg))
+
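The new columns come with a sanity check: generate_metadata may only be turned on for a queue that also has origin, label and releasedescription filled in. As a rough illustration only (not part of this patch, and both the DSN and the 'buildd' queue name are made up), enabling metadata generation via psycopg2 has to set all of those fields in the same statement:

    import psycopg2

    # Hypothetical connection and queue name, shown only to illustrate the
    # build_queue_meta_sanity_check constraint added above.
    conn = psycopg2.connect("dbname=projectb")
    cur = conn.cursor()
    cur.execute("""UPDATE build_queue
                      SET generate_metadata = TRUE,
                          origin = 'Debian',
                          label = 'Debian',
                          releasedescription = 'buildd incoming repository'
                    WHERE queue_name = 'buildd'""")
    conn.commit()

Flipping generate_metadata on its own would be rejected by the constraint, which is exactly the point of the check.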
diff --git a/dak/manage_build_queues.py b/dak/manage_build_queues.py
new file mode 100755
index 00000000..402521f0
--- /dev/null
+++ b/dak/manage_build_queues.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python
+
+"""Manage build queues"""
+# Copyright (C) 2000, 2001, 2002, 2006  James Troup
+# Copyright (C) 2009  Mark Hymers
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+################################################################################
+
+import os, os.path, stat, sys
+from datetime import datetime
+import apt_pkg
+
+from daklib import daklog
+from daklib.dbconn import *
+from daklib.config import Config
+
+################################################################################
+
+Options = None
+Logger = None
+
+################################################################################
+
+def usage (exit_code=0):
+    print """Usage: dak manage-build-queues [OPTIONS] buildqueue1 buildqueue2
+Manage the contents of one or more build queues
+
+  -n, --no-action    don't do anything
+  -v, --verbose      explain what is being done
+  -h, --help         show this help and exit"""
+
+    sys.exit(exit_code)
+
+################################################################################
+
+def main ():
+    global Options, Logger
+
+    cnf = Config()
+
+    for i in ["Help", "No-Action", "Verbose" ]:
+        if not cnf.has_key("Manage-Build-Queues::Options::%s" % (i)):
+            cnf["Manage-Build-Queues::Options::%s" % (i)] = ""
+
+    Arguments = [('h',"help","Manage-Build-Queues::Options::Help"),
+                 ('n',"no-action","Manage-Build-Queues::Options::No-Action"),
+                 ('v',"verbose","Manage-Build-Queues::Options::Verbose")]
+
+    queue_names = apt_pkg.ParseCommandLine(cnf.Cnf, Arguments, sys.argv)
+    Options = cnf.SubTree("Manage-Build-Queues::Options")
+
+    if Options["Help"]:
+        usage()
+
+    Logger = daklog.Logger(cnf, 'manage-build-queues', Options['No-Action'])
+
+    starttime = datetime.now()
+
+    # For each given queue, look up the BuildQueue object and clean it
+    for q in queue_names:
+        session = DBConn().session()
+        queue = get_build_queue(q.lower(), session)
+        if queue:
+            Logger.log(['cleaning queue %s using datetime %s' % (q, starttime)])
+            queue.clean_and_update(starttime, dryrun=Options["No-Action"])
+        else:
+            Logger.log(['cannot find queue %s' % q])
+
+    Logger.close()
+
+#######################################################################################
+
+if __name__ == '__main__':
+    main()
diff --git a/dak/update_db.py b/dak/update_db.py
index 49a6b584..ecdd99a7 100755
--- a/dak/update_db.py
+++ b/dak/update_db.py
@@ -45,7 +45,7 @@ from daklib.dak_exceptions import DBUpdateError
 ################################################################################
 
 Cnf = None
-required_database_schema = 23
+required_database_schema = 24
 
 ################################################################################
 
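The new dak command only drives the per-queue cleanup, so the same work can be exercised directly against the ORM helpers it uses. The snippet below is a sketch of that call flow; the queue name 'buildd' is an assumed example and not something this patch creates:

    from datetime import datetime

    from daklib.dbconn import DBConn, get_build_queue

    # Mirror what manage_build_queues.main() does for a single queue name.
    session = DBConn().session()
    queue = get_build_queue('buildd', session)
    if queue:
        # clean_and_update() commits for you; dryrun=True only reports what
        # would be removed instead of unlinking anything.
        queue.clean_and_update(datetime.now(), dryrun=True)

The stay_of_execution column added in update24 sets the cut-off: files whose lastused timestamp falls outside that window are removed from disk and dropped from the database.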
diff --git a/daklib/dbconn.py b/daklib/dbconn.py
index 88791851..8543ab10 100644
--- a/daklib/dbconn.py
+++ b/daklib/dbconn.py
@@ -37,7 +37,9 @@ import os
 import re
 import psycopg2
 import traceback
-from datetime import datetime
+from datetime import datetime, timedelta
+from errno import ENOENT
+from tempfile import mkstemp, mkdtemp
 
 from inspect import getargspec
 
@@ -429,6 +431,45 @@ __all__.append('BinaryACLMap')
 
 ################################################################################
 
+MINIMAL_APT_CONF="""
+Dir
+{
+   ArchiveDir "%(archivepath)s";
+   OverrideDir "/srv/ftp.debian.org/scripts/override/";
+   CacheDir "/srv/ftp.debian.org/database/";
+};
+
+Default
+{
+   Packages::Compress ". bzip2 gzip";
+   Sources::Compress ". bzip2 gzip";
+   DeLinkLimit 0;
+   FileMode 0664;
+};
+
+bindirectory "incoming"
+{
+   Packages "Packages";
+   Contents " ";
+
+   BinOverride "override.sid.all3";
+   BinCacheDB "packages-accepted.db";
+
+   FileList "%(filelist)s";
+
+   PathPrefix "";
+   Packages::Extensions ".deb .udeb";
+};
+
+bindirectory "incoming/"
+{
+   Sources "Sources";
+   BinOverride "override.sid.all3";
+   SrcOverride "override.sid.all3.src";
+   FileList "%(filelist)s";
+};
+"""
+
 class BuildQueue(object):
     def __init__(self, *args, **kwargs):
         pass
@@ -436,6 +477,121 @@ class BuildQueue(object):
     def __repr__(self):
         return '<BuildQueue %s>' % self.queue_name
 
+    def write_metadata(self, ourtime, force=False):
+        # Do we write out metafiles?
+        if not (force or self.generate_metadata):
+            return
+
+        session = DBConn().session().object_session(self)
+
+        fl_fd = fl_name = ac_fd = ac_name = None
+        tempdir = None
+        arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
+        startdir = os.getcwd()
+
+        try:
+            # Grab files we want to include: anything in this queue still
+            # inside its stay of execution
+            newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused > ourtime).all()
+
+            # Write file list with newer files
+            (fl_fd, fl_name) = mkstemp()
+            for n in newer:
+                os.write(fl_fd, '%s\n' % n.fullpath)
+            os.close(fl_fd)
+
+            # Write minimal apt.conf
+            # TODO: Remove hardcoding from template
+            (ac_fd, ac_name) = mkstemp()
+            os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
+                                                'filelist': fl_name})
+            os.close(ac_fd)
+
+            # Run apt-ftparchive generate on the apt.conf we just wrote
+            os.chdir(os.path.dirname(ac_name))
+            os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
+
+            # Run apt-ftparchive release
+            # TODO: Eww - fix this
+            bname = os.path.basename(self.path)
+            os.chdir(self.path)
+            os.chdir('..')
+            os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
+
+            # Sign if necessary
+            if self.signingkey:
+                cnf = Config()
+                keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
+                if cnf.has_key("Dinstall::SigningPubKeyring"):
+                    keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
+
+                os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release" % (keyring, self.signingkey))
+
+            # Move the files if we got this far
+            os.rename('Release', os.path.join(bname, 'Release'))
+            if self.signingkey:
+                os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
+
+        # Clean up any left behind files
+        finally:
+            os.chdir(startdir)
+            if fl_fd:
+                try:
+                    os.close(fl_fd)
+                except OSError:
+                    pass
+
+            if fl_name:
+                try:
+                    os.unlink(fl_name)
+                except OSError:
+                    pass
+
+            if ac_fd:
+                try:
+                    os.close(ac_fd)
+                except OSError:
+                    pass
+
+            if ac_name:
+                try:
+                    os.unlink(ac_name)
+                except OSError:
+                    pass
+
+    def clean_and_update(self, starttime, dryrun=False):
+        """WARNING: This routine commits for you"""
+        session = DBConn().session().object_session(self)
+
+        ourtime = starttime - timedelta(seconds=self.stay_of_execution)
+
+        if self.generate_metadata:
+            self.write_metadata(ourtime)
+
+        # Grab files from this queue older than our execution time
+        older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused <= ourtime).all()
+
+        for o in older:
+            killdb = False
+            try:
+                if dryrun:
+                    print "I: Would have removed %s from the queue" % o.fullpath
+                else:
+                    os.unlink(o.fullpath)
+                    killdb = True
+            except OSError, e:
+                # If it wasn't there, don't worry
+                if e.errno == ENOENT:
+                    killdb = True
+                else:
+                    # TODO: Replace with proper logging call
+                    print "E: Could not remove %s" % o.fullpath
+
+            if killdb:
+                session.delete(o)
+
+        session.commit()
+
+
     def add_file_from_pool(self, poolfile):
         """Copies a file into the pool.  Assumes that the PoolFile object is
         attached to the same SQLAlchemy session as the Queue object is.
@@ -518,6 +674,11 @@ class BuildQueueFile(object):
     def __repr__(self):
         return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
 
+    @property
+    def fullpath(self):
+        return os.path.join(self.buildqueue.path, self.filename)
+
+
 __all__.append('BuildQueueFile')
 
 ################################################################################
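For reference, the MINIMAL_APT_CONF template above is what write_metadata() hands to apt-ftparchive generate once its two placeholders are substituted. A rough rendering with made-up values (the real ones are the queue's path column and the mkstemp()-created file list) looks like this:

    from daklib.dbconn import MINIMAL_APT_CONF

    # Hypothetical values for illustration; write_metadata() fills these in
    # from self.path and the temporary file list it has just written.
    print MINIMAL_APT_CONF % {'archivepath': '/srv/ftp.debian.org/queue/buildd',
                              'filelist': '/tmp/tmpa1b2c3'}

The resulting apt.conf points apt-ftparchive at the queue directory and restricts it to exactly the files in the list, so only files still inside their stay of execution end up in the generated Packages and Sources indices.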