--- /dev/null
+#!/usr/bin/env python
+
+"""
+Add metadata columns to the policy and build queue tables
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2009 Mark Hymers <mhy@debian.org>
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+import psycopg2
+from daklib.dak_exceptions import DBUpdateError
+
+def do_update(self):
+ print "Add meta info columns to queues."
+
+ try:
+ c = self.db.cursor()
+
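+        # New columns: a generate_metadata switch, the Release header fields, an optional
+        # signing key and a stay_of_execution grace period in seconds.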
+ c.execute("ALTER TABLE policy_queue ADD COLUMN generate_metadata BOOL DEFAULT FALSE NOT NULL")
+ c.execute("ALTER TABLE policy_queue ADD COLUMN origin TEXT DEFAULT NULL")
+ c.execute("ALTER TABLE policy_queue ADD COLUMN label TEXT DEFAULT NULL")
+ c.execute("ALTER TABLE policy_queue ADD COLUMN releasedescription TEXT DEFAULT NULL")
+ c.execute("ALTER TABLE policy_queue ADD COLUMN signingkey TEXT DEFAULT NULL")
+ c.execute("ALTER TABLE policy_queue ADD COLUMN stay_of_execution INT4 NOT NULL DEFAULT 86400 CHECK (stay_of_execution >= 0)")
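+        # A queue that generates metadata must have the Release header fields filled in.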
+ c.execute("""ALTER TABLE policy_queue
+ ADD CONSTRAINT policy_queue_meta_sanity_check
+ CHECK ( (generate_metadata IS FALSE)
+ OR (origin IS NOT NULL AND label IS NOT NULL AND releasedescription IS NOT NULL) )""")
+
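+        # The same columns and constraint for build queues.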
+ c.execute("ALTER TABLE build_queue ADD COLUMN generate_metadata BOOL DEFAULT FALSE NOT NULL")
+ c.execute("ALTER TABLE build_queue ADD COLUMN origin TEXT DEFAULT NULL")
+ c.execute("ALTER TABLE build_queue ADD COLUMN label TEXT DEFAULT NULL")
+ c.execute("ALTER TABLE build_queue ADD COLUMN releasedescription TEXT DEFAULT NULL")
+ c.execute("ALTER TABLE build_queue ADD COLUMN signingkey TEXT DEFAULT NULL")
+ c.execute("ALTER TABLE build_queue ADD COLUMN stay_of_execution INT4 NOT NULL DEFAULT 86400 CHECK (stay_of_execution >= 0)")
+ c.execute("""ALTER TABLE build_queue
+ ADD CONSTRAINT build_queue_meta_sanity_check
+ CHECK ( (generate_metadata IS FALSE)
+ OR (origin IS NOT NULL AND label IS NOT NULL AND releasedescription IS NOT NULL) )""")
+
+ print "Committing"
+ c.execute("UPDATE config SET value = '24' WHERE name = 'db_revision'")
+ self.db.commit()
+
+ except psycopg2.InternalError, msg:
+ self.db.rollback()
+ raise DBUpdateError, "Database error, rollback issued. Error message : %s" % (str(msg))
+
--- /dev/null
+#!/usr/bin/env python
+
+"""Manage build queues"""
+# Copyright (C) 2000, 2001, 2002, 2006 James Troup <james@nocrew.org>
+# Copyright (C) 2009 Mark Hymers <mhy@debian.org>
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+################################################################################
+
+import os, os.path, stat, sys
+from datetime import datetime
+import apt_pkg
+
+from daklib import daklog
+from daklib.dbconn import *
+from daklib.config import Config
+
+################################################################################
+
+Options = None
+Logger = None
+
+################################################################################
+
+def usage (exit_code=0):
+ print """Usage: dak manage-build-queues [OPTIONS] buildqueue1 buildqueue2
+Manage the contents of one or more build queues
+
+ -n, --no-action don't do anything
+ -v, --verbose explain what is being done
+ -h, --help show this help and exit"""
+
+ sys.exit(exit_code)
+
+################################################################################
+
+def main ():
+ global Options, Logger
+
+ cnf = Config()
+
+ for i in ["Help", "No-Action", "Verbose" ]:
+ if not cnf.has_key("Manage-Build-Queues::Options::%s" % (i)):
+ cnf["Manage-Build-Queues::Options::%s" % (i)] = ""
+
+ Arguments = [('h',"help","Manage-Build-Queues::Options::Help"),
+ ('n',"no-action","Manage-Build-Queues::Options::No-Action"),
+ ('v',"verbose","Manage-Build-Queues::Options::Verbose")]
+
+ queue_names = apt_pkg.ParseCommandLine(cnf.Cnf, Arguments, sys.argv)
+ Options = cnf.SubTree("Manage-Build-Queues::Options")
+
+ if Options["Help"]:
+ usage()
+
+ Logger = daklog.Logger(cnf, 'manage-build-queues', Options['No-Action'])
+
+ starttime = datetime.now()
+
+ # For each given queue, look up object and call manage_queue
+ for q in queue_names:
+ session = DBConn().session()
+ queue = get_build_queue(q.lower(), session)
+ if queue:
+ Logger.log(['cleaning queue %s using datetime %s' % (q, starttime)])
+            queue.clean_and_update(starttime, dryrun=Options["No-Action"])
+ else:
+ Logger.log(['cannot find queue %s' % q])
+
+ Logger.close()
+
+################################################################################
+
+if __name__ == '__main__':
+ main()
################################################################################
Cnf = None
-required_database_schema = 23
+required_database_schema = 24
################################################################################
import re
import psycopg2
import traceback
-from datetime import datetime
+from datetime import datetime, timedelta
+from errno import ENOENT
+from tempfile import mkstemp, mkdtemp
from inspect import getargspec
################################################################################
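+# Minimal apt.conf template fed to apt-ftparchive by BuildQueue.write_metadata();
+# the %(archivepath)s and %(filelist)s keys are substituted per queue.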
+MINIMAL_APT_CONF="""
+Dir
+{
+ ArchiveDir "%(archivepath)s";
+ OverrideDir "/srv/ftp.debian.org/scripts/override/";
+ CacheDir "/srv/ftp.debian.org/database/";
+};
+
+Default
+{
+ Packages::Compress ". bzip2 gzip";
+ Sources::Compress ". bzip2 gzip";
+ DeLinkLimit 0;
+ FileMode 0664;
+}
+
+bindirectory "incoming"
+{
+ Packages "Packages";
+ Contents " ";
+
+ BinOverride "override.sid.all3";
+ BinCacheDB "packages-accepted.db";
+
+ FileList "%(filelist)s";
+
+ PathPrefix "";
+ Packages::Extensions ".deb .udeb";
+};
+
+bindirectory "incoming/"
+{
+ Sources "Sources";
+ BinOverride "override.sid.all3";
+ SrcOverride "override.sid.all3.src";
+ FileList "%(filelist)s";
+};
+"""
+
class BuildQueue(object):
def __init__(self, *args, **kwargs):
pass
def __repr__(self):
return '<BuildQueue %s>' % self.queue_name
+ def write_metadata(self, ourtime, force=False):
+ # Do we write out metafiles?
+ if not (force or self.generate_metadata):
+ return
+
+ session = DBConn().session().object_session(self)
+
+ fl_fd = fl_name = ac_fd = ac_name = None
+ tempdir = None
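+        # Space-separated list of every binary architecture, for the Release Architectures field.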
+ arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
+ startdir = os.getcwd()
+
+ try:
+ # Grab files we want to include
+            newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused > ourtime).all()
+
+ # Write file list with newer files
+ (fl_fd, fl_name) = mkstemp()
+ for n in newer:
+ os.write(fl_fd, '%s\n' % n.fullpath)
+ os.close(fl_fd)
+
+ # Write minimal apt.conf
+ # TODO: Remove hardcoding from template
+ (ac_fd, ac_name) = mkstemp()
+ os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
+ 'filelist': fl_name})
+            os.close(ac_fd)
+
+ # Run apt-ftparchive generate
+            os.chdir(os.path.dirname(ac_name))
+            os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
+
+ # Run apt-ftparchive release
+ # TODO: Eww - fix this
+ bname = os.path.basename(self.path)
+ os.chdir(self.path)
+ os.chdir('..')
+            os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
+
+ # Sign if necessary
+ if self.signingkey:
+ cnf = Config()
+ keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
+ if cnf.has_key("Dinstall::SigningPubKeyring"):
+ keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
+
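+                # Write a detached, ASCII-armoured signature for the Release file using the queue's signing key.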
+                os.system("gpg %s --no-options --batch --no-tty --armor --default-key %s --detach-sign -o Release.gpg Release" % (keyring, self.signingkey))
+
+ # Move the files if we got this far
+ os.rename('Release', os.path.join(bname, 'Release'))
+ if self.signingkey:
+ os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
+
+ # Clean up any left behind files
+ finally:
+ os.chdir(startdir)
+ if fl_fd:
+ try:
+ os.close(fl_fd)
+ except OSError:
+ pass
+
+ if fl_name:
+ try:
+ os.unlink(fl_name)
+ except OSError:
+ pass
+
+ if ac_fd:
+ try:
+ os.close(ac_fd)
+ except OSError:
+ pass
+
+ if ac_name:
+ try:
+ os.unlink(ac_name)
+ except OSError:
+ pass
+
+ def clean_and_update(self, starttime, dryrun=False):
+ """WARNING: This routine commits for you"""
+ session = DBConn().session().object_session(self)
+
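+        # A file becomes removable once it has gone unused for stay_of_execution seconds,
+        # so the cutoff is starttime minus that grace period.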
+        ourtime = starttime - timedelta(seconds=self.stay_of_execution)
+
+ if self.generate_metadata:
+ self.write_metadata(ourtime)
+
+ # Grab files older than our execution time
+        older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused <= ourtime).all()
+
+ for o in older:
+ killdb = False
+ try:
+ if dryrun:
+                    print "I: Would have removed %s from the queue" % o.fullpath
+ else:
+ os.unlink(o.fullpath)
+ killdb = True
+ except OSError, e:
+ # If it wasn't there, don't worry
+ if e.errno == ENOENT:
+ killdb = True
+ else:
+ # TODO: Replace with proper logging call
+ print "E: Could not remove %s" % o.fullpath
+
+ if killdb:
+ session.delete(o)
+
+ session.commit()
+
+
def add_file_from_pool(self, poolfile):
"""Copies a file into the pool. Assumes that the PoolFile object is
attached to the same SQLAlchemy session as the Queue object is.
def __repr__(self):
return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
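+    # Absolute path of this file within its build queue's directory.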
+ @property
+ def fullpath(self):
+ return os.path.join(self.buildqueue.path, self.filename)
+
+
__all__.append('BuildQueueFile')
################################################################################