From: Mark Hymers Date: Mon, 26 Oct 2009 08:58:12 +0000 (+0000) Subject: Merge commit 'ftpmaster/master' into sqlalchemy X-Git-Url: https://git.decadent.org.uk/gitweb/?a=commitdiff_plain;h=1c35448b880358d020e81339657e3435fdda9434;hp=-c;p=dak.git Merge commit 'ftpmaster/master' into sqlalchemy Conflicts: dak/override.py dak/process_unchecked.py Signed-off-by: Mark Hymers --- 1c35448b880358d020e81339657e3435fdda9434 diff --combined dak/contents.py index b94aa0de,1b3f3e1f..9ac99951 --- a/dak/contents.py +++ b/dak/contents.py @@@ -45,8 -45,7 +45,8 @@@ import apt_pk from daklib import utils from daklib.binary import Binary from daklib.config import Config -from daklib.dbconn import DBConn +from daklib.dbconn import * + ################################################################################ def usage (exit_code=0): @@@ -88,6 -87,92 +88,6 @@@ log = logging.getLogger( ################################################################################ -# get all the arches delivered for a given suite -# this should probably exist somehere common -arches_q = """PREPARE arches_q(int) as - SELECT s.architecture, a.arch_string - FROM suite_architectures s - JOIN architecture a ON (s.architecture=a.id) - WHERE suite = $1""" - -# find me the .deb for a given binary id -debs_q = """PREPARE debs_q(int, int) as - SELECT b.id, f.filename FROM bin_assoc_by_arch baa - JOIN binaries b ON baa.bin=b.id - JOIN files f ON b.file=f.id - WHERE suite = $1 - AND arch = $2""" - -# ask if we already have contents associated with this binary -olddeb_q = """PREPARE olddeb_q(int) as - SELECT 1 FROM content_associations - WHERE binary_pkg = $1 - LIMIT 1""" - -# find me all of the contents for a given .deb -contents_q = """PREPARE contents_q(int,int) as - SELECT (p.path||'/'||n.file) AS fn, - s.section, - b.package, - b.architecture - FROM content_associations c join content_file_paths p ON (c.filepath=p.id) - JOIN content_file_names n ON (c.filename=n.id) - JOIN binaries b ON (b.id=c.binary_pkg) - JOIN override o ON (o.package=b.package) - JOIN section s ON (s.id=o.section) - WHERE o.suite = $1 AND o.type = $2 - AND b.type='deb' - ORDER BY fn""" - -# find me all of the contents for a given .udeb -udeb_contents_q = """PREPARE udeb_contents_q(int,int,int) as - SELECT (p.path||'/'||n.file) AS fn, - s.section, - b.package, - b.architecture - FROM content_associations c join content_file_paths p ON (c.filepath=p.id) - JOIN content_file_names n ON (c.filename=n.id) - JOIN binaries b ON (b.id=c.binary_pkg) - JOIN override o ON (o.package=b.package) - JOIN section s ON (s.id=o.section) - WHERE o.suite = $1 AND o.type = $2 - AND s.id = $3 - AND b.type='udeb' - ORDER BY fn""" - -# FROM content_file_paths p join content_associations c ON (c.filepath=p.id) -# JOIN content_file_names n ON (c.filename=n.id) -# JOIN binaries b ON (b.id=c.binary_pkg) -# JOIN override o ON (o.package=b.package) -# JOIN section s ON (s.id=o.section) -# WHERE o.suite = $1 AND o.type = $2 -# AND s.id = $3 -# AND b.id in (SELECT ba.bin from bin_associations ba join binaries b on b.id=ba.bin where (b.architecture=$3 or b.architecture=$4)and ba.suite=$1 and b.type='udeb') -# GROUP BY fn -# ORDER BY fn;""" - - - -# clear out all of the temporarily stored content associations -# this should be run only after p-a has run. 
after a p-a -# run we should have either accepted or rejected every package -# so there should no longer be anything in the queue -remove_pending_contents_cruft_q = """DELETE FROM pending_content_associations""" - -# delete any filenames we are storing which have no binary associated with them -remove_filename_cruft_q = """DELETE FROM content_file_names - WHERE id IN (SELECT cfn.id FROM content_file_names cfn - LEFT JOIN content_associations ca - ON ca.filename=cfn.id - WHERE ca.id IS NULL)""" - -# delete any paths we are storing which have no binary associated with them -remove_filepath_cruft_q = """DELETE FROM content_file_paths - WHERE id IN (SELECT cfn.id FROM content_file_paths cfn - LEFT JOIN content_associations ca - ON ca.filepath=cfn.id - WHERE ca.id IS NULL)""" - class EndOfContents(object): """ A sentry object for the end of the filename stream @@@ -104,7 -189,7 +104,7 @@@ class GzippedContentWriter(object) def __init__(self, filename): """ - @ptype filename: string + @type filename: string @param filename: the name of the file to write to """ self.queue = Queue.Queue() @@@ -193,6 -278,8 +193,6 @@@ class Contents(object): """ Class capable of generating Contents-$arch.gz files - - Usage GenerateContents().generateContents( ["main","contrib","non-free"] ) """ def __init__(self): @@@ -201,34 -288,20 +201,34 @@@ def reject(self, message): log.error("E: %s" % message) - # goal column for section column - _goal_column = 54 - def cruft(self): """ remove files/paths from the DB which are no longer referenced by binaries and clean the temporary table """ - cursor = DBConn().cursor(); - cursor.execute( "BEGIN WORK" ) - cursor.execute( remove_pending_contents_cruft_q ) - cursor.execute( remove_filename_cruft_q ) - cursor.execute( remove_filepath_cruft_q ) - cursor.execute( "COMMIT" ) + s = DBConn().session() + + # clear out all of the temporarily stored content associations + # this should be run only after p-a has run. 
after a p-a + # run we should have either accepted or rejected every package + # so there should no longer be anything in the queue + s.query(PendingContentAssociation).delete() + + # delete any filenames we are storing which have no binary associated + # with them + cafq = s.query(ContentAssociation.filename_id).distinct() + cfq = s.query(ContentFilename) + cfq = cfq.filter(~ContentFilename.cafilename_id.in_(cafq)) + cfq.delete() + + # delete any paths we are storing which have no binary associated with + # them + capq = s.query(ContentAssociation.filepath_id).distinct() + cpq = s.query(ContentFilepath) + cpq = cpq.filter(~ContentFilepath.cafilepath_id.in_(capq)) + cpq.delete() + + s.commit() def bootstrap(self): @@@ -237,80 -310,170 +237,80 @@@ """ pooldir = Config()[ 'Dir::Pool' ] - cursor = DBConn().cursor(); - DBConn().prepare("debs_q",debs_q) - DBConn().prepare("olddeb_q",olddeb_q) - DBConn().prepare("arches_q",arches_q) - - suites = self._suites() - for suite in [i.lower() for i in suites]: - suite_id = DBConn().get_suite_id(suite) - - arch_list = self._arches(cursor, suite_id) - arch_all_id = DBConn().get_architecture_id("all") - for arch_id in arch_list: - cursor.execute( "EXECUTE debs_q(%d, %d)" % ( suite_id, arch_id[0] ) ) - - count = 0 - while True: - deb = cursor.fetchone() - if not deb: - break - count += 1 - cursor1 = DBConn().cursor(); - cursor1.execute( "EXECUTE olddeb_q(%d)" % (deb[0] ) ) - old = cursor1.fetchone() - if old: - log.debug( "already imported: %s" % (deb[1]) ) + s = DBConn().session() + + for suite in s.query(Suite).all(): + for arch in get_suite_architectures(suite.suite_name, skipsrc=True, skipall=True, session=s): + q = s.query(BinAssociation).join(Suite) + q = q.join(Suite).filter_by(suite_name=suite.suite_name) + q = q.join(DBBinary).join(Architecture).filter_by(arch.arch_string) + for ba in q: + filename = ba.binary.poolfile.filename + # Check for existing contents + existingq = s.query(ContentAssociations).filter_by(binary_pkg=ba.binary_id).limit(1) + if existingq.count() > 0: + log.debug( "already imported: %s" % (filename)) else: - log.debug( "scanning: %s" % (deb[1]) ) - debfile = os.path.join( pooldir, deb[1] ) - if os.path.exists( debfile ): - Binary(debfile, self.reject).scan_package(deb[0],True) + # We don't have existing contents so import them + log.debug( "scanning: %s" % (filename) ) + debfile = os.path.join(pooldir, filename) + if os.path.exists(debfile): + Binary(debfile, self.reject).scan_package(ba.binary_id, True) else: - log.error("missing .deb: %s" % deb[1]) + log.error("missing .deb: %s" % filename) + def generate(self): """ Generate Contents-$arch.gz files for every available arch in each given suite. 
""" - cursor = DBConn().cursor() - - DBConn().prepare("arches_q", arches_q) - DBConn().prepare("contents_q", contents_q) - DBConn().prepare("udeb_contents_q", udeb_contents_q) - - debtype_id=DBConn().get_override_type_id("deb") - udebtype_id=DBConn().get_override_type_id("udeb") - - arch_all_id = DBConn().get_architecture_id("all") - suites = self._suites() - - - # Get our suites, and the architectures - for suite in [i.lower() for i in suites]: - suite_id = DBConn().get_suite_id(suite) - arch_list = self._arches(cursor, suite_id) - - file_writers = {} - - try: - for arch_id in arch_list: - file_writers[arch_id[0]] = GzippedContentWriter("dists/%s/Contents-%s.gz" % (suite, arch_id[1])) - - cursor.execute("EXECUTE contents_q(%d,%d);" % (suite_id, debtype_id)) - - while True: - r = cursor.fetchone() - if not r: - break - - filename, section, package, arch = r - - if not file_writers.has_key( arch ): - continue - - if arch == arch_all_id: - ## its arch all, so all contents files get it - for writer in file_writers.values(): - writer.write(filename, section, package) - - else: - file_writers[arch].write(filename, section, package) - - finally: - # close all the files - for writer in file_writers.values(): - writer.finish() - + session = DBConn().session() - # The MORE fun part. Ok, udebs need their own contents files, udeb, and udeb-nf (not-free) - # This is HORRIBLY debian specific :-/ - for section, fn_pattern in [("debian-installer","dists/%s/Contents-udeb-%s.gz"), - ("non-free/debian-installer", "dists/%s/Contents-udeb-nf-%s.gz")]: + arch_all_id = get_architecture("all", session).arch_id - section_id = DBConn().get_section_id(section) # all udebs should be here) - if section_id != -1: + # The MORE fun part. Ok, udebs need their own contents files, udeb, and udeb-nf (not-free) + # This is HORRIBLY debian specific :-/ + for dtype, section, fn_pattern in \ + [('deb', None, "dists/%s/Contents-%s.gz"), + ('udeb', "debian-installer", "dists/%s/Contents-udeb-%s.gz"), + ('udeb', "non-free/debian-installer", "dists/%s/Contents-udeb-nf-%s.gz")]: - # Get our suites, and the architectures - for suite in [i.lower() for i in suites]: - suite_id = DBConn().get_suite_id(suite) - arch_list = self._arches(cursor, suite_id) + overridetype = get_override_type(dtype, session) - file_writers = {} + # For udebs, we only look in certain sections (see the for loop above) + if section is not None: + section = get_section(section, session) - try: - for arch_id in arch_list: - file_writers[arch_id[0]] = GzippedContentWriter(fn_pattern % (suite, arch_id[1])) + # Get our suites + for suite in which_suites(): + # Which architectures do we need to work on + arch_list = get_suite_architectures(suite.suite_name, skipsrc=True, skipall=True, session=session) - cursor.execute("EXECUTE udeb_contents_q(%d,%d,%d)" % (suite_id, udebtype_id, section_id)) - - while True: - r = cursor.fetchone() - if not r: - break - - filename, section, package, arch = r - - if not file_writers.has_key( arch ): - continue - - if arch == arch_all_id: - ## its arch all, so all contents files get it - for writer in file_writers.values(): - writer.write(filename, section, package) - - else: - file_writers[arch].write(filename, section, package) - finally: - # close all the files - for writer in file_writers.values(): - writer.finish() - - - -################################################################################ - - def _suites(self): - """ - return a list of suites to operate on - """ - if Config().has_key( "%s::%s" 
%(options_prefix,"Suite")): - suites = utils.split_args(Config()[ "%s::%s" %(options_prefix,"Suite")]) - else: - suites = Config().SubTree("Suite").List() - - return suites + # Set up our file writer dictionary + file_writers = {} + try: + # One file writer per arch + for arch in arch_list: + file_writers[arch.arch_id] = GzippedContentWriter(fn_pattern % (suite, arch.arch_string)) - def _arches(self, cursor, suite): - """ - return a list of archs to operate on - """ - arch_list = [] - cursor.execute("EXECUTE arches_q(%d)" % (suite)) - while True: - r = cursor.fetchone() - if not r: - break + for r in get_suite_contents(suite, overridetype, section, session=session).fetchall(): + filename, section, package, arch_id = r - if r[1] != "source" and r[1] != "all": - arch_list.append((r[0], r[1])) + if arch_id == arch_all_id: + # It's arch all, so all contents files get it + for writer in file_writers.values(): + writer.write(filename, section, package) + else: + if file_writers.has_key(arch_id): + file_writers[arch_id].write(filename, section, package) - return arch_list + finally: + # close all the files + for writer in file_writers.values(): + writer.finish() ################################################################################ - def main(): cnf = Config() @@@ -347,17 -510,5 +347,17 @@@ commands[args[0]](Contents()) +def which_suites(session): + """ + return a list of suites to operate on + """ + if Config().has_key( "%s::%s" %(options_prefix,"Suite")): + suites = utils.split_args(Config()[ "%s::%s" %(options_prefix,"Suite")]) + else: + suites = Config().SubTree("Suite").List() + + return [get_suite(s.lower(), session) for s in suites] + + if __name__ == '__main__': main() diff --combined dak/generate_releases.py index 0bd7a69f,8bf3622d..11e37807 --- a/dak/generate_releases.py +++ b/dak/generate_releases.py @@@ -22,17 -22,17 +22,17 @@@ ################################################################################ -import sys, os, stat, time, pg +import sys, os, stat, time import gzip, bz2 import apt_pkg + from daklib import utils -from daklib import database from daklib.dak_exceptions import * +from daklib.dbconn import * ################################################################################ Cnf = None -projectB = None out = None AptCnf = None @@@ -59,6 -59,24 +59,24 @@@ def add_tiffani (files, path, indexstem #print "ALERT: there was a tiffani file %s" % (filepath) files.append(index) + def gen_i18n_index (files, tree, sec): + path = Cnf["Dir::Root"] + tree + "/" + i18n_path = "%s/i18n" % (sec) + if os.path.exists("%s/%s" % (path, i18n_path)): + index = "%s/Index" % (i18n_path) + out = open("%s/%s" % (path, index), "w") + out.write("SHA1:\n") + for x in os.listdir("%s/%s" % (path, i18n_path)): + if x.startswith('Translation-'): + f = open("%s/%s/%s" % (path, i18n_path, x), "r") + size = os.fstat(f.fileno())[6] + f.seek(0) + sha1sum = apt_pkg.sha1sum(f) + f.close() + out.write(" %s %7d %s\n" % (sha1sum, size, x)) + out.close() + files.append(index) + def compressnames (tree,type,file): compress = AptCnf.get("%s::%s::Compress" % (tree,type), AptCnf.get("Default::%s::Compress" % (type), ". 
gzip")) result = [] @@@ -149,7 -167,7 +167,7 @@@ def write_release_file (relpath, suite ################################################################################ def main (): - global Cnf, AptCnf, projectB, out + global Cnf, AptCnf, out out = sys.stdout Cnf = utils.get_conf() @@@ -174,29 -192,38 +192,29 @@@ AptCnf = apt_pkg.newConfiguration() apt_pkg.ReadConfigFileISC(AptCnf, Options["Apt-Conf"]) - projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"])) - database.init(Cnf, projectB) - if not suites: suites = Cnf.SubTree("Suite").List() - for suite in suites: - print "Processing: " + suite + for suitename in suites: + print "Processing: " + suitename SuiteBlock = Cnf.SubTree("Suite::" + suite) + suiteobj = get_suite(suitename) + + suite = suite.suite_name.lower() - if database.get_suite_untouchable(suite) and not Options["Force-Touch"]: + if suite.untouchable and not Options["Force-Touch"]: print "Skipping: " + suite + " (untouchable)" continue - suite = suite.lower() - - origin = SuiteBlock["Origin"] - label = SuiteBlock.get("Label", origin) - codename = SuiteBlock.get("CodeName", "") - + origin = suite.origin + label = suite.label or suite.origin + codename = suite.codename or "" version = "" - description = "" - - q = projectB.query("SELECT version, description FROM suite WHERE suite_name = '%s'" % (suite)) - qs = q.getresult() - if len(qs) == 1: - if qs[0][0] != "-": version = qs[0][0] - if qs[0][1]: description = qs[0][1] + if suite.version and suite.version != '-': + version = suite.version + description = suite.description or "" - architectures = database.get_suite_architectures(suite) - if architectures == None: - architectures = [] + architectures = get_suite_architectures(suite, skipall=True, skipsrc=True) if SuiteBlock.has_key("NotAutomatic"): notautomatic = "yes" @@@ -228,7 -255,7 +246,7 @@@ print Cnf["Dir::Root"] + tree + "/Release" out = open(Cnf["Dir::Root"] + tree + "/Release", "w") - out.write("Origin: %s\n" % (origin)) + out.write("Origin: %s\n" % (suiteobj.origin)) out.write("Label: %s\n" % (label)) out.write("Suite: %s\n" % (suite)) if version != "": @@@ -243,7 -270,7 +261,7 @@@ if notautomatic != "": out.write("NotAutomatic: %s\n" % (notautomatic)) - out.write("Architectures: %s\n" % (" ".join(filter(utils.real_arch, architectures)))) + out.write("Architectures: %s\n" % (" ".join([a.arch_string for a in architectures]))) if components: out.write("Components: %s\n" % (" ".join(components))) @@@ -258,7 -285,10 +276,10 @@@ else: for x in os.listdir("%s/%s" % (Cnf["Dir::Root"], tree)): if x.startswith('Contents-'): - files.append(x) + if x.endswith('.diff'): + files.append("%s/Index" % (x)) + else: + files.append(x) for sec in AptCnf["tree::%s::Sections" % (tree)].split(): for arch in AptCnf["tree::%s::Architectures" % (tree)].split(): @@@ -287,6 -317,7 +308,7 @@@ relpath = Cnf["Dir::Root"]+tree+"/"+rel write_release_file(relpath, suite, sec, origin, label, arch, version, suite_suffix, notautomatic) files.append(rel) + gen_i18n_index(files, tree, sec) if AptCnf.has_key("tree::%s/main" % (tree)): for dis in ["main", "contrib", "non-free"]: diff --combined dak/make_suite_file_list.py index de62adb9,76c0b0b3..096098bf --- a/dak/make_suite_file_list.py +++ b/dak/make_suite_file_list.py @@@ -39,16 -39,17 +39,16 @@@ Generate file lists used by apt-ftparch import copy import os -import pg import sys import apt_pkg -from daklib import database -from daklib import logging + +from daklib.dbconn import * +from daklib.config import Config +from daklib 
import daklog from daklib import utils ################################################################################ -Cnf = None #: Configuration, apt_pkg.Configuration -projectB = None #: database connection, pgobject Logger = None #: Logger object Options = None #: Parsed CommandLine arguments @@@ -81,9 -82,8 +81,9 @@@ def version_cmp(a, b) ##################################################### def delete_packages(delete_versions, pkg, dominant_arch, suite, - dominant_version, delete_table, delete_col, packages): - suite_id = database.get_suite_id(suite) + dominant_version, delete_table, delete_col, packages, session): + suite_o = get_suite(suite.lower(), session) + suite_id = suite_o.suite_id for version in delete_versions: delete_unique_id = version[1] if not packages.has_key(delete_unique_id): @@@ -91,13 -91,12 +91,13 @@@ delete_version = version[0] delete_id = packages[delete_unique_id]["sourceid"] delete_arch = packages[delete_unique_id]["arch"] - if not database.get_suite_untouchable(suite) or Options["Force"]: + if Options["Force"] or not suite_o.untouchable: if Options["No-Delete"]: print "Would delete %s_%s_%s in %s in favour of %s_%s" % (pkg, delete_arch, delete_version, suite, dominant_version, dominant_arch) else: Logger.log(["dominated", pkg, delete_arch, delete_version, dominant_version, dominant_arch]) - projectB.query("DELETE FROM %s WHERE suite = %s AND %s = %s" % (delete_table, suite_id, delete_col, delete_id)) + # TODO: Fix properly + session.execute("DELETE FROM %s WHERE suite = :suiteid AND %s = :delid" % (delete_table, delete_col), {'suiteid': suite_id, 'delid': delete_id}) del packages[delete_unique_id] else: if Options["No-Delete"]: @@@ -107,7 -106,7 +107,7 @@@ ##################################################### -def resolve_arch_all_vs_any(versions, packages): +def resolve_arch_all_vs_any(versions, packages, session): """ Per-suite&pkg: resolve arch-all, vs. 
arch-any, assumes only one arch-all """ arch_all_version = None arch_any_versions = copy.copy(versions) @@@ -130,12 -129,12 +130,12 @@@ if apt_pkg.VersionCompare(highest_arch_any_version, arch_all_version) < 1: # arch: all dominates delete_packages(arch_any_versions, pkg, "all", suite, - arch_all_version, delete_table, delete_col, packages) + arch_all_version, delete_table, delete_col, packages, session) else: # arch: any dominates delete_packages(arch_all_versions, pkg, "any", suite, highest_arch_any_version, delete_table, delete_col, - packages) + packages, session) ##################################################### @@@ -162,7 -161,7 +162,7 @@@ def remove_duplicate_versions(versions ################################################################################ -def cleanup(packages): +def cleanup(packages, session): # Build up the index used by the clean up functions d = {} for unique_id in packages.keys(): @@@ -199,20 -198,18 +199,20 @@@ for arch in arches.keys(): if arch != "source": versions.extend(d[suite][pkg][arch]) - resolve_arch_all_vs_any(versions, packages) + resolve_arch_all_vs_any(versions, packages, session) ################################################################################ def write_filelist(suite, component, arch, type, list, packages, dislocated_files): + cnf = Config() + # Work out the filename if arch != "source": if type == "udeb": arch = "debian-installer_binary-%s" % (arch) elif type == "deb": arch = "binary-%s" % (arch) - filename = os.path.join(Cnf["Dir::Lists"], "%s_%s_%s.list" % (suite, component, arch)) + filename = os.path.join(cnf["Dir::Lists"], "%s_%s_%s.list" % (suite, component, arch)) output = utils.open_file(filename, "w") # Generate the final list of files files = {} @@@ -239,10 -236,8 +239,10 @@@ ################################################################################ -def write_filelists(packages, dislocated_files): +def write_filelists(packages, dislocated_files, session): # Build up the index to iterate over + cnf = Config() + d = {} for unique_id in packages.keys(): suite = packages[unique_id]["suite"] @@@ -256,16 -251,16 +256,16 @@@ d[suite][component][arch][packagetype].append(unique_id) # Flesh out the index if not Options["Suite"]: - suites = Cnf.SubTree("Suite").List() + suites = cnf.SubTree("Suite").List() else: suites = utils.split_args(Options["Suite"]) for suite in [ i.lower() for i in suites ]: d.setdefault(suite, {}) if not Options["Component"]: - components = Cnf.ValueList("Suite::%s::Components" % (suite)) + components = cnf.ValueList("Suite::%s::Components" % (suite)) else: components = utils.split_args(Options["Component"]) - udeb_components = Cnf.ValueList("Suite::%s::UdebComponents" % (suite)) + udeb_components = cnf.ValueList("Suite::%s::UdebComponents" % (suite)) for component in components: d[suite].setdefault(component, {}) if component in udeb_components: @@@ -273,9 -268,9 +273,9 @@@ else: binary_types = [ "deb" ] if not Options["Architecture"]: - architectures = database.get_suite_architectures(suite) + architectures = [ a.arch_string for a in get_suite_architectures(suite, session=session) ] else: - architectures = utils.split_args(Options["Architectures"]) + architectures = utils.split_args(Options["Architecture"]) for arch in [ i.lower() for i in architectures ]: d[suite][component].setdefault(arch, {}) if arch == "source": @@@ -286,7 -281,7 +286,7 @@@ d[suite][component][arch].setdefault(packagetype, []) # Then walk it for suite in d.keys(): - if Cnf.has_key("Suite::%s::Components" % (suite)): + 
if cnf.has_key("Suite::%s::Components" % (suite)): for component in d[suite].keys(): for arch in d[suite][component].keys(): if arch == "all": @@@ -295,7 -290,7 +295,7 @@@ filelist = d[suite][component][arch][packagetype] # If it's a binary, we need to add in the arch: all debs too if arch != "source": - archall_suite = Cnf.get("Make-Suite-File-List::ArchAllMap::%s" % (suite)) + archall_suite = cnf.get("Make-Suite-File-List::ArchAllMap::%s" % (suite)) if archall_suite: filelist.extend(d[archall_suite][component]["all"][packagetype]) elif d[suite][component].has_key("all") and \ @@@ -314,7 -309,7 +314,7 @@@ def do_da_do_da() if Options["Suite"]: suites = utils.split_args(Options["Suite"]) for suite in suites: - archall_suite = Cnf.get("Make-Suite-File-List::ArchAllMap::%s" % (suite)) + archall_suite = cnf.get("Make-Suite-File-List::ArchAllMap::%s" % (suite)) if archall_suite and archall_suite not in suites: utils.warn("Adding %s as %s maps Arch: all from it." % (archall_suite, suite)) suites.append(archall_suite) @@@ -323,13 -318,8 +323,13 @@@ (con_suites, con_architectures, con_components, check_source) = \ utils.parse_args(Options) + session = DBConn().session() + dislocated_files = {} + # TODO: Fix this properly + query_args = {'con_suites': con_suites, 'con_architectures': con_architectures, 'con_components': con_components} + query = """ SELECT b.id, b.package, a.arch_string, b.version, l.path, f.filename, c.name, f.id, su.suite_name, b.type @@@ -337,7 -327,7 +337,7 @@@ component c, suite su WHERE b.id = ba.bin AND b.file = f.id AND b.architecture = a.id AND f.location = l.id AND l.component = c.id AND ba.suite = su.id - %s %s %s""" % (con_suites, con_architectures, con_components) + %(con_suites)s %(con_architectures)s %(con_components)s""" % query_args if check_source: query += """ UNION @@@ -345,32 -335,29 +345,32 @@@ SELECT s.id, s.source, 'source', s.vers su.suite_name, 'dsc' FROM source s, src_associations sa, files f, location l, component c, suite su WHERE s.id = sa.source AND s.file = f.id AND f.location = l.id - AND l.component = c.id AND sa.suite = su.id %s %s""" % (con_suites, con_components) - q = projectB.query(query) - ql = q.getresult() + AND l.component = c.id AND sa.suite = su.id %(con_suites)s %(con_components)s""" % query_args + # Build up the main index of packages packages = {} unique_id = 0 - for i in ql: + + q = session.execute(query) + for i in q.fetchall(): (sourceid, pkg, arch, version, path, filename, component, file_id, suite, filetype) = i + # 'id' comes from either 'binaries' or 'source', so it's not unique unique_id += 1 packages[unique_id] = Dict(sourceid=sourceid, pkg=pkg, arch=arch, version=version, path=path, filename=filename, component=component, file_id=file_id, suite=suite, filetype = filetype) - cleanup(packages) - write_filelists(packages, dislocated_files) + cleanup(packages, session) + session.commit() + write_filelists(packages, dislocated_files, session) ################################################################################ def main(): - global Cnf, projectB, Options, Logger + global Options, Logger - Cnf = utils.get_conf() + cnf = Config() Arguments = [('a', "architecture", "Make-Suite-File-List::Options::Architecture", "HasArg"), ('c', "component", "Make-Suite-File-List::Options::Component", "HasArg"), ('h', "help", "Make-Suite-File-List::Options::Help"), @@@ -378,16 -365,16 +378,16 @@@ ('f', "force", "Make-Suite-File-List::Options::Force"), ('s', "suite", "Make-Suite-File-List::Options::Suite", "HasArg")] for i in 
["architecture", "component", "help", "no-delete", "suite", "force" ]: - if not Cnf.has_key("Make-Suite-File-List::Options::%s" % (i)): - Cnf["Make-Suite-File-List::Options::%s" % (i)] = "" - apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv) - Options = Cnf.SubTree("Make-Suite-File-List::Options") + if not cnf.has_key("Make-Suite-File-List::Options::%s" % (i)): + cnf["Make-Suite-File-List::Options::%s" % (i)] = "" + apt_pkg.ParseCommandLine(cnf.Cnf, Arguments, sys.argv) + Options = cnf.SubTree("Make-Suite-File-List::Options") if Options["Help"]: usage() - projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"])) - database.init(Cnf, projectB) - Logger = logging.Logger(Cnf, "make-suite-file-list") + DBConn() + + Logger = daklog.Logger(cnf.Cnf, "make-suite-file-list") do_da_do_da() Logger.close() diff --combined dak/new_security_install.py index fec030b7,549fe5b5..24e89b92 --- a/dak/new_security_install.py +++ b/dak/new_security_install.py @@@ -23,7 -23,7 +23,7 @@@ import apt_pkg, os, sys, pwd, time, commands from daklib import queue -from daklib import logging +from daklib import daklog from daklib import utils from daklib import database from daklib.regexes import re_taint_free @@@ -60,9 -60,7 +60,7 @@@ def init() Options = Cnf.SubTree("Security-Install::Options") - whoami = os.getuid() - whoamifull = pwd.getpwuid(whoami) - username = whoamifull[0] + username = utils.getusername() if username != "dak": print "Non-dak user: %s" % username Options["Sudo"] = "y" @@@ -78,7 -76,7 +76,7 @@@ if Options["No-Action"]: Options["Sudo"] = "" if not Options["Sudo"] and not Options["No-Action"]: - Logger = Upload.Logger = logging.Logger(Cnf, "new-security-install") + Logger = Upload.Logger = daklog.Logger(Cnf, "new-security-install") return arguments diff --combined dak/override.py index 9e1735e8,97439d37..413c3447 --- a/dak/override.py +++ b/dak/override.py @@@ -25,14 -25,16 +25,14 @@@ ## That Alisha Rules The World ################################################################################ -import pg, sys +import os +import sys import apt_pkg -from daklib import logging -from daklib import database -from daklib import utils - -################################################################################ -Cnf = None -projectB = None +from daklib.config import Config +from daklib.dbconn import * +from daklib import daklog +from daklib import utils ################################################################################ @@@ -56,7 -58,9 +56,7 @@@ Make microchanges or microqueries of th sys.exit(exit_code) def main (): - global Cnf, projectB - - Cnf = utils.get_conf() + cnf = Config() Arguments = [('h',"help","Override::Options::Help"), ('d',"done","Override::Options::Done", "HasArg"), @@@ -64,18 -68,19 +64,18 @@@ ('s',"suite","Override::Options::Suite", "HasArg"), ] for i in ["help", "no-action"]: - if not Cnf.has_key("Override::Options::%s" % (i)): - Cnf["Override::Options::%s" % (i)] = "" - if not Cnf.has_key("Override::Options::Suite"): - Cnf["Override::Options::Suite"] = "unstable" + if not cnf.has_key("Override::Options::%s" % (i)): + cnf["Override::Options::%s" % (i)] = "" + if not cnf.has_key("Override::Options::Suite"): + cnf["Override::Options::Suite"] = "unstable" - arguments = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv) - Options = Cnf.SubTree("Override::Options") + arguments = apt_pkg.ParseCommandLine(cnf.Cnf, Arguments, sys.argv) + Options = cnf.SubTree("Override::Options") if Options["Help"]: usage() - projectB = pg.connect(Cnf["DB::Name"], 
Cnf["DB::Host"], int(Cnf["DB::Port"])) - database.init(Cnf, projectB) + session = DBConn().session() if not arguments: utils.fubar("package name is a required argument.") @@@ -88,15 -93,15 +88,15 @@@ if arguments and len(arguments) == 1: # Determine if the argument is a priority or a section... arg = arguments.pop() - q = projectB.query(""" - SELECT ( SELECT COUNT(*) FROM section WHERE section=%s ) AS secs, - ( SELECT COUNT(*) FROM priority WHERE priority=%s ) AS prios - """ % ( pg._quote(arg,"str"), pg._quote(arg,"str"))) - r = q.getresult() + q = session.execute(""" + SELECT ( SELECT COUNT(*) FROM section WHERE section = :arg ) AS secs, + ( SELECT COUNT(*) FROM priority WHERE priority = :arg ) AS prios + """, {'arg': arg}) + r = q.fetchall() if r[0][0] == 1: - arguments = (arg,".") + arguments = (arg, ".") elif r[0][1] == 1: - arguments = (".",arg) + arguments = (".", arg) else: utils.fubar("%s is not a valid section or priority" % (arg)) @@@ -106,44 -111,42 +106,44 @@@ eqdsc = '!=' if packagetype == 'source': eqdsc = '=' - q = projectB.query(""" + q = session.execute(""" SELECT priority.priority AS prio, section.section AS sect, override_type.type AS type FROM override, priority, section, suite, override_type WHERE override.priority = priority.id AND override.type = override_type.id AND override_type.type %s 'dsc' AND override.section = section.id - AND override.package = %s + AND override.package = :package AND override.suite = suite.id - AND suite.suite_name = %s - """ % (eqdsc, pg._quote(package,"str"), pg._quote(suite,"str"))) + AND suite.suite_name = :suite + """ % (eqdsc), {'package': package, 'suite': suite}) - if q.ntuples() == 0: + if q.rowcount == 0: continue - if q.ntuples() > 1: + if q.rowcount > 1: utils.fubar("%s is ambiguous. Matches %d packages" % (package,q.ntuples())) - r = q.getresult() + r = q.fetchone() if packagetype == 'binary': - oldsection = r[0][1] - oldpriority = r[0][0] + oldsection = r[1] + oldpriority = r[0] else: - oldsourcesection = r[0][1] + oldsourcesection = r[1] oldpriority = 'source' if not oldpriority and not oldsourcesection: utils.fubar("Unable to find package %s" % (package)) + if oldsection and oldsourcesection and oldsection != oldsourcesection: # When setting overrides, both source & binary will become the same section utils.warn("Source is in section '%s' instead of '%s'" % (oldsourcesection, oldsection)) + if not oldsection: oldsection = oldsourcesection if not arguments: print "%s is in section '%s' at priority '%s'" % ( - package,oldsection,oldpriority) + package, oldsection, oldpriority) sys.exit(0) # At this point, we have a new section and priority... check they're valid... 
@@@ -154,15 -157,19 +154,15 @@@ if newpriority == ".": newpriority = oldpriority - q = projectB.query("SELECT id FROM section WHERE section=%s" % ( - pg._quote(newsection,"str"))) - - if q.ntuples() == 0: + s = get_section(newsection, session) + if s is None: utils.fubar("Supplied section %s is invalid" % (newsection)) - newsecid = q.getresult()[0][0] + newsecid = s.section_id - q = projectB.query("SELECT id FROM priority WHERE priority=%s" % ( - pg._quote(newpriority,"str"))) - - if q.ntuples() == 0: + p = get_priority(newpriority, session) + if p is None: utils.fubar("Supplied priority %s is invalid" % (newpriority)) - newprioid = q.getresult()[0][0] + newprioid = p.priority_id if newpriority == oldpriority and newsection == oldsection: print "I: Doing nothing" @@@ -184,7 -191,6 +184,7 @@@ if newpriority != oldpriority: print "I: Will change priority from %s to %s" % (oldpriority,newpriority) + if newsection != oldsection: print "I: Will change section from %s to %s" % (oldsection,newsection) @@@ -196,53 -202,50 +196,53 @@@ game_over() - Logger = logging.Logger(Cnf, "override") + Logger = daklog.Logger(cnf.Cnf, "override") - projectB.query("BEGIN WORK") + dsc_otype_id = get_override_type('dsc').overridetype_id + + # We're already in a transaction # We're in "do it" mode, we have something to do... do it if newpriority != oldpriority: - q = projectB.query(""" + session.execute(""" UPDATE override - SET priority=%d - WHERE package=%s - AND override.type != %d - AND suite = (SELECT id FROM suite WHERE suite_name=%s)""" % ( - newprioid, - pg._quote(package,"str"), database.get_override_type_id("dsc"), - pg._quote(suite,"str") )) - Logger.log(["changed priority",package,oldpriority,newpriority]) + SET priority = :newprioid + WHERE package = :package + AND override.type != :otypedsc + AND suite = (SELECT id FROM suite WHERE suite_name = :suite)""", + {'newprioid': newprioid, 'package': package, + 'otypedsc': dsc_otype_id, 'suite': suite}) + + Logger.log(["changed priority", package, oldpriority, newpriority]) if newsection != oldsection: - q = projectB.query(""" + q = session.execute(""" UPDATE override - SET section=%d - WHERE package=%s - AND suite = (SELECT id FROM suite WHERE suite_name=%s)""" % ( - newsecid, - pg._quote(package,"str"), - pg._quote(suite,"str") )) - Logger.log(["changed section",package,oldsection,newsection]) - projectB.query("COMMIT WORK") + SET section = :newsecid + WHERE package = :package + AND suite = (SELECT id FROM suite WHERE suite_name = :suite)""", + {'newsecid': newsecid, 'package': package, + 'suite': suite}) + + Logger.log(["changed section", package, oldsection, newsection]) + + session.commit() if Options.has_key("Done"): Subst = {} - Subst["__OVERRIDE_ADDRESS__"] = Cnf["Override::MyEmailAddress"] - Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"] + Subst["__OVERRIDE_ADDRESS__"] = cnf["Override::MyEmailAddress"] + Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"] bcc = [] - if Cnf.Find("Dinstall::Bcc") != "": - bcc.append(Cnf["Dinstall::Bcc"]) - if Cnf.Find("Override::Bcc") != "": - bcc.append(Cnf["Override::Bcc"]) + if cnf.Find("Dinstall::Bcc") != "": + bcc.append(cnf["Dinstall::Bcc"]) + if cnf.Find("Override::Bcc") != "": + bcc.append(cnf["Override::Bcc"]) if bcc: Subst["__BCC__"] = "Bcc: " + ", ".join(bcc) else: Subst["__BCC__"] = "X-Filler: 42" - Subst["__CC__"] = "X-DAK: dak override\nX-Katie: alicia" - Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"] - Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"] + Subst["__CC__"] = "Cc: " 
+ package + "@" + Cnf["Dinstall::PackagesServer"] + "\nX-DAK: dak override\nX-Katie: alicia" + Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"] + Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"] Subst["__WHOAMI__"] = utils.whoami() Subst["__SOURCE__"] = package @@@ -254,15 -257,17 +254,15 @@@ summary += "Changed section from %s to %s\n" % (oldsection,newsection) Subst["__SUMMARY__"] = summary + template = os.path.join(cnf["Dir::Templates"], "override.bug-close") for bug in utils.split_args(Options["Done"]): Subst["__BUG_NUMBER__"] = bug - mail_message = utils.TemplateSubst( - Subst,Cnf["Dir::Templates"]+"/override.bug-close") + mail_message = utils.TemplateSubst(Subst, template) utils.send_mail(mail_message) - Logger.log(["closed bug",bug]) + Logger.log(["closed bug", bug]) Logger.close() - print "Done" - ################################################################################# if __name__ == '__main__': diff --combined dak/process_new.py index 777e0791,3800fc8e..f15a5600 --- a/dak/process_new.py +++ b/dak/process_new.py @@@ -54,14 -54,12 +54,14 @@@ import contextli import pwd import apt_pkg, apt_inst import examine_package + from daklib import database -from daklib import logging +from daklib import daklog from daklib import queue from daklib import utils from daklib.regexes import re_no_epoch, re_default_answer, re_isanum from daklib.dak_exceptions import CantOpenError, AlreadyLockedError, CantGetLockError +from daklib.summarystats import SummaryStats # Globals Cnf = None #: Configuration, apt_pkg.Configuration @@@ -554,6 -552,7 +554,7 @@@ def do_bxa_notification() def add_overrides (new): changes = Upload.pkg.changes files = Upload.pkg.files + srcpkg = changes.get("source") projectB.query("BEGIN WORK") for suite in changes["suite"].keys(): @@@ -563,6 -562,7 +564,7 @@@ type_id = database.get_override_type_id(new[pkg]["type"]) priority_id = new[pkg]["priority id"] section_id = new[pkg]["section id"] + Logger.log(["%s overrides" % (srcpkg), suite, new[pkg]["component"], new[pkg]["type"], new[pkg]["priority"], new[pkg]["section"]]) projectB.query("INSERT INTO override (suite, component, type, package, priority, section, maintainer) VALUES (%s, %s, %s, '%s', %s, %s, '')" % (suite_id, component_id, type_id, pkg, priority_id, section_id)) for f in new[pkg]["files"]: if files[f].has_key("new"): @@@ -693,6 -693,7 +695,7 @@@ def do_new() try: check_daily_lock() done = add_overrides (new) + Logger.log([utils.getusername(), "NEW ACCEPT: %s" % (Upload.pkg.changes_file)]) except CantGetLockError: print "Hello? Operator! Give me the number for 911!" print "Dinstall in the locked area, cant process packages, come back later" @@@ -705,12 -706,14 +708,14 @@@ reject_message=Options["Manual-Reject"], note=database.get_new_comments(changes.get("source", ""))) if not aborted: + Logger.log([utils.getusername(), "NEW REJECT: %s" % (Upload.pkg.changes_file)]) os.unlink(Upload.pkg.changes_file[:-8]+".dak") done = 1 elif answer == 'N': edit_note(database.get_new_comments(changes.get("source", ""))) elif answer == 'P' and not Options["Trainee"]: prod_maintainer(database.get_new_comments(changes.get("source", ""))) + Logger.log([utils.getusername(), "NEW PROD: %s" % (Upload.pkg.changes_file)]) elif answer == 'R' and not Options["Trainee"]: confirm = utils.our_raw_input("Really clear note (y/N)? 
").lower() if confirm == "y": @@@ -771,7 -774,7 +776,7 @@@ def init() if not Options["No-Action"]: try: - Logger = Upload.Logger = logging.Logger(Cnf, "process-new") + Logger = Upload.Logger = daklog.Logger(Cnf, "process-new") except CantOpenError, e: Options["Trainee"] = "True" @@@ -823,10 -826,12 +828,12 @@@ def do_byhand() done = 1 for f in byhand: del files[f] + Logger.log([utils.getusername(), "BYHAND ACCEPT: %s" % (Upload.pkg.changes_file)]) except CantGetLockError: print "Hello? Operator! Give me the number for 911!" print "Dinstall in the locked area, cant process packages, come back later" elif answer == 'M': + Logger.log([utils.getusername(), "BYHAND REJECT: %s" % (Upload.pkg.changes_file)]) Upload.do_reject(1, Options["Manual-Reject"]) os.unlink(Upload.pkg.changes_file[:-8]+".dak") done = 1 @@@ -929,10 -934,12 +936,12 @@@ def do_accept_stableupdate(suite, q) # writing this means that it is installed, so put it into # accepted. print "Binary-only upload, source installed." + Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)]) _accept() elif is_source_in_queue_dir(Cnf["Dir::Queue::Accepted"]): # The source is in accepted, the binary cleared NEW: accept it. print "Binary-only upload, source in accepted." + Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)]) _accept() elif is_source_in_queue_dir(Cnf["Dir::Queue::New"]): # It's in NEW. We expect the source to land in p-u holding @@@ -942,6 -949,7 +951,7 @@@ elif is_source_in_queue_dir(Cnf["Dir::Queue::Newstage"]): # It's in newstage. Accept into the holding area print "Binary-only upload, source in newstage." + Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)]) _accept() else: # No case applicable. Bail out. 
Return will cause the upload @@@ -1023,15 -1031,15 +1033,15 @@@ def do_pkg(changes_file) ################################################################################ def end(): - accept_count = Upload.accept_count - accept_bytes = Upload.accept_bytes + accept_count = SummaryStats().accept_count + accept_bytes = SummaryStats().accept_bytes if accept_count: sets = "set" if accept_count > 1: sets = "sets" sys.stderr.write("Accepted %d package %s, %s.\n" % (accept_count, sets, utils.size_type(int(accept_bytes)))) - Logger.log(["total",accept_count,accept_bytes]) + Logger.log([utils.getusername(), "total",accept_count,accept_bytes]) if not Options["No-Action"] and not Options["Trainee"]: Logger.close() diff --combined dak/process_unchecked.py index 0eec1ee8,7b0f9aba..ca61ee3e --- a/dak/process_unchecked.py +++ b/dak/process_unchecked.py @@@ -5,7 -5,6 +5,7 @@@ Checks Debian packages from Incomin @contact: Debian FTP Master @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup @copyright: 2009 Joerg Jaspert +@copyright: 2009 Mark Hymers @license: GNU General Public License version 2 or later """ @@@ -48,18 -47,16 +48,18 @@@ import tarfil import apt_inst import apt_pkg from debian_bundle import deb822 -from daklib.dbconn import DBConn + +from daklib.dbconn import * from daklib.binary import Binary -from daklib import logging -from daklib import queue +from daklib import daklog +from daklib.queue import * from daklib import utils +from daklib.textutils import fix_maintainer from daklib.dak_exceptions import * -from daklib.regexes import re_valid_version, re_valid_pkg_name, re_changelog_versions, \ - re_strip_revision, re_strip_srcver, re_spacestrip, \ - re_isanum, re_no_epoch, re_no_revision, re_taint_free, \ - re_isadeb, re_extract_src_version, re_issource, re_default_answer +from daklib.regexes import re_default_answer +from daklib.summarystats import SummaryStats +from daklib.holding import Holding +from daklib.config import Config from types import * @@@ -69,16 -66,31 +69,16 @@@ ################################################################################ # Globals -Cnf = None Options = None Logger = None -Upload = None - -reprocess = 0 -in_holding = {} - -# Aliases to the real vars in the Upload class; hysterical raisins. 
-reject_message = "" -changes = {} -dsc = {} -dsc_files = {} -files = {} -pkg = {} ############################################################################### def init(): - global Cnf, Options, Upload, changes, dsc, dsc_files, files, pkg + global Options apt_pkg.init() - - Cnf = apt_pkg.newConfiguration() - apt_pkg.ReadConfigFileISC(Cnf,utils.which_conf_file()) + cnf = Config() Arguments = [('a',"automatic","Dinstall::Options::Automatic"), ('h',"help","Dinstall::Options::Help"), @@@ -89,22 -101,30 +89,22 @@@ for i in ["automatic", "help", "no-action", "no-lock", "no-mail", "override-distribution", "version", "directory"]: - Cnf["Dinstall::Options::%s" % (i)] = "" + cnf["Dinstall::Options::%s" % (i)] = "" - changes_files = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv) - Options = Cnf.SubTree("Dinstall::Options") + changes_files = apt_pkg.ParseCommandLine(cnf.Cnf, Arguments, sys.argv) + Options = cnf.SubTree("Dinstall::Options") if Options["Help"]: usage() # If we have a directory flag, use it to find our files - if Cnf["Dinstall::Options::Directory"] != "": + if cnf["Dinstall::Options::Directory"] != "": # Note that we clobber the list of files we were given in this case # so warn if the user has done both if len(changes_files) > 0: utils.warn("Directory provided so ignoring files given on command line") - changes_files = utils.get_changes_files(Cnf["Dinstall::Options::Directory"]) - - Upload = queue.Upload(Cnf) - - changes = Upload.pkg.changes - dsc = Upload.pkg.dsc - dsc_files = Upload.pkg.dsc_files - files = Upload.pkg.files - pkg = Upload.pkg + changes_files = utils.get_changes_files(cnf["Dinstall::Options::Directory"]) return changes_files @@@ -122,14 -142,1027 +122,1027 @@@ def usage (exit_code=0) ################################################################################ - def action(u): - cnf = Config() + def reject (str, prefix="Rejected: "): + global reject_message + if str: + reject_message += prefix + str + "\n" + + ################################################################################ + + def copy_to_holding(filename): + global in_holding + + base_filename = os.path.basename(filename) + + dest = Cnf["Dir::Queue::Holding"] + '/' + base_filename + try: + fd = os.open(dest, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0640) + os.close(fd) + except OSError, e: + # Shouldn't happen, but will if, for example, someone lists a + # file twice in the .changes. + if errno.errorcode[e.errno] == 'EEXIST': + reject("%s: already exists in holding area; can not overwrite." % (base_filename)) + return + raise + + try: + shutil.copy(filename, dest) + except IOError, e: + # In either case (ENOENT or EACCES) we want to remove the + # O_CREAT | O_EXCLed ghost file, so add the file to the list + # of 'in holding' even if it's not the real file. + if errno.errorcode[e.errno] == 'ENOENT': + reject("%s: can not copy to holding area: file not found." % (base_filename)) + os.unlink(dest) + return + elif errno.errorcode[e.errno] == 'EACCES': + reject("%s: can not copy to holding area: read permission denied." % (base_filename)) + os.unlink(dest) + return + raise + + in_holding[base_filename] = "" + + ################################################################################ + + def clean_holding(): + global in_holding + + cwd = os.getcwd() + os.chdir(Cnf["Dir::Queue::Holding"]) + for f in in_holding.keys(): + if os.path.exists(f): + if f.find('/') != -1: + utils.fubar("WTF? clean_holding() got a file ('%s') with / in it!" 
% (f)) + else: + os.unlink(f) + in_holding = {} + os.chdir(cwd) + + ################################################################################ + + def check_changes(): + filename = pkg.changes_file + + # Parse the .changes field into a dictionary + try: + changes.update(utils.parse_changes(filename)) + except CantOpenError: + reject("%s: can't read file." % (filename)) + return 0 + except ParseChangesError, line: + reject("%s: parse error, can't grok: %s." % (filename, line)) + return 0 + except ChangesUnicodeError: + reject("%s: changes file not proper utf-8" % (filename)) + return 0 + + # Parse the Files field from the .changes into another dictionary + try: + files.update(utils.build_file_list(changes)) + except ParseChangesError, line: + reject("%s: parse error, can't grok: %s." % (filename, line)) + except UnknownFormatError, format: + reject("%s: unknown format '%s'." % (filename, format)) + return 0 + + # Check for mandatory fields + for i in ("source", "binary", "architecture", "version", "distribution", + "maintainer", "files", "changes", "description"): + if not changes.has_key(i): + reject("%s: Missing mandatory field `%s'." % (filename, i)) + return 0 # Avoid errors during later tests + + # Strip a source version in brackets from the source field + if re_strip_srcver.search(changes["source"]): + changes["source"] = re_strip_srcver.sub('', changes["source"]) + + # Ensure the source field is a valid package name. + if not re_valid_pkg_name.match(changes["source"]): + reject("%s: invalid source name '%s'." % (filename, changes["source"])) + + # Split multi-value fields into a lower-level dictionary + for i in ("architecture", "distribution", "binary", "closes"): + o = changes.get(i, "") + if o != "": + del changes[i] + changes[i] = {} + for j in o.split(): + changes[i][j] = 1 + + # Fix the Maintainer: field to be RFC822/2047 compatible + try: + (changes["maintainer822"], changes["maintainer2047"], + changes["maintainername"], changes["maintaineremail"]) = \ + utils.fix_maintainer (changes["maintainer"]) + except ParseMaintError, msg: + reject("%s: Maintainer field ('%s') failed to parse: %s" \ + % (filename, changes["maintainer"], msg)) + + # ...likewise for the Changed-By: field if it exists. + try: + (changes["changedby822"], changes["changedby2047"], + changes["changedbyname"], changes["changedbyemail"]) = \ + utils.fix_maintainer (changes.get("changed-by", "")) + except ParseMaintError, msg: + (changes["changedby822"], changes["changedby2047"], + changes["changedbyname"], changes["changedbyemail"]) = \ + ("", "", "", "") + reject("%s: Changed-By field ('%s') failed to parse: %s" \ + % (filename, changes["changed-by"], msg)) + + # Ensure all the values in Closes: are numbers + if changes.has_key("closes"): + for i in changes["closes"].keys(): + if re_isanum.match (i) == None: + reject("%s: `%s' from Closes field isn't a number." % (filename, i)) + + + # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison) + changes["chopversion"] = re_no_epoch.sub('', changes["version"]) + changes["chopversion2"] = re_no_revision.sub('', changes["chopversion"]) + + # Check there isn't already a changes file of the same name in one + # of the queue directories. + base_filename = os.path.basename(filename) + for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]: + if os.path.exists(Cnf["Dir::Queue::%s" % (d) ]+'/'+base_filename): + reject("%s: a file with this name already exists in the %s directory." 
% (base_filename, d)) + + # Check the .changes is non-empty + if not files: + reject("%s: nothing to do (Files field is empty)." % (base_filename)) + return 0 + + return 1 + + ################################################################################ + + def check_distributions(): + "Check and map the Distribution field of a .changes file." + + # Handle suite mappings + for m in Cnf.ValueList("SuiteMappings"): + args = m.split() + mtype = args[0] + if mtype == "map" or mtype == "silent-map": + (source, dest) = args[1:3] + if changes["distribution"].has_key(source): + del changes["distribution"][source] + changes["distribution"][dest] = 1 + if mtype != "silent-map": + reject("Mapping %s to %s." % (source, dest),"") + if changes.has_key("distribution-version"): + if changes["distribution-version"].has_key(source): + changes["distribution-version"][source]=dest + elif mtype == "map-unreleased": + (source, dest) = args[1:3] + if changes["distribution"].has_key(source): + for arch in changes["architecture"].keys(): + if arch not in DBConn().get_suite_architectures(source): + reject("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch),"") + del changes["distribution"][source] + changes["distribution"][dest] = 1 + break + elif mtype == "ignore": + suite = args[1] + if changes["distribution"].has_key(suite): + del changes["distribution"][suite] + reject("Ignoring %s as a target suite." % (suite), "Warning: ") + elif mtype == "reject": + suite = args[1] + if changes["distribution"].has_key(suite): + reject("Uploads to %s are not accepted." % (suite)) + elif mtype == "propup-version": + # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes" + # + # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'} + if changes["distribution"].has_key(args[1]): + changes.setdefault("distribution-version", {}) + for suite in args[2:]: changes["distribution-version"][suite]=suite + + # Ensure there is (still) a target distribution + if changes["distribution"].keys() == []: + reject("no valid distribution.") + + # Ensure target distributions exist + for suite in changes["distribution"].keys(): + if not Cnf.has_key("Suite::%s" % (suite)): + reject("Unknown distribution `%s'." % (suite)) + + ################################################################################ + + def check_files(): + global reprocess + + archive = utils.where_am_i() + file_keys = files.keys() + + # if reprocess is 2 we've already done this and we're checking + # things again for the new .orig.tar.gz. + # [Yes, I'm fully aware of how disgusting this is] + if not Options["No-Action"] and reprocess < 2: + cwd = os.getcwd() + os.chdir(pkg.directory) + for f in file_keys: + copy_to_holding(f) + os.chdir(cwd) + + # Check there isn't already a .changes or .dak file of the same name in + # the proposed-updates "CopyChanges" or "CopyDotDak" storage directories. 
+ # [NB: this check must be done post-suite mapping] + base_filename = os.path.basename(pkg.changes_file) + dot_dak_filename = base_filename[:-8]+".dak" + for suite in changes["distribution"].keys(): + copychanges = "Suite::%s::CopyChanges" % (suite) + if Cnf.has_key(copychanges) and \ + os.path.exists(Cnf[copychanges]+"/"+base_filename): + reject("%s: a file with this name already exists in %s" \ + % (base_filename, Cnf[copychanges])) + + copy_dot_dak = "Suite::%s::CopyDotDak" % (suite) + if Cnf.has_key(copy_dot_dak) and \ + os.path.exists(Cnf[copy_dot_dak]+"/"+dot_dak_filename): + reject("%s: a file with this name already exists in %s" \ + % (dot_dak_filename, Cnf[copy_dot_dak])) + + reprocess = 0 + has_binaries = 0 + has_source = 0 + + cursor = DBConn().cursor() + # Check for packages that have moved from one component to another + # STU: this should probably be changed to not join on architecture, suite tables but instead to used their cached name->id mappings from DBConn + DBConn().prepare("moved_pkg_q", """ + PREPARE moved_pkg_q(text,text,text) AS + SELECT c.name FROM binaries b, bin_associations ba, suite s, location l, + component c, architecture a, files f + WHERE b.package = $1 AND s.suite_name = $2 + AND (a.arch_string = $3 OR a.arch_string = 'all') + AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id + AND f.location = l.id + AND l.component = c.id + AND b.file = f.id""") + + for f in file_keys: + # Ensure the file does not already exist in one of the accepted directories + for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]: + if not Cnf.has_key("Dir::Queue::%s" % (d)): continue + if os.path.exists(Cnf["Dir::Queue::%s" % (d) ] + '/' + f): + reject("%s file already exists in the %s directory." % (f, d)) + if not re_taint_free.match(f): + reject("!!WARNING!! tainted filename: '%s'." % (f)) + # Check the file is readable + if os.access(f, os.R_OK) == 0: + # When running in -n, copy_to_holding() won't have + # generated the reject_message, so we need to. + if Options["No-Action"]: + if os.path.exists(f): + reject("Can't read `%s'. [permission denied]" % (f)) + else: + reject("Can't read `%s'. [file not found]" % (f)) + files[f]["type"] = "unreadable" + continue + # If it's byhand skip remaining checks + if files[f]["section"] == "byhand" or files[f]["section"][:4] == "raw-": + files[f]["byhand"] = 1 + files[f]["type"] = "byhand" + # Checks for a binary package... + elif re_isadeb.match(f): + has_binaries = 1 + files[f]["type"] = "deb" + + # Extract package control information + deb_file = utils.open_file(f) + try: + control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file)) + except: + reject("%s: debExtractControl() raised %s." % (f, sys.exc_type)) + deb_file.close() + # Can't continue, none of the checks on control would work. + continue + + # Check for mandantory "Description:" + deb_file.seek ( 0 ) + try: + apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n' + except: + reject("%s: Missing Description in binary package" % (f)) + continue + + deb_file.close() + + # Check for mandatory fields + for field in [ "Package", "Architecture", "Version" ]: + if control.Find(field) == None: + reject("%s: No %s field in control." % (f, field)) + # Can't continue + continue + + # Ensure the package name matches the one give in the .changes + if not changes["binary"].has_key(control.Find("Package", "")): + reject("%s: control file lists name as `%s', which isn't in changes file." 
% (f, control.Find("Package", ""))) + + # Validate the package field + package = control.Find("Package") + if not re_valid_pkg_name.match(package): + reject("%s: invalid package name '%s'." % (f, package)) + + # Validate the version field + version = control.Find("Version") + if not re_valid_version.match(version): + reject("%s: invalid version number '%s'." % (f, version)) + + # Ensure the architecture of the .deb is one we know about. + default_suite = Cnf.get("Dinstall::DefaultSuite", "Unstable") + architecture = control.Find("Architecture") + upload_suite = changes["distribution"].keys()[0] + if architecture not in DBConn().get_suite_architectures(default_suite) and architecture not in DBConn().get_suite_architectures(upload_suite): + reject("Unknown architecture '%s'." % (architecture)) + + # Ensure the architecture of the .deb is one of the ones + # listed in the .changes. + if not changes["architecture"].has_key(architecture): + reject("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture)) + + # Sanity-check the Depends field + depends = control.Find("Depends") + if depends == '': + reject("%s: Depends field is empty." % (f)) + + # Sanity-check the Provides field + provides = control.Find("Provides") + if provides: + provide = re_spacestrip.sub('', provides) + if provide == '': + reject("%s: Provides field is empty." % (f)) + prov_list = provide.split(",") + for prov in prov_list: + if not re_valid_pkg_name.match(prov): + reject("%s: Invalid Provides field content %s." % (f, prov)) + + + # Check the section & priority match those given in the .changes (non-fatal) + if control.Find("Section") and files[f]["section"] != "" and files[f]["section"] != control.Find("Section"): + reject("%s control file lists section as `%s', but changes file has `%s'." % (f, control.Find("Section", ""), files[f]["section"]), "Warning: ") + if control.Find("Priority") and files[f]["priority"] != "" and files[f]["priority"] != control.Find("Priority"): + reject("%s control file lists priority as `%s', but changes file has `%s'." % (f, control.Find("Priority", ""), files[f]["priority"]),"Warning: ") + + files[f]["package"] = package + files[f]["architecture"] = architecture + files[f]["version"] = version + files[f]["maintainer"] = control.Find("Maintainer", "") + if f.endswith(".udeb"): + files[f]["dbtype"] = "udeb" + elif f.endswith(".deb"): + files[f]["dbtype"] = "deb" + else: + reject("%s is neither a .deb or a .udeb." % (f)) + files[f]["source"] = control.Find("Source", files[f]["package"]) + # Get the source version + source = files[f]["source"] + source_version = "" + if source.find("(") != -1: + m = re_extract_src_version.match(source) + source = m.group(1) + source_version = m.group(2) + if not source_version: + source_version = files[f]["version"] + files[f]["source package"] = source + files[f]["source version"] = source_version + + # Ensure the filename matches the contents of the .deb + m = re_isadeb.match(f) + # package name + file_package = m.group(1) + if files[f]["package"] != file_package: + reject("%s: package part of filename (%s) does not match package name in the %s (%s)." % (f, file_package, files[f]["dbtype"], files[f]["package"])) + epochless_version = re_no_epoch.sub('', control.Find("Version")) + # version + file_version = m.group(2) + if epochless_version != file_version: + reject("%s: version part of filename (%s) does not match package version in the %s (%s)." 
+                   % (f, file_version, files[f]["dbtype"], epochless_version))
+            # architecture
+            file_architecture = m.group(3)
+            if files[f]["architecture"] != file_architecture:
+                reject("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % (f, file_architecture, files[f]["dbtype"], files[f]["architecture"]))
+
+            # Check for existing source
+            source_version = files[f]["source version"]
+            source_package = files[f]["source package"]
+            if changes["architecture"].has_key("source"):
+                if source_version != changes["version"]:
+                    reject("source version (%s) for %s doesn't match changes version %s." % (source_version, f, changes["version"]))
+            else:
+                # Check in the SQL database
+                if not Upload.source_exists(source_package, source_version, changes["distribution"].keys()):
+                    # Check in one of the other directories
+                    source_epochless_version = re_no_epoch.sub('', source_version)
+                    dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
+                    if os.path.exists(Cnf["Dir::Queue::Byhand"] + '/' + dsc_filename):
+                        files[f]["byhand"] = 1
+                    elif os.path.exists(Cnf["Dir::Queue::New"] + '/' + dsc_filename):
+                        files[f]["new"] = 1
+                    else:
+                        dsc_file_exists = 0
+                        for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
+                            if Cnf.has_key("Dir::Queue::%s" % (myq)):
+                                if os.path.exists(Cnf["Dir::Queue::"+myq] + '/' + dsc_filename):
+                                    dsc_file_exists = 1
+                                    break
+                        if not dsc_file_exists:
+                            reject("no source found for %s %s (%s)." % (source_package, source_version, f))
+            # Check the version and for file overwrites
+            reject(Upload.check_binary_against_db(f),"")
+
+            Binary(f, reject).scan_package()
+
+        # Checks for a source package...
+        else:
+            m = re_issource.match(f)
+            if m:
+                has_source = 1
+                files[f]["package"] = m.group(1)
+                files[f]["version"] = m.group(2)
+                files[f]["type"] = m.group(3)
+
+                # Ensure the source package name matches the Source field in the .changes
+                if changes["source"] != files[f]["package"]:
+                    reject("%s: changes file doesn't say %s for Source" % (f, files[f]["package"]))
+
+                # Ensure the source version matches the version in the .changes file
+                if files[f]["type"] == "orig.tar.gz":
+                    changes_version = changes["chopversion2"]
+                else:
+                    changes_version = changes["chopversion"]
+                if changes_version != files[f]["version"]:
+                    reject("%s: should be %s according to changes file." % (f, changes_version))
+
+                # Ensure the .changes lists source in the Architecture field
+                if not changes["architecture"].has_key("source"):
+                    reject("%s: changes file doesn't list `source' in Architecture field." % (f))
+
+                # Check the signature of a .dsc file
+                if files[f]["type"] == "dsc":
+                    dsc["fingerprint"] = utils.check_signature(f, reject)
+
+                files[f]["architecture"] = "source"
+
+            # Not a binary or source package? Assume byhand...
+            else:
+                files[f]["byhand"] = 1
+                files[f]["type"] = "byhand"
+
+        # Per-suite file checks
+        files[f]["oldfiles"] = {}
+        for suite in changes["distribution"].keys():
+            # Skip byhand
+            if files[f].has_key("byhand"):
+                continue
+
+            # Handle component mappings
+            for m in Cnf.ValueList("ComponentMappings"):
+                (source, dest) = m.split()
+                if files[f]["component"] == source:
+                    files[f]["original component"] = source
+                    files[f]["component"] = dest
+
+            # Ensure the component is valid for the target suite
+            if Cnf.has_key("Suite::%s::Components" % (suite)) and \
+               files[f]["component"] not in Cnf.ValueList("Suite::%s::Components" % (suite)):
+                reject("unknown component `%s' for suite `%s'."
% (files[f]["component"], suite)) + continue + + # Validate the component + component = files[f]["component"] + component_id = DBConn().get_component_id(component) + if component_id == -1: + reject("file '%s' has unknown component '%s'." % (f, component)) + continue + + # See if the package is NEW + if not Upload.in_override_p(files[f]["package"], files[f]["component"], suite, files[f].get("dbtype",""), f): + files[f]["new"] = 1 + + # Validate the priority + if files[f]["priority"].find('/') != -1: + reject("file '%s' has invalid priority '%s' [contains '/']." % (f, files[f]["priority"])) + + # Determine the location + location = Cnf["Dir::Pool"] + location_id = DBConn().get_location_id(location, component, archive) + if location_id == -1: + reject("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (component, archive)) + files[f]["location id"] = location_id + + # Check the md5sum & size against existing files (if any) + files[f]["pool name"] = utils.poolify (changes["source"], files[f]["component"]) + files_id = DBConn().get_files_id(files[f]["pool name"] + f, files[f]["size"], files[f]["md5sum"], files[f]["location id"]) + if files_id == -1: + reject("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f)) + elif files_id == -2: + reject("md5sum and/or size mismatch on existing copy of %s." % (f)) + files[f]["files id"] = files_id + + # Check for packages that have moved from one component to another + files[f]['suite'] = suite + cursor.execute("""EXECUTE moved_pkg_q( %(package)s, %(suite)s, %(architecture)s )""", ( files[f] ) ) + ql = cursor.fetchone() + if ql: + files[f]["othercomponents"] = ql[0][0] + + # If the .changes file says it has source, it must have source. + if changes["architecture"].has_key("source"): + if not has_source: + reject("no source found and Architecture line in changes mention source.") + + if not has_binaries and Cnf.FindB("Dinstall::Reject::NoSourceOnly"): + reject("source only uploads are not supported.") + + ############################################################################### + + def check_dsc(): + global reprocess + + # Ensure there is source to check + if not changes["architecture"].has_key("source"): + return 1 + + # Find the .dsc + dsc_filename = None + for f in files.keys(): + if files[f]["type"] == "dsc": + if dsc_filename: + reject("can not process a .changes file with multiple .dsc's.") + return 0 + else: + dsc_filename = f + + # If there isn't one, we have nothing to do. (We have reject()ed the upload already) + if not dsc_filename: + reject("source uploads must contain a dsc file") + return 0 + + # Parse the .dsc file + try: + dsc.update(utils.parse_changes(dsc_filename, signing_rules=1)) + except CantOpenError: + # if not -n copy_to_holding() will have done this for us... + if Options["No-Action"]: + reject("%s: can't read file." % (dsc_filename)) + except ParseChangesError, line: + reject("%s: parse error, can't grok: %s." % (dsc_filename, line)) + except InvalidDscError, line: + reject("%s: syntax error on line %s." % (dsc_filename, line)) + except ChangesUnicodeError: + reject("%s: dsc file not proper utf-8." % (dsc_filename)) + + # Build up the file list of files mentioned by the .dsc + try: + dsc_files.update(utils.build_file_list(dsc, is_a_dsc=1)) + except NoFilesFieldError: + reject("%s: no Files: field." % (dsc_filename)) + return 0 + except UnknownFormatError, format: + reject("%s: unknown format '%s'." 
% (dsc_filename, format)) + return 0 + except ParseChangesError, line: + reject("%s: parse error, can't grok: %s." % (dsc_filename, line)) + return 0 + + # Enforce mandatory fields + for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"): + if not dsc.has_key(i): + reject("%s: missing mandatory field `%s'." % (dsc_filename, i)) + return 0 + + # Validate the source and version fields + if not re_valid_pkg_name.match(dsc["source"]): + reject("%s: invalid source name '%s'." % (dsc_filename, dsc["source"])) + if not re_valid_version.match(dsc["version"]): + reject("%s: invalid version number '%s'." % (dsc_filename, dsc["version"])) + + # Bumping the version number of the .dsc breaks extraction by stable's + # dpkg-source. So let's not do that... + if dsc["format"] != "1.0": + reject("%s: incompatible 'Format' version produced by a broken version of dpkg-dev 1.9.1{3,4}." % (dsc_filename)) + + # Validate the Maintainer field + try: + utils.fix_maintainer (dsc["maintainer"]) + except ParseMaintError, msg: + reject("%s: Maintainer field ('%s') failed to parse: %s" \ + % (dsc_filename, dsc["maintainer"], msg)) + + # Validate the build-depends field(s) + for field_name in [ "build-depends", "build-depends-indep" ]: + field = dsc.get(field_name) + if field: + # Check for broken dpkg-dev lossage... + if field.startswith("ARRAY"): + reject("%s: invalid %s field produced by a broken version of dpkg-dev (1.10.11)" % (dsc_filename, field_name.title())) + + # Have apt try to parse them... + try: + apt_pkg.ParseSrcDepends(field) + except: + reject("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title())) + pass + + # Ensure the version number in the .dsc matches the version number in the .changes + epochless_dsc_version = re_no_epoch.sub('', dsc["version"]) + changes_version = files[dsc_filename]["version"] + if epochless_dsc_version != files[dsc_filename]["version"]: + reject("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version)) + + # Ensure there is a .tar.gz in the .dsc file + has_tar = 0 + for f in dsc_files.keys(): + m = re_issource.match(f) + if not m: + reject("%s: %s in Files field not recognised as source." % (dsc_filename, f)) + continue + ftype = m.group(3) + if ftype == "orig.tar.gz" or ftype == "tar.gz": + has_tar = 1 + if not has_tar: + reject("%s: no .tar.gz or .orig.tar.gz in 'Files' field." 
+           % (dsc_filename))
+
+    # Ensure source is newer than existing source in target suites
+    reject(Upload.check_source_against_db(dsc_filename),"")
+
+    (reject_msg, is_in_incoming) = Upload.check_dsc_against_db(dsc_filename)
+    reject(reject_msg, "")
+    if is_in_incoming:
+        if not Options["No-Action"]:
+            copy_to_holding(is_in_incoming)
+        orig_tar_gz = os.path.basename(is_in_incoming)
+        files[orig_tar_gz] = {}
+        files[orig_tar_gz]["size"] = os.stat(orig_tar_gz)[stat.ST_SIZE]
+        files[orig_tar_gz]["md5sum"] = dsc_files[orig_tar_gz]["md5sum"]
+        files[orig_tar_gz]["sha1sum"] = dsc_files[orig_tar_gz]["sha1sum"]
+        files[orig_tar_gz]["sha256sum"] = dsc_files[orig_tar_gz]["sha256sum"]
+        files[orig_tar_gz]["section"] = files[dsc_filename]["section"]
+        files[orig_tar_gz]["priority"] = files[dsc_filename]["priority"]
+        files[orig_tar_gz]["component"] = files[dsc_filename]["component"]
+        files[orig_tar_gz]["type"] = "orig.tar.gz"
+        reprocess = 2
+
+    return 1
+
+################################################################################
+
+def get_changelog_versions(source_dir):
+    """Extracts the source package and (optionally) grabs the
+    version history out of debian/changelog for the BTS."""
+
+    # Find the .dsc (again)
+    dsc_filename = None
+    for f in files.keys():
+        if files[f]["type"] == "dsc":
+            dsc_filename = f
+
+    # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
+    if not dsc_filename:
+        return
+
+    # Create a symlink mirror of the source files in our temporary directory
+    for f in files.keys():
+        m = re_issource.match(f)
+        if m:
+            src = os.path.join(source_dir, f)
+            # If a file is missing for whatever reason, give up.
+            if not os.path.exists(src):
+                return
+            ftype = m.group(3)
+            if ftype == "orig.tar.gz" and pkg.orig_tar_gz:
+                continue
+            dest = os.path.join(os.getcwd(), f)
+            os.symlink(src, dest)
+
+    # If the orig.tar.gz is not a part of the upload, create a symlink to the
+    # existing copy.
+    if pkg.orig_tar_gz:
+        dest = os.path.join(os.getcwd(), os.path.basename(pkg.orig_tar_gz))
+        os.symlink(pkg.orig_tar_gz, dest)
+
+    # Extract the source
+    cmd = "dpkg-source -sn -x %s" % (dsc_filename)
+    (result, output) = commands.getstatusoutput(cmd)
+    if (result != 0):
+        reject("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
+        reject(utils.prefix_multi_line_string(output, " [dpkg-source output:] "), "")
+        return
+
+    if not Cnf.Find("Dir::Queue::BTSVersionTrack"):
+        return
+
+    # Get the upstream version
+    upstr_version = re_no_epoch.sub('', dsc["version"])
+    if re_strip_revision.search(upstr_version):
+        upstr_version = re_strip_revision.sub('', upstr_version)
+
+    # Ensure the changelog file exists
+    changelog_filename = "%s-%s/debian/changelog" % (dsc["source"], upstr_version)
+    if not os.path.exists(changelog_filename):
+        reject("%s: debian/changelog not found in extracted source." % (dsc_filename))
+        return
+
+    # Parse the changelog
+    dsc["bts changelog"] = ""
+    changelog_file = utils.open_file(changelog_filename)
+    for line in changelog_file.readlines():
+        m = re_changelog_versions.match(line)
+        if m:
+            dsc["bts changelog"] += line
+    changelog_file.close()
+
+    # Check we found at least one revision in the changelog
+    if not dsc["bts changelog"]:
+        reject("%s: changelog format not recognised (empty version tree)."
% (dsc_filename)) + + ######################################## + + def check_source(): + # Bail out if: + # a) there's no source + # or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files' + # or c) the orig.tar.gz is MIA + if not changes["architecture"].has_key("source") or reprocess == 2 \ + or pkg.orig_tar_gz == -1: + return + + tmpdir = utils.temp_dirname() + + # Move into the temporary directory + cwd = os.getcwd() + os.chdir(tmpdir) + + # Get the changelog version history + get_changelog_versions(cwd) + + # Move back and cleanup the temporary tree + os.chdir(cwd) + try: + shutil.rmtree(tmpdir) + except OSError, e: + if errno.errorcode[e.errno] != 'EACCES': + utils.fubar("%s: couldn't remove tmp dir for source tree." % (dsc["source"])) + + reject("%s: source tree could not be cleanly removed." % (dsc["source"])) + # We probably have u-r or u-w directories so chmod everything + # and try again. + cmd = "chmod -R u+rwx %s" % (tmpdir) + result = os.system(cmd) + if result != 0: + utils.fubar("'%s' failed with result %s." % (cmd, result)) + shutil.rmtree(tmpdir) + except: + utils.fubar("%s: couldn't remove tmp dir for source tree." % (dsc["source"])) + + ################################################################################ + + # FIXME: should be a debian specific check called from a hook + + def check_urgency (): + if changes["architecture"].has_key("source"): + if not changes.has_key("urgency"): + changes["urgency"] = Cnf["Urgency::Default"] + # Urgency may be followed by space & comment (policy 5.6.17) + changes["urgency"] = changes["urgency"].split(" ")[0].lower(); + if changes["urgency"] not in Cnf.ValueList("Urgency::Valid"): + reject("%s is not a valid urgency; it will be treated as %s by testing." % (changes["urgency"], Cnf["Urgency::Default"]), "Warning: ") + changes["urgency"] = Cnf["Urgency::Default"] + + ################################################################################ + + def check_hashes (): + utils.check_hash(".changes", files, "md5", apt_pkg.md5sum) + utils.check_size(".changes", files) + utils.check_hash(".dsc", dsc_files, "md5", apt_pkg.md5sum) + utils.check_size(".dsc", dsc_files) + + # This is stupid API, but it'll have to do for now until + # we actually have proper abstraction + for m in utils.ensure_hashes(changes, dsc, files, dsc_files): + reject(m) + + ################################################################################ + + # Sanity check the time stamps of files inside debs. 
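# check_hashes() above hands the real work to utils.check_hash() and
# utils.check_size(): every file must carry the exact digest and byte
# count promised by the .changes/.dsc. A minimal standalone sketch of
# that comparison (helper name and integer size are assumptions, not
# dak's API):

import hashlib
import os

def verify_entry(filename, expected_md5, expected_size):
    # Return a reject()-style message, or None if the file checks out.
    f = open(filename, "rb")
    digest = hashlib.md5(f.read()).hexdigest()
    f.close()
    if digest != expected_md5:
        return "%s: md5sum check failed." % (filename)
    if os.path.getsize(filename) != int(expected_size):
        return "%s: size mismatch." % (filename)
    return None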
+ # [Files in the near future cause ugly warnings and extreme time + # travel can cause errors on extraction] + + def check_timestamps(): + class Tar: + def __init__(self, future_cutoff, past_cutoff): + self.reset() + self.future_cutoff = future_cutoff + self.past_cutoff = past_cutoff + + def reset(self): + self.future_files = {} + self.ancient_files = {} + + def callback(self, Kind,Name,Link,Mode,UID,GID,Size,MTime,Major,Minor): + if MTime > self.future_cutoff: + self.future_files[Name] = MTime + if MTime < self.past_cutoff: + self.ancient_files[Name] = MTime + #### + + future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"]) + past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y")) + tar = Tar(future_cutoff, past_cutoff) + for filename in files.keys(): + if files[filename]["type"] == "deb": + tar.reset() + try: + deb_file = utils.open_file(filename) + apt_inst.debExtract(deb_file,tar.callback,"control.tar.gz") + deb_file.seek(0) + try: + apt_inst.debExtract(deb_file,tar.callback,"data.tar.gz") + except SystemError, e: + # If we can't find a data.tar.gz, look for data.tar.bz2 instead. + if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)): + raise + deb_file.seek(0) + apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2") + deb_file.close() + # + future_files = tar.future_files.keys() + if future_files: + num_future_files = len(future_files) + future_file = future_files[0] + future_date = tar.future_files[future_file] + reject("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])." + % (filename, num_future_files, future_file, + time.ctime(future_date))) + # + ancient_files = tar.ancient_files.keys() + if ancient_files: + num_ancient_files = len(ancient_files) + ancient_file = ancient_files[0] + ancient_date = tar.ancient_files[ancient_file] + reject("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])." 
+ % (filename, num_ancient_files, ancient_file, + time.ctime(ancient_date))) + except: + reject("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value)) + + ################################################################################ + + def lookup_uid_from_fingerprint(fpr): + """ + Return the uid,name,isdm for a given gpg fingerprint + + @type fpr: string + @param fpr: a 40 byte GPG fingerprint + + @return: (uid, name, isdm) + """ + cursor = DBConn().cursor() + cursor.execute( "SELECT u.uid, u.name, k.debian_maintainer FROM fingerprint f JOIN keyrings k ON (f.keyring=k.id), uid u WHERE f.uid = u.id AND f.fingerprint = '%s'" % (fpr)) + qs = cursor.fetchone() + if qs: + return qs + else: + return (None, None, False) + + def check_signed_by_key(): + """Ensure the .changes is signed by an authorized uploader.""" + + (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(changes["fingerprint"]) + if uid_name == None: + uid_name = "" + + # match claimed name with actual name: + if uid is None: + # This is fundamentally broken but need us to refactor how we get + # the UIDs/Fingerprints in order for us to fix it properly + uid, uid_email = changes["fingerprint"], uid + may_nmu, may_sponsor = 1, 1 + # XXX by default new dds don't have a fingerprint/uid in the db atm, + # and can't get one in there if we don't allow nmu/sponsorship + elif is_dm is False: + # If is_dm is False, we allow full upload rights + uid_email = "%s@debian.org" % (uid) + may_nmu, may_sponsor = 1, 1 + else: + # Assume limited upload rights unless we've discovered otherwise + uid_email = uid + may_nmu, may_sponsor = 0, 0 + + + if uid_email in [changes["maintaineremail"], changes["changedbyemail"]]: + sponsored = 0 + elif uid_name in [changes["maintainername"], changes["changedbyname"]]: + sponsored = 0 + if uid_name == "": sponsored = 1 + else: + sponsored = 1 + if ("source" in changes["architecture"] and + uid_email and utils.is_email_alias(uid_email)): + sponsor_addresses = utils.gpg_get_key_addresses(changes["fingerprint"]) + if (changes["maintaineremail"] not in sponsor_addresses and + changes["changedbyemail"] not in sponsor_addresses): + changes["sponsoremail"] = uid_email + + if sponsored and not may_sponsor: + reject("%s is not authorised to sponsor uploads" % (uid)) + + cursor = DBConn().cursor() + if not sponsored and not may_nmu: + source_ids = [] + cursor.execute( "SELECT s.id, s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = %(source)s AND s.dm_upload_allowed = 'yes'", changes ) + + highest_sid, highest_version = None, None + + should_reject = True + while True: + si = cursor.fetchone() + if not si: + break + + if highest_version == None or apt_pkg.VersionCompare(si[1], highest_version) == 1: + highest_sid = si[0] + highest_version = si[1] + + if highest_sid == None: + reject("Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version" % changes["source"]) + else: + + cursor.execute("SELECT m.name FROM maintainer m WHERE m.id IN (SELECT su.maintainer FROM src_uploaders su JOIN source s ON (s.id = su.source) WHERE su.source = %s)" % (highest_sid)) + + while True: + m = cursor.fetchone() + if not m: + break + + (rfc822, rfc2047, name, email) = utils.fix_maintainer(m[0]) + if email == uid_email or name == uid_name: + should_reject=False + break + + if should_reject == True: + reject("%s is not in Maintainer or Uploaders of source package %s" % (uid, changes["source"])) + + for b in changes["binary"].keys(): + for 
suite in changes["distribution"].keys(): + suite_id = DBConn().get_suite_id(suite) + + cursor.execute("SELECT DISTINCT s.source FROM source s JOIN binaries b ON (s.id = b.source) JOIN bin_associations ba On (b.id = ba.bin) WHERE b.package = %(package)s AND ba.suite = %(suite)s" , {'package':b, 'suite':suite_id} ) + while True: + s = cursor.fetchone() + if not s: + break + + if s[0] != changes["source"]: + reject("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite)) + + for f in files.keys(): + if files[f].has_key("byhand"): + reject("%s may not upload BYHAND file %s" % (uid, f)) + if files[f].has_key("new"): + reject("%s may not upload NEW file %s" % (uid, f)) + + + ################################################################################ + ################################################################################ + + # If any file of an upload has a recent mtime then chances are good + # the file is still being uploaded. + + def upload_too_new(): + too_new = 0 + # Move back to the original directory to get accurate time stamps + cwd = os.getcwd() + os.chdir(pkg.directory) + file_list = pkg.files.keys() + file_list.extend(pkg.dsc_files.keys()) + file_list.append(pkg.changes_file) + for f in file_list: + try: + last_modified = time.time()-os.path.getmtime(f) + if last_modified < int(Cnf["Dinstall::SkipTime"]): + too_new = 1 + break + except: + pass + os.chdir(cwd) + return too_new + + ################################################################################ + + def action (): # changes["distribution"] may not exist in corner cases # (e.g. unreadable changes files) - if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType): - changes["distribution"] = {} + if not u.pkg.changes.has_key("distribution") or not isinstance(u.pkg.changes["distribution"], DictType): + u.pkg.changes["distribution"] = {} - (summary, short_summary) = Upload.build_summaries() + (summary, short_summary) = u.build_summaries() # q-unapproved hax0ring queue_info = { @@@ -137,14 -1170,13 +1150,14 @@@ "Autobyhand" : { "is" : is_autobyhand, "process": do_autobyhand }, "Byhand" : { "is": is_byhand, "process": do_byhand }, "OldStableUpdate" : { "is": is_oldstableupdate, - "process": do_oldstableupdate }, + "process": do_oldstableupdate }, "StableUpdate" : { "is": is_stableupdate, "process": do_stableupdate }, "Unembargo" : { "is": is_unembargo, "process": queue_unembargo }, "Embargo" : { "is": is_embargo, "process": queue_embargo }, } + queues = [ "New", "Autobyhand", "Byhand" ] - if Cnf.FindB("Dinstall::SecurityQueueHandling"): + if cnf.FindB("Dinstall::SecurityQueueHandling"): queues += [ "Unembargo", "Embargo" ] else: queues += [ "OldStableUpdate", "StableUpdate" ] @@@ -155,25 -1187,25 +1168,25 @@@ queuekey = '' - if reject_message.find("Rejected") != -1: - if upload_too_new(): - print "SKIP (too new)\n" + reject_message, + pi = u.package_info() + + if len(u.rejects) > 0: + if u.upload_too_new(): + print "SKIP (too new)\n" + pi, prompt = "[S]kip, Quit ?" else: - print "REJECT\n" + reject_message, + print "REJECT\n" + pi prompt = "[R]eject, Skip, Quit ?" 
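# action() above routes an upload with a first-match predicate table:
# each queue contributes an "is" test and a "process" handler, scanned
# in a fixed order. The same pattern, reduced to a standalone sketch
# (pick_queue is an illustrative name, not a dak function):

def pick_queue(u, queues, queue_info):
    # Return the name of the first queue that claims the upload.
    for q in queues:
        if queue_info[q]["is"](u):
            return q
    return None

# qu = pick_queue(u, ["New", "Autobyhand", "Byhand"], queue_info)
# if qu:
#     queue_info[qu]["process"](u, summary, short_summary)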
if Options["Automatic"]: answer = 'R' else: qu = None for q in queues: - if queue_info[q]["is"](): + if queue_info[q]["is"](u): qu = q break if qu: - print "%s for %s\n%s%s" % ( - qu.upper(), ", ".join(changes["distribution"].keys()), - reject_message, summary), + print "%s for %s\n%s%s" % ( qu.upper(), ", ".join(u.pkg.changes["distribution"].keys()), pi, summary) queuekey = qu[0].upper() if queuekey in "RQSA": queuekey = "D" @@@ -183,7 -1215,7 +1196,7 @@@ if Options["Automatic"]: answer = queuekey else: - print "ACCEPT\n" + reject_message + summary, + print "ACCEPT\n" + pi + summary, prompt = "[A]ccept, Skip, Quit ?" if Options["Automatic"]: answer = 'A' @@@ -196,135 -1228,182 +1209,135 @@@ answer = answer[:1].upper() if answer == 'R': - os.chdir (pkg.directory) - Upload.do_reject(0, reject_message) + os.chdir(u.pkg.directory) + u.do_reject(0, pi) elif answer == 'A': - accept(summary, short_summary) - remove_from_unchecked() + u.accept(summary, short_summary) + u.check_override() + u.remove() elif answer == queuekey: - queue_info[qu]["process"](summary, short_summary) - remove_from_unchecked() + queue_info[qu]["process"](u, summary, short_summary) + u.remove() elif answer == 'Q': sys.exit(0) -def remove_from_unchecked(): - os.chdir (pkg.directory) - for f in files.keys(): - os.unlink(f) - os.unlink(pkg.changes_file) - ################################################################################ -def accept (summary, short_summary): - Upload.accept(summary, short_summary) - Upload.check_override() +def package_to_suite(u, suite): + if not u.pkg.changes["distribution"].has_key(suite): + return False -################################################################################ + ret = True -def move_to_dir (dest, perms=0660, changesperms=0664): - utils.move (pkg.changes_file, dest, perms=changesperms) - file_keys = files.keys() - for f in file_keys: - utils.move (f, dest, perms=perms) - -################################################################################ - -def is_unembargo (): - cursor = DBConn().cursor() - cursor.execute( "SELECT package FROM disembargo WHERE package = %(source)s AND version = %(version)s", changes ) - if cursor.fetchone(): - return 1 + if not u.pkg.changes["architecture"].has_key("source"): + s = DBConn().session() + q = s.query(SrcAssociation.sa_id) + q = q.join(Suite).filter_by(suite_name=suite) + q = q.join(DBSource).filter_by(source=u.pkg.changes['source']) + q = q.filter_by(version=u.pkg.changes['version']).limit(1) - oldcwd = os.getcwd() - os.chdir(Cnf["Dir::Queue::Disembargo"]) - disdir = os.getcwd() - os.chdir(oldcwd) + if q.count() < 1: + ret = False - if pkg.directory == disdir: - if changes["architecture"].has_key("source"): - if Options["No-Action"]: return 1 + s.close() - cursor.execute( "INSERT INTO disembargo (package, version) VALUES ('%(package)s', '%(version)s')", - changes ) - cursor.execute( "COMMIT" ) - return 1 + return ret - return 0 +def package_to_queue(u, summary, short_summary, queue, perms=0660, build=True, announce=None): + cnf = Config() + dir = cnf["Dir::Queue::%s" % queue] -def queue_unembargo (summary, short_summary): - print "Moving to UNEMBARGOED holding area." 
- Logger.log(["Moving to unembargoed", pkg.changes_file]) + print "Moving to %s holding area" % queue.upper() + Logger.log(["Moving to %s" % queue, u.pkg.changes_file]) - Upload.dump_vars(Cnf["Dir::Queue::Unembargoed"]) - move_to_dir(Cnf["Dir::Queue::Unembargoed"]) - Upload.queue_build("unembargoed", Cnf["Dir::Queue::Unembargoed"]) + u.pkg.write_dot_dak(dir) + u.move_to_dir(dir, perms=perms) + if build: + get_queue(queue.lower()).autobuild_upload(u.pkg, dir) # Check for override disparities - Upload.Subst["__SUMMARY__"] = summary - Upload.check_override() - - # Send accept mail, announce to lists, close bugs and check for - # override disparities - if not Cnf["Dinstall::Options::No-Mail"]: - Upload.Subst["__SUITE__"] = "" - mail_message = utils.TemplateSubst(Upload.Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted") + u.check_override() + + # Send accept mail, announce to lists and close bugs + if announce and not cnf["Dinstall::Options::No-Mail"]: + template = os.path.join(cnf["Dir::Templates"], announce) + u.update_subst() + u.Subst["__SUITE__"] = "" + mail_message = utils.TemplateSubst(u.Subst, template) utils.send_mail(mail_message) - Upload.announce(short_summary, 1) + u.announce(short_summary, True) ################################################################################ -def is_embargo (): - # if embargoed queues are enabled always embargo - return 1 +def is_unembargo(u): + session = DBConn().session() + cnf = Config() -def queue_embargo (summary, short_summary): - print "Moving to EMBARGOED holding area." - Logger.log(["Moving to embargoed", pkg.changes_file]) + q = session.execute("SELECT package FROM disembargo WHERE package = :source AND version = :version", u.pkg.changes) + if q.rowcount > 0: + session.close() + return True - Upload.dump_vars(Cnf["Dir::Queue::Embargoed"]) - move_to_dir(Cnf["Dir::Queue::Embargoed"]) - Upload.queue_build("embargoed", Cnf["Dir::Queue::Embargoed"]) + oldcwd = os.getcwd() + os.chdir(cnf["Dir::Queue::Disembargo"]) + disdir = os.getcwd() + os.chdir(oldcwd) - # Check for override disparities - Upload.Subst["__SUMMARY__"] = summary - Upload.check_override() - - # Send accept mail, announce to lists, close bugs and check for - # override disparities - if not Cnf["Dinstall::Options::No-Mail"]: - Upload.Subst["__SUITE__"] = "" - mail_message = utils.TemplateSubst(Upload.Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted") - utils.send_mail(mail_message) - Upload.announce(short_summary, 1) + ret = False -################################################################################ + if u.pkg.directory == disdir: + if u.pkg.changes["architecture"].has_key("source"): + if not Options["No-Action"]: + session.execute("INSERT INTO disembargo (package, version) VALUES (:package, :version)", u.pkg.changes) + session.commit() -def is_stableupdate (): - if not changes["distribution"].has_key("proposed-updates"): - return 0 + ret = True - if not changes["architecture"].has_key("source"): - pusuite = DBConn().get_suite_id("proposed-updates") - cursor = DBConn().cursor() - cursor.execute( """SELECT 1 FROM source s - JOIN src_associations sa ON (s.id = sa.source) - WHERE s.source = %(source)s - AND s.version = %(version)s - AND sa.suite = %(suite)s""", - {'source' : changes['source'], - 'version' : changes['version'], - 'suite' : pusuite}) - - if cursor.fetchone(): - # source is already in proposed-updates so no need to hold - return 0 + session.close() - return 1 + return ret -def do_stableupdate (summary, short_summary): - print "Moving to 
PROPOSED-UPDATES holding area." - Logger.log(["Moving to proposed-updates", pkg.changes_file]) +def queue_unembargo(u, summary, short_summary): + return package_to_queue(u, summary, short_summary, "Unembargoed", + perms=0660, build=True, announce='process-unchecked.accepted') - Upload.dump_vars(Cnf["Dir::Queue::ProposedUpdates"]) - move_to_dir(Cnf["Dir::Queue::ProposedUpdates"], perms=0664) +################################################################################ - # Check for override disparities - Upload.Subst["__SUMMARY__"] = summary - Upload.check_override() +def is_embargo(u): + # if embargoed queues are enabled always embargo + return True -################################################################################ +def queue_embargo(u, summary, short_summary): + return package_to_queue(u, summary, short_summary, "Unembargoed", + perms=0660, build=True, announce='process-unchecked.accepted') -def is_oldstableupdate (): - if not changes["distribution"].has_key("oldstable-proposed-updates"): - return 0 +################################################################################ - if not changes["architecture"].has_key("source"): - pusuite = DBConn().get_suite_id("oldstable-proposed-updates") - cursor = DBConn().cursor() - cursor.execute( """SELECT 1 FROM source s - JOIN src_associations sa ON (s.id = sa.source) - WHERE s.source = %(source)s - AND s.version = %(version)s - AND sa.suite = %(suite)s""", - {'source' : changes['source'], - 'version' : changes['version'], - 'suite' : pusuite}) - if cursor.fetchone(): - return 0 +def is_stableupdate(u): + return package_to_suite(u, 'proposed-updates') - return 1 +def do_stableupdate(u, summary, short_summary): + return package_to_queue(u, summary, short_summary, "ProposedUpdates", + perms=0664, build=False, announce=None) -def do_oldstableupdate (summary, short_summary): - print "Moving to OLDSTABLE-PROPOSED-UPDATES holding area." 
- Logger.log(["Moving to oldstable-proposed-updates", pkg.changes_file]) +################################################################################ - Upload.dump_vars(Cnf["Dir::Queue::OldProposedUpdates"]) - move_to_dir(Cnf["Dir::Queue::OldProposedUpdates"], perms=0664) +def is_oldstableupdate(u): + return package_to_suite(u, 'oldstable-proposed-updates') - # Check for override disparities - Upload.Subst["__SUMMARY__"] = summary - Upload.check_override() +def do_oldstableupdate(u, summary, short_summary): + return package_to_queue(u, summary, short_summary, "OldProposedUpdates", + perms=0664, build=False, announce=None) ################################################################################ -def is_autobyhand (): +def is_autobyhand(u): + cnf = Config() + all_auto = 1 any_auto = 0 - for f in files.keys(): - if files[f].has_key("byhand"): + for f in u.pkg.files.keys(): + if u.pkg.files[f].has_key("byhand"): any_auto = 1 # filename is of form "PKG_VER_ARCH.EXT" where PKG, VER and ARCH @@@ -336,98 -1415,95 +1349,98 @@@ continue (pckg, ver, archext) = f.split("_", 2) - if archext.count(".") < 1 or changes["version"] != ver: + if archext.count(".") < 1 or u.pkg.changes["version"] != ver: all_auto = 0 continue - ABH = Cnf.SubTree("AutomaticByHandPackages") + ABH = cnf.SubTree("AutomaticByHandPackages") if not ABH.has_key(pckg) or \ - ABH["%s::Source" % (pckg)] != changes["source"]: - print "not match %s %s" % (pckg, changes["source"]) + ABH["%s::Source" % (pckg)] != u.pkg.changes["source"]: + print "not match %s %s" % (pckg, u.pkg.changes["source"]) all_auto = 0 continue (arch, ext) = archext.split(".", 1) - if arch not in changes["architecture"]: + if arch not in u.pkg.changes["architecture"]: all_auto = 0 continue - files[f]["byhand-arch"] = arch - files[f]["byhand-script"] = ABH["%s::Script" % (pckg)] + u.pkg.files[f]["byhand-arch"] = arch + u.pkg.files[f]["byhand-script"] = ABH["%s::Script" % (pckg)] return any_auto and all_auto -def do_autobyhand (summary, short_summary): +def do_autobyhand(u, summary, short_summary): print "Attempting AUTOBYHAND." - byhandleft = 0 - for f in files.keys(): + byhandleft = True + for f, entry in u.pkg.files.items(): byhandfile = f - if not files[f].has_key("byhand"): + + if not entry.has_key("byhand"): continue - if not files[f].has_key("byhand-script"): - byhandleft = 1 + + if not entry.has_key("byhand-script"): + byhandleft = True continue os.system("ls -l %s" % byhandfile) + result = os.system("%s %s %s %s %s" % ( - files[f]["byhand-script"], byhandfile, - changes["version"], files[f]["byhand-arch"], - os.path.abspath(pkg.changes_file))) + entry["byhand-script"], + byhandfile, + u.pkg.changes["version"], + entry["byhand-arch"], + os.path.abspath(u.pkg.changes_file))) + if result == 0: os.unlink(byhandfile) - del files[f] + del entry else: print "Error processing %s, left as byhand." % (f) - byhandleft = 1 + byhandleft = True if byhandleft: - do_byhand(summary, short_summary) + do_byhand(u, summary, short_summary) else: - accept(summary, short_summary) + u.accept(summary, short_summary) + u.check_override() + # XXX: We seem to be missing a u.remove() here + # This might explain why we get byhand leftovers in unchecked - mhy ################################################################################ -def is_byhand (): - for f in files.keys(): - if files[f].has_key("byhand"): - return 1 - return 0 - -def do_byhand (summary, short_summary): - print "Moving to BYHAND holding area." 
- Logger.log(["Moving to byhand", pkg.changes_file]) +def is_byhand(u): + for f in u.pkg.files.keys(): + if u.pkg.files[f].has_key("byhand"): + return True + return False - Upload.dump_vars(Cnf["Dir::Queue::Byhand"]) - move_to_dir(Cnf["Dir::Queue::Byhand"]) - - # Check for override disparities - Upload.Subst["__SUMMARY__"] = summary - Upload.check_override() +def do_byhand(u, summary, short_summary): + return package_to_queue(u, summary, short_summary, "Byhand", + perms=0660, build=False, announce=None) ################################################################################ -def is_new (): - for f in files.keys(): - if files[f].has_key("new"): - return 1 - return 0 +def is_new(u): + for f in u.pkg.files.keys(): + if u.pkg.files[f].has_key("new"): + return True + return False -def acknowledge_new (summary, short_summary): - Subst = Upload.Subst +def acknowledge_new(u, summary, short_summary): + cnf = Config() print "Moving to NEW holding area." - Logger.log(["Moving to new", pkg.changes_file]) + Logger.log(["Moving to new", u.pkg.changes_file]) - Upload.dump_vars(Cnf["Dir::Queue::New"]) - move_to_dir(Cnf["Dir::Queue::New"], perms=0640, changesperms=0644) + u.pkg.write_dot_dak(cnf["Dir::Queue::New"]) + u.move_to_dir(cnf["Dir::Queue::New"], perms=0640, changesperms=0644) if not Options["No-Mail"]: print "Sending new ack." - Subst["__SUMMARY__"] = summary - new_ack_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.new") + template = os.path.join(cnf["Dir::Templates"], 'process-unchecked.new') + u.Subst["__SUMMARY__"] = summary + new_ack_message = utils.TemplateSubst(u.Subst, template) utils.send_mail(new_ack_message) ################################################################################ @@@ -441,99 -1517,73 +1454,99 @@@ # we force the .orig.tar.gz into the .changes structure and reprocess # the .changes file. -def process_it (changes_file): - global reprocess, reject_message +def process_it(changes_file): + global Logger + + cnf = Config() + + holding = Holding() + + u = Upload() + u.pkg.changes_file = changes_file + u.pkg.directory = os.getcwd() + u.logger = Logger - # Reset some globals - reprocess = 1 - Upload.init_vars() # Some defaults in case we can't fully process the .changes file - changes["maintainer2047"] = Cnf["Dinstall::MyEmailAddress"] - changes["changedby2047"] = Cnf["Dinstall::MyEmailAddress"] - reject_message = "" + u.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"] + u.pkg.changes["changedby2047"] = cnf["Dinstall::MyEmailAddress"] - # Absolutize the filename to avoid the requirement of being in the - # same directory as the .changes file. - pkg.changes_file = os.path.abspath(changes_file) + # debian-{devel-,}-changes@lists.debian.org toggles writes access based on this header + bcc = "X-DAK: dak process-unchecked\nX-Katie: $Revision: 1.65 $" + if cnf.has_key("Dinstall::Bcc"): + u.Subst["__BCC__"] = bcc + "\nBcc: %s" % (cnf["Dinstall::Bcc"]) + else: + u.Subst["__BCC__"] = bcc # Remember where we are so we can come back after cd-ing into the - # holding directory. - pkg.directory = os.getcwd() + # holding directory. TODO: Fix this stupid hack + u.prevdir = os.getcwd() + + # TODO: Figure out something better for this (or whether it's even + # necessary - it seems to have been for use when we were + # still doing the is_unchecked check; reprocess = 2) + u.reprocess = 1 try: # If this is the Real Thing(tm), copy things into a private # holding directory first to avoid replacable file races. 
if not Options["No-Action"]: - os.chdir(Cnf["Dir::Queue::Holding"]) - copy_to_holding(pkg.changes_file) + os.chdir(cnf["Dir::Queue::Holding"]) + + # Absolutize the filename to avoid the requirement of being in the + # same directory as the .changes file. + holding.copy_to_holding(os.path.abspath(changes_file)) + # Relativize the filename so we use the copy in holding # rather than the original... - pkg.changes_file = os.path.basename(pkg.changes_file) - changes["fingerprint"] = utils.check_signature(pkg.changes_file, reject) - if changes["fingerprint"]: - valid_changes_p = check_changes() + changespath = os.path.basename(u.pkg.changes_file) + + (u.pkg.changes["fingerprint"], rejects) = utils.check_signature(changespath) + + if u.pkg.changes["fingerprint"]: + valid_changes_p = u.load_changes(changespath) else: - valid_changes_p = 0 + valid_changes_p = False + u.rejects.extend(rejects) + if valid_changes_p: - while reprocess: - check_distributions() - check_files() - valid_dsc_p = check_dsc() + while u.reprocess: + u.check_distributions() + u.check_files(not Options["No-Action"]) + valid_dsc_p = u.check_dsc(not Options["No-Action"]) if valid_dsc_p: - check_source() - check_hashes() - check_urgency() - check_timestamps() - check_signed_by_key() - Upload.update_subst(reject_message) - action() + u.check_source() + u.check_hashes() + u.check_urgency() + u.check_timestamps() + u.check_signed_by_key() + + action(u) + except SystemExit: raise + except: print "ERROR" traceback.print_exc(file=sys.stderr) - pass # Restore previous WD - os.chdir(pkg.directory) + os.chdir(u.prevdir) ############################################################################### def main(): - global Cnf, Options, Logger + global Options, Logger + cnf = Config() changes_files = init() # -n/--dry-run invalidates some other options which would involve things happening if Options["No-Action"]: Options["Automatic"] = "" + # Initialize our Holding singleton + holding = Holding() + # Ensure all the arguments we were given are .changes files for f in changes_files: if not f.endswith(".changes"): @@@ -541,18 -1591,20 +1554,18 @@@ changes_files.remove(f) if changes_files == []: - if Cnf["Dinstall::Options::Directory"] == "": + if cnf["Dinstall::Options::Directory"] == "": utils.fubar("Need at least one .changes file as an argument.") else: sys.exit(0) # Check that we aren't going to clash with the daily cron job - - if not Options["No-Action"] and os.path.exists("%s/daily.lock" % (Cnf["Dir::Lock"])) and not Options["No-Lock"]: + if not Options["No-Action"] and os.path.exists("%s/daily.lock" % (cnf["Dir::Lock"])) and not Options["No-Lock"]: utils.fubar("Archive maintenance in progress. 
Try again later.") # Obtain lock if not in no-action mode and initialize the log - if not Options["No-Action"]: - lock_fd = os.open(Cnf["Dinstall::LockFile"], os.O_RDWR | os.O_CREAT) + lock_fd = os.open(cnf["Dinstall::LockFile"], os.O_RDWR | os.O_CREAT) try: fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB) except IOError, e: @@@ -560,7 -1612,15 +1573,7 @@@ utils.fubar("Couldn't obtain lock; assuming another 'dak process-unchecked' is already running.") else: raise - Logger = Upload.Logger = logging.Logger(Cnf, "process-unchecked") - - # debian-{devel-,}-changes@lists.debian.org toggles writes access based on this header - bcc = "X-DAK: dak process-unchecked\nX-Katie: $Revision: 1.65 $" - if Cnf.has_key("Dinstall::Bcc"): - Upload.Subst["__BCC__"] = bcc + "\nBcc: %s" % (Cnf["Dinstall::Bcc"]) - else: - Upload.Subst["__BCC__"] = bcc - + Logger = daklog.Logger(cnf, "process-unchecked") # Sort the .changes files so that we process sourceful ones first changes_files.sort(utils.changes_compare) @@@ -572,11 -1632,10 +1585,11 @@@ process_it (changes_file) finally: if not Options["No-Action"]: - clean_holding() + holding.clean() + + accept_count = SummaryStats().accept_count + accept_bytes = SummaryStats().accept_bytes - accept_count = Upload.accept_count - accept_bytes = Upload.accept_bytes if accept_count: sets = "set" if accept_count > 1: diff --combined dak/transitions.py index 005d109e,a214337e..9c4e7d8b --- a/dak/transitions.py +++ b/dak/transitions.py @@@ -29,6 -29,7 +29,6 @@@ Display, edit and check the release man ################################################################################ import os -import pg import sys import time import errno @@@ -36,8 -37,7 +36,8 @@@ import fcnt import tempfile import pwd import apt_pkg -from daklib import database + +from daklib.dbconn import * from daklib import utils from daklib.dak_exceptions import TransitionsError from daklib.regexes import re_broken_package @@@ -46,6 -46,7 +46,6 @@@ import yam # Globals Cnf = None #: Configuration, apt_pkg.Configuration Options = None #: Parsed CommandLine arguments -projectB = None #: database connection, pgobject ################################################################################ @@@ -59,7 -60,7 +59,7 @@@ def init() @attention: This function may run B{within sudo} """ - global Cnf, Options, projectB + global Cnf, Options apt_pkg.init() @@@ -84,15 -85,13 +84,13 @@@ if Options["help"]: usage() - whoami = os.getuid() - whoamifull = pwd.getpwuid(whoami) - username = whoamifull[0] + username = utils.getusername() if username != "dak": print "Non-dak user: %s" % username Options["sudo"] = "y" - projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"])) - database.init(Cnf, projectB) + # Initialise DB connection + DBConn() ################################################################################ @@@ -398,26 -397,22 +396,26 @@@ def check_transitions(transitions) to_dump = 0 to_remove = [] info = {} + + session = DBConn().session() + # Now look through all defined transitions for trans in transitions: t = transitions[trans] source = t["source"] expected = t["new"] - # Will be None if nothing is in testing. - current = database.get_suite_version(source, "testing") + # Will be an empty list if nothing is in testing. 
+ sources = get_source_in_suite(source, "testing", session) info[trans] = get_info(trans, source, expected, t["rm"], t["reason"], t["packages"]) print info[trans] - if current == None: + if len(sources) < 1: # No package in testing print "Transition source %s not in testing, transition still ongoing." % (source) else: + current = sources[0].version compare = apt_pkg.VersionCompare(current, expected) if compare < 0: # This is still valid, the current version in database is older than @@@ -531,16 -526,15 +529,16 @@@ def transition_info(transitions) source = t["source"] expected = t["new"] - # Will be None if nothing is in testing. - current = database.get_suite_version(source, "testing") + # Will be empty list if nothing is in testing. + sources = get_suite_version(source, "testing") print get_info(trans, source, expected, t["rm"], t["reason"], t["packages"]) - if current == None: + if len(sources) < 1: # No package in testing print "Transition source %s not in testing, transition still ongoing." % (source) else: + current = sources[0].version compare = apt_pkg.VersionCompare(current, expected) print "Apt compare says: %s" % (compare) if compare < 0: diff --combined daklib/database.py index fe7d1931,ad725703..cbdfad04 --- a/daklib/database.py +++ b/daklib/database.py @@@ -35,6 -35,7 +35,6 @@@ import tim import types import utils import pg -from binary import Binary ################################################################################ @@@ -79,6 -80,31 +79,6 @@@ def init (config, sql) Cnf = config projectB = sql - -def do_query(query): - """ - Executes a database query. Writes statistics / timing to stderr. - - @type query: string - @param query: database query string, passed unmodified - - @return: db result - - @warning: The query is passed B{unmodified}, so be careful what you use this for. - """ - sys.stderr.write("query: \"%s\" ... 
" % (query)) - before = time.time() - r = projectB.query(query) - time_diff = time.time()-before - sys.stderr.write("took %.3f seconds.\n" % (time_diff)) - if type(r) is int: - sys.stderr.write("int result: %s\n" % (r)) - elif type(r) is types.NoneType: - sys.stderr.write("result: None\n") - else: - sys.stderr.write("pgresult: %s\n" % (r.getresult())) - return r - ################################################################################ def get_suite_id (suite): @@@ -849,8 -875,8 +849,8 @@@ def has_new_comment(package, version, i @type version: string @param version: package version - @type version: boolean - @param version: ignore trainee comments + @type ignore_trainee: boolean + @param ignore_trainee: ignore trainee comments @rtype: boolean @return: true/false diff --combined daklib/utils.py index 7b139767,073b8133..5edf2cb1 --- a/daklib/utils.py +++ b/daklib/utils.py @@@ -35,14 -35,12 +35,14 @@@ import tempfil import traceback import stat import apt_pkg -import database import time import re import string import email as modemail + +from dbconn import DBConn, get_architecture, get_component, get_suite from dak_exceptions import * +from textutils import fix_maintainer from regexes import re_html_escaping, html_escaping, re_single_line_field, \ re_multi_line_field, re_srchasver, re_verwithext, \ re_parse_maintainer, re_taint_free, re_gpg_uid, re_re_mark, \ @@@ -386,6 -384,41 +386,6 @@@ def _ensure_dsc_hash(dsc, dsc_files, ha ################################################################################ -def ensure_hashes(changes, dsc, files, dsc_files): - rejmsg = [] - - # Make sure we recognise the format of the Files: field in the .changes - format = changes.get("format", "0.0").split(".", 1) - if len(format) == 2: - format = int(format[0]), int(format[1]) - else: - format = int(float(format[0])), 0 - - # We need to deal with the original changes blob, as the fields we need - # might not be in the changes dict serialised into the .dak anymore. - orig_changes = parse_deb822(changes['filecontents']) - - # Copy the checksums over to the current changes dict. This will keep - # the existing modifications to it intact. - for field in orig_changes: - if field.startswith('checksums-'): - changes[field] = orig_changes[field] - - # Check for unsupported hashes - rejmsg.extend(check_hash_fields(".changes", changes)) - rejmsg.extend(check_hash_fields(".dsc", dsc)) - - # We have to calculate the hash if we have an earlier changes version than - # the hash appears in rather than require it exist in the changes file - for hashname, hashfunc, version in known_hashes: - rejmsg.extend(_ensure_changes_hash(changes, format, version, files, - hashname, hashfunc)) - if "source" in changes["architecture"]: - rejmsg.extend(_ensure_dsc_hash(dsc, dsc_files, hashname, - hashfunc)) - - return rejmsg - def parse_checksums(where, files, manifest, hashname): rejmsg = [] field = 'checksums-%s' % hashname @@@ -479,6 -512,92 +479,6 @@@ def build_file_list(changes, is_a_dsc=0 ################################################################################ -def force_to_utf8(s): - """ - Forces a string to UTF-8. If the string isn't already UTF-8, - it's assumed to be ISO-8859-1. - """ - try: - unicode(s, 'utf-8') - return s - except UnicodeError: - latin1_s = unicode(s,'iso8859-1') - return latin1_s.encode('utf-8') - -def rfc2047_encode(s): - """ - Encodes a (header) string per RFC2047 if necessary. If the - string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1. 
- """ - try: - codecs.lookup('ascii')[1](s) - return s - except UnicodeError: - pass - try: - codecs.lookup('utf-8')[1](s) - h = email.Header.Header(s, 'utf-8', 998) - return str(h) - except UnicodeError: - h = email.Header.Header(s, 'iso-8859-1', 998) - return str(h) - -################################################################################ - -# 'The standard sucks, but my tool is supposed to interoperate -# with it. I know - I'll fix the suckage and make things -# incompatible!' - -def fix_maintainer (maintainer): - """ - Parses a Maintainer or Changed-By field and returns: - 1. an RFC822 compatible version, - 2. an RFC2047 compatible version, - 3. the name - 4. the email - - The name is forced to UTF-8 for both 1. and 3.. If the name field - contains '.' or ',' (as allowed by Debian policy), 1. and 2. are - switched to 'email (name)' format. - - """ - maintainer = maintainer.strip() - if not maintainer: - return ('', '', '', '') - - if maintainer.find("<") == -1: - email = maintainer - name = "" - elif (maintainer[0] == "<" and maintainer[-1:] == ">"): - email = maintainer[1:-1] - name = "" - else: - m = re_parse_maintainer.match(maintainer) - if not m: - raise ParseMaintError, "Doesn't parse as a valid Maintainer field." - name = m.group(1) - email = m.group(2) - - # Get an RFC2047 compliant version of the name - rfc2047_name = rfc2047_encode(name) - - # Force the name to be UTF-8 - name = force_to_utf8(name) - - if name.find(',') != -1 or name.find('.') != -1: - rfc822_maint = "%s (%s)" % (email, name) - rfc2047_maint = "%s (%s)" % (email, rfc2047_name) - else: - rfc822_maint = "%s <%s>" % (name, email) - rfc2047_maint = "%s <%s>" % (rfc2047_name, email) - - if email.find("@") == -1 and email.find("buildd_") != 0: - raise ParseMaintError, "No @ found in email address part." - - return (rfc822_maint, rfc2047_maint, name, email) - -################################################################################ - def send_mail (message, filename=""): """sendmail wrapper, takes _either_ a message string or a file as arguments""" @@@ -666,6 -785,16 +666,6 @@@ def which_alias_file() ################################################################################ -# Escape characters which have meaning to SQL's regex comparison operator ('~') -# (woefully incomplete) - -def regex_safe (s): - s = s.replace('+', '\\\\+') - s = s.replace('.', '\\\\.') - return s - -################################################################################ - def TemplateSubst(map, filename): """ Perform a substition of template """ templatefile = open_file(filename) @@@ -691,6 -820,9 +691,9 @@@ def warn(msg) def whoami (): return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '') + def getusername (): + return pwd.getpwuid(os.getuid())[0] + ################################################################################ def size_type (c): @@@ -866,19 -998,15 +869,19 @@@ def get_conf() def parse_args(Options): """ Handle -a, -c and -s arguments; returns them as SQL constraints """ + # XXX: This should go away and everything which calls it be converted + # to use SQLA properly. For now, we'll just fix it not to use + # the old Pg interface though + session = DBConn().session() # Process suite if Options["Suite"]: suite_ids_list = [] - for suite in split_args(Options["Suite"]): - suite_id = database.get_suite_id(suite) - if suite_id == -1: - warn("suite '%s' not recognised." 
@@@ -866,19 -998,15 +869,19 @@@ def get_conf()
 def parse_args(Options):
     """ Handle -a, -c and -s arguments; returns them as SQL constraints """
+    # XXX: This should go away and everything which calls it be converted
+    #      to use SQLA properly. For now, we'll just fix it not to use
+    #      the old Pg interface though
+    session = DBConn().session()
     # Process suite
     if Options["Suite"]:
         suite_ids_list = []
-        for suite in split_args(Options["Suite"]):
-            suite_id = database.get_suite_id(suite)
-            if suite_id == -1:
-                warn("suite '%s' not recognised." % (suite))
+        for suitename in split_args(Options["Suite"]):
+            suite = get_suite(suitename, session=session)
+            if suite.suite_id is None:
+                warn("suite '%s' not recognised." % (suite.suite_name))
             else:
-                suite_ids_list.append(suite_id)
+                suite_ids_list.append(suite.suite_id)
         if suite_ids_list:
             con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
         else:
@@@ -889,12 -1017,12 +892,12 @@@
     # Process component
     if Options["Component"]:
         component_ids_list = []
-        for component in split_args(Options["Component"]):
-            component_id = database.get_component_id(component)
-            if component_id == -1:
-                warn("component '%s' not recognised." % (component))
+        for componentname in split_args(Options["Component"]):
+            component = get_component(componentname, session=session)
+            if component is None:
+                warn("component '%s' not recognised." % (componentname))
             else:
-                component_ids_list.append(component_id)
+                component_ids_list.append(component.component_id)
         if component_ids_list:
             con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
         else:
@@@ -904,18 -1032,18 +907,18 @@@

     # Process architecture
     con_architectures = ""
+    check_source = 0
     if Options["Architecture"]:
         arch_ids_list = []
-        check_source = 0
-        for architecture in split_args(Options["Architecture"]):
-            if architecture == "source":
+        for archname in split_args(Options["Architecture"]):
+            if archname == "source":
                 check_source = 1
             else:
-                architecture_id = database.get_architecture_id(architecture)
-                if architecture_id == -1:
-                    warn("architecture '%s' not recognised." % (architecture))
+                arch = get_architecture(archname, session=session)
+                if arch is None:
+                    warn("architecture '%s' not recognised." % (archname))
                 else:
-                    arch_ids_list.append(architecture_id)
+                    arch_ids_list.append(arch.arch_id)
         if arch_ids_list:
             con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
         else:
@@@ -1153,7 -1281,7 +1156,7 @@@ def gpg_keyring_args(keyrings=None)

 ################################################################################

-def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
+def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
     """
     Check the signature of a file and return the fingerprint if the
     signature is valid or 'None' if it's not. The first argument is the
@@@ -1169,16 -1297,14 +1172,16 @@@
     used.
     """

+    rejects = []
+
     # Ensure the filename contains no shell meta-characters or other badness
     if not re_taint_free.match(sig_filename):
-        reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
-        return None
+        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
+        return (None, rejects)

     if data_filename and not re_taint_free.match(data_filename):
-        reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))
-        return None
+        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
+        return (None, rejects)

     if not keyrings:
         keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
@@@ -1189,8 -1315,8 +1192,8 @@@
     if autofetch:
         error_msg = retrieve_key(sig_filename)
         if error_msg:
-            reject(error_msg)
-            return None
+            rejects.append(error_msg)
+            return (None, rejects)

     # Build the command line
     status_read, status_write = os.pipe()
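Two observations on the hunks above. First, in parse_args() the suite branch tests suite.suite_id is None, while the component and architecture branches test whether the lookup itself returned None; if get_suite() behaves like get_component() and get_architecture() and returns None for an unknown name, the suite branch would raise AttributeError instead of printing its warning. Second, check_signature() no longer takes a reject() callback: failures accumulate in a local rejects list and the function now returns a (fingerprint, rejects) tuple (the docstring above still describes the old bare-fingerprint return). A hedged sketch of a call site after the change ('queue/some.changes' is a placeholder path; keyrings default to Dinstall::GPGKeyring as before):

    from daklib.utils import check_signature

    (fingerprint, rejects) = check_signature("queue/some.changes")
    if rejects:
        # each of these messages previously went through the reject() callback
        for msg in rejects:
            print msg
    else:
        print "valid signature from %s" % (fingerprint)
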
@@@ -1205,32 -1331,40 +1208,32 @@@
     # If we failed to parse the status-fd output, let's just whine and bail now
     if internal_error:
-        reject("internal error while performing signature check on %s." % (sig_filename))
-        reject(internal_error, "")
-        reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")
-        return None
+        rejects.append("internal error while performing signature check on %s." % (sig_filename))
+        rejects.append(internal_error, "")
+        rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.", "")
+        return (None, rejects)

-    bad = ""
     # Now check for obviously bad things in the processed output
     if keywords.has_key("KEYREVOKED"):
-        reject("The key used to sign %s has been revoked." % (sig_filename))
-        bad = 1
+        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
     if keywords.has_key("BADSIG"):
-        reject("bad signature on %s." % (sig_filename))
-        bad = 1
+        rejects.append("bad signature on %s." % (sig_filename))
     if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
-        reject("failed to check signature on %s." % (sig_filename))
-        bad = 1
+        rejects.append("failed to check signature on %s." % (sig_filename))
     if keywords.has_key("NO_PUBKEY"):
         args = keywords["NO_PUBKEY"]
         if len(args) >= 1:
             key = args[0]
-        reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
-        bad = 1
+        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
     if keywords.has_key("BADARMOR"):
-        reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
-        bad = 1
+        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
     if keywords.has_key("NODATA"):
-        reject("no signature found in %s." % (sig_filename))
-        bad = 1
+        rejects.append("no signature found in %s." % (sig_filename))
     if keywords.has_key("EXPKEYSIG"):
         args = keywords["EXPKEYSIG"]
         if len(args) >= 1:
             key = args[0]
-        reject("Signature made by expired key 0x%s" % (key))
-        bad = 1
+        rejects.append("Signature made by expired key 0x%s" % (key))
     if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
         args = keywords["KEYEXPIRED"]
         expiredate=""
@@@ -1243,33 -1377,38 +1246,33 @@@
         if len(args) >= 1:
             timestamp = args[0]
             if timestamp.count("T") == 0:
                 try:
                     expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
                 except ValueError:
                     expiredate = "unknown (%s)" % (timestamp)
             else:
                 expiredate = timestamp
-        reject("The key used to sign %s has expired on %s" % (sig_filename, expiredate))
-        bad = 1
+        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

-    if bad:
-        return None
+    if len(rejects) > 0:
+        return (None, rejects)

     # Next check gpgv exited with a zero return code
     if exit_status:
-        reject("gpgv failed while checking %s." % (sig_filename))
+        rejects.append("gpgv failed while checking %s." % (sig_filename))
         if status.strip():
-            reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
+            rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
         else:
-            reject(prefix_multi_line_string(output, " [GPG output:] "), "")
-        return None
+            rejects.append(prefix_multi_line_string(output, " [GPG output:] "), "")
+        return (None, rejects)
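Some of the conversions above look like latent bugs rather than intended changes: rejects.append(internal_error, "") and the two rejects.append(prefix_multi_line_string(...), "") calls keep the old reject(message, prefix) two-argument convention, but list.append() takes exactly one argument, so those lines would raise TypeError if ever reached. The presumably intended form simply drops the empty prefix argument:

    rejects.append(internal_error)
    rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
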

     # Sanity check the good stuff we expect
     if not keywords.has_key("VALIDSIG"):
-        reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
-        bad = 1
+        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
     else:
         args = keywords["VALIDSIG"]
         if len(args) < 1:
-            reject("internal error while checking signature on %s." % (sig_filename))
-            bad = 1
+            rejects.append("internal error while checking signature on %s." % (sig_filename))
         else:
             fingerprint = args[0]

     if not keywords.has_key("GOODSIG"):
-        reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
-        bad = 1
+        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
     if not keywords.has_key("SIG_ID"):
-        reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
-        bad = 1
+        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

     # Finally ensure there's not something we don't recognise
     known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
@@@ -1278,12 -1417,13 +1281,12 @@@

     for keyword in keywords.keys():
         if not known_keywords.has_key(keyword):
-            reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
-            bad = 1
+            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

-    if bad:
-        return None
+    if len(rejects) > 0:
+        return (None, rejects)
     else:
-        return fingerprint
+        return (fingerprint, [])

################################################################################
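For context on the keywords dict these checks walk: gpgv's --status-fd output is a series of lines prefixed with "[GNUPG:]", which a helper elsewhere in this file (not shown in this diff) reduces to a token-to-argument-list mapping before check_signature() inspects it. A minimal sketch of that reduction, assuming the field layout documented in GnuPG's doc/DETAILS:

    def parse_status_fd(status):
        """Reduce gpgv --status-fd output to a {keyword: [args]} dict."""
        keywords = {}
        for line in status.split('\n'):
            fields = line.split()
            # only "[GNUPG:] KEYWORD [args...]" lines carry status information
            if len(fields) < 2 or fields[0] != "[GNUPG:]":
                continue
            keywords[fields[1]] = fields[2:]
        return keywords

    # e.g. a "[GNUPG:] VALIDSIG <fingerprint> ..." line ends up as
    # keywords["VALIDSIG"][0], the fingerprint returned by check_signature().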