X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=jennifer;h=e11153fc49f13241efde802c10ad699643202c7c;hb=9540d873fa78598454af57f5f8a4875969ed0439;hp=ace638049f2e48130e3a9d9fc288c223e5c1c347;hpb=c511e48da2f9c0735fa59fa57558816b183a99b2;p=dak.git diff --git a/jennifer b/jennifer index ace63804..e11153fc 100755 --- a/jennifer +++ b/jennifer @@ -1,8 +1,8 @@ #!/usr/bin/env python # Checks Debian packages from Incoming -# Copyright (C) 2000, 2001, 2002 James Troup -# $Id: jennifer,v 1.25 2002-07-14 17:07:45 troup Exp $ +# Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005 James Troup +# $Id: jennifer,v 1.65 2005-12-05 05:35:47 ajt Exp $ # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -29,7 +29,7 @@ ################################################################################ -import FCNTL, errno, fcntl, gzip, os, re, select, shutil, stat, string, sys, time, traceback; +import commands, errno, fcntl, os, re, shutil, stat, sys, time, tempfile, traceback; import apt_inst, apt_pkg; import db_access, katie, logging, utils; @@ -37,15 +37,15 @@ from types import *; ################################################################################ -re_bad_diff = re.compile("^[\-\+][\-\+][\-\+] /dev/null"); -re_is_changes = re.compile(r"(.+?)_(.+?)_(.+?)\.changes$"); re_valid_version = re.compile(r"^([0-9]+:)?[0-9A-Za-z\.\-\+:]+$"); re_valid_pkg_name = re.compile(r"^[\dA-Za-z][\dA-Za-z\+\-\.]+$"); +re_changelog_versions = re.compile(r"^\w[-+0-9a-z.]+ \([^\(\) \t]+\)"); +re_strip_revision = re.compile(r"-([^-]+)$"); ################################################################################ # Globals -jennifer_version = "$Revision: 1.25 $"; +jennifer_version = "$Revision: 1.65 $"; Cnf = None; Options = None; @@ -87,6 +87,12 @@ def init(): changes_files = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv); Options = Cnf.SubTree("Dinstall::Options") + if 
Options["Help"]: + usage(); + elif Options["Version"]: + print "jennifer %s" % (jennifer_version); + sys.exit(0); + Katie = katie.Katie(Cnf); changes = Katie.pkg.changes; @@ -97,7 +103,7 @@ def init(): return changes_files; -######################################################################################### +################################################################################ def usage (exit_code=0): print """Usage: dinstall [OPTION]... [CHANGES]... @@ -109,187 +115,12 @@ def usage (exit_code=0): -V, --version display the version number and exit""" sys.exit(exit_code) -######################################################################################### - -# Our very own version of commands.getouputstatus(), hacked to support -# gpgv's status fd. -def get_status_output(cmd, status_read, status_write): - cmd = ['/bin/sh', '-c', cmd]; - p2cread, p2cwrite = os.pipe(); - c2pread, c2pwrite = os.pipe(); - errout, errin = os.pipe(); - pid = os.fork(); - if pid == 0: - # Child - os.close(0); - os.close(1); - os.dup(p2cread); - os.dup(c2pwrite); - os.close(2); - os.dup(errin); - for i in range(3, 256): - if i != status_write: - try: - os.close(i); - except: - pass; - try: - os.execvp(cmd[0], cmd); - finally: - os._exit(1); - - # parent - os.close(p2cread) - os.dup2(c2pread, c2pwrite); - os.dup2(errout, errin); - - output = status = ""; - while 1: - i, o, e = select.select([c2pwrite, errin, status_read], [], []); - more_data = []; - for fd in i: - r = os.read(fd, 8196); - if len(r) > 0: - more_data.append(fd); - if fd == c2pwrite or fd == errin: - output = output + r; - elif fd == status_read: - status = status + r; - else: - utils.fubar("Unexpected file descriptor [%s] returned from select\n" % (fd)); - if not more_data: - pid, exit_status = os.waitpid(pid, 0) - try: - os.close(status_write); - os.close(status_read); - os.close(c2pread); - os.close(c2pwrite); - os.close(p2cwrite); - os.close(errin); - os.close(errout); - except: - pass; - break; - - 
return output, status, exit_status; - -######################################################################################### - -def Dict(**dict): return dict +################################################################################ def reject (str, prefix="Rejected: "): global reject_message; if str: - reject_message = reject_message + prefix + str + "\n"; - -######################################################################################### - -def check_signature (filename): - if not utils.re_taint_free.match(os.path.basename(filename)): - reject("!!WARNING!! tainted filename: '%s'." % (filename)); - return 0; - - status_read, status_write = os.pipe(); - cmd = "gpgv --status-fd %s --keyring %s --keyring %s %s" \ - % (status_write, Cnf["Dinstall::PGPKeyring"], Cnf["Dinstall::GPGKeyring"], filename); - (output, status, exit_status) = get_status_output(cmd, status_read, status_write); - - # Process the status-fd output - keywords = {}; - bad = internal_error = ""; - for line in string.split(status, '\n'): - line = string.strip(line); - if line == "": - continue; - split = string.split(line); - if len(split) < 2: - internal_error = internal_error + "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line); - continue; - (gnupg, keyword) = split[:2]; - if gnupg != "[GNUPG:]": - internal_error = internal_error + "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg); - continue; - args = split[2:]; - if keywords.has_key(keyword) and keyword != "NODATA": - internal_error = internal_error + "found duplicate status token ('%s').\n" % (keyword); - continue; - else: - keywords[keyword] = args; - - # If we failed to parse the status-fd output, let's just whine and bail now - if internal_error: - reject("internal error while performing signature check on %s." 
% (filename)); - reject(internal_error, ""); - reject("Please report the above errors to the Archive maintainers by replying to this mail.", ""); - return None; - - # Now check for obviously bad things in the processed output - if keywords.has_key("SIGEXPIRED"): - reject("key used to sign %s has expired." % (filename)); - bad = 1; - if keywords.has_key("KEYREVOKED"): - reject("key used to sign %s has been revoked." % (filename)); - bad = 1; - if keywords.has_key("BADSIG"): - reject("bad signature on %s." % (filename)); - bad = 1; - if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"): - reject("failed to check signature on %s." % (filename)); - bad = 1; - if keywords.has_key("NO_PUBKEY"): - reject("key used to sign %s not found in keyring." % (filename)); - bad = 1; - if keywords.has_key("BADARMOR"): - reject("ascii armour of signature was corrupt in %s." % (filename)); - bad = 1; - if keywords.has_key("NODATA"): - reject("no signature found in %s." % (filename)); - bad = 1; - - if bad: - return None; - - # Next check gpgv exited with a zero return code - if exit_status: - reject("gpgv failed while checking %s." % (filename)); - if string.strip(status): - reject(utils.prefix_multi_line_string(status, " [GPG status-fd output:] "), ""); - else: - reject(utils.prefix_multi_line_string(output, " [GPG output:] "), ""); - return None; - - # Sanity check the good stuff we expect - if not keywords.has_key("VALIDSIG"): - reject("signature on %s does not appear to be valid [No VALIDSIG]." % (filename)); - bad = 1; - else: - args = keywords["VALIDSIG"]; - if len(args) < 1: - reject("internal error while checking signature on %s." % (filename)); - bad = 1; - else: - fingerprint = args[0]; - if not keywords.has_key("GOODSIG"): - reject("signature on %s does not appear to be valid [No GOODSIG]." % (filename)); - bad = 1; - if not keywords.has_key("SIG_ID"): - reject("signature on %s does not appear to be valid [No SIG_ID]." 
% (filename)); - bad = 1; - - # Finally ensure there's not something we don't recognise - known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="", - SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="", - NODATA=""); - - for keyword in keywords.keys(): - if not known_keywords.has_key(keyword): - reject("found unknown status token '%s' from gpgv with args '%s' in %s." % (keyword, repr(keywords[keyword]), filename)); - bad = 1; - - if bad: - return None; - else: - return fingerprint; + reject_message += prefix + str + "\n"; ################################################################################ @@ -306,7 +137,7 @@ def copy_to_holding(filename): # Shouldn't happen, but will if, for example, someone lists a # file twice in the .changes. if errno.errorcode[e.errno] == 'EEXIST': - reject("%s already exists in holding area; can not overwrite." % (base_filename)); + reject("%s: already exists in holding area; can not overwrite." % (base_filename)); return; raise; @@ -317,17 +148,16 @@ def copy_to_holding(filename): # O_CREAT | O_EXCLed ghost file, so add the file to the list # of 'in holding' even if it's not the real file. if errno.errorcode[e.errno] == 'ENOENT': - reject("can not copy %s to holding area: file not found." % (base_filename)); + reject("%s: can not copy to holding area: file not found." % (base_filename)); os.unlink(dest); return; elif errno.errorcode[e.errno] == 'EACCES': - reject("can not copy %s to holding area: read permission denied." % (base_filename)); + reject("%s: can not copy to holding area: read permission denied." % (base_filename)); os.unlink(dest); return; raise; in_holding[base_filename] = ""; - return dest; ################################################################################ @@ -338,7 +168,7 @@ def clean_holding(): os.chdir(Cnf["Dir::Queue::Holding"]); for file in in_holding.keys(): if os.path.exists(file): - if string.find(file, '/') != -1: + if file.find('/') != -1: utils.fubar("WTF? 
clean_holding() got a file ('%s') with / in it!" % (file)); else: os.unlink(file); @@ -350,34 +180,30 @@ def clean_holding(): def check_changes(): filename = pkg.changes_file; - # Default in case we bail out - changes["maintainer822"] = Cnf["Dinstall::MyEmailAddress"]; - changes["changedby822"] = Cnf["Dinstall::MyEmailAddress"]; - changes["architecture"] = {}; - # Parse the .changes field into a dictionary try: changes.update(utils.parse_changes(filename)); except utils.cant_open_exc: - reject("can't read changes file '%s'." % (filename)); + reject("%s: can't read file." % (filename)); return 0; except utils.changes_parse_error_exc, line: - reject("error parsing changes file '%s', can't grok: %s." % (filename, line)); + reject("%s: parse error, can't grok: %s." % (filename, line)); return 0; # Parse the Files field from the .changes into another dictionary try: files.update(utils.build_file_list(changes)); except utils.changes_parse_error_exc, line: - reject("error parsing changes file '%s', can't grok: %s." % (filename, line)); + reject("%s: parse error, can't grok: %s." % (filename, line)); except utils.nk_format_exc, format: - reject("unknown format '%s' of changes file '%s'." % (format, filename)); + reject("%s: unknown format '%s'." % (filename, format)); return 0; # Check for mandatory fields - for i in ("source", "binary", "architecture", "version", "distribution", "maintainer", "files"): + for i in ("source", "binary", "architecture", "version", "distribution", + "maintainer", "files", "changes", "description"): if not changes.has_key(i): - reject("Missing field `%s' in changes file." % (i)); + reject("%s: Missing mandatory field `%s'." 
% (filename, i)); return 0 # Avoid errors during later tests # Split multi-value fields into a lower-level dictionary @@ -386,20 +212,35 @@ def check_changes(): if o != "": del changes[i] changes[i] = {} - for j in string.split(o): + for j in o.split(): changes[i][j] = 1 - # Fix the Maintainer: field to be RFC822 compatible - (changes["maintainer822"], changes["maintainername"], changes["maintaineremail"]) = utils.fix_maintainer (changes["maintainer"]) - - # Fix the Changed-By: field to be RFC822 compatible; if it exists. - (changes["changedby822"], changes["changedbyname"], changes["changedbyemail"]) = utils.fix_maintainer(changes.get("changed-by","")); + # Fix the Maintainer: field to be RFC822/2047 compatible + try: + (changes["maintainer822"], changes["maintainer2047"], + changes["maintainername"], changes["maintaineremail"]) = \ + utils.fix_maintainer (changes["maintainer"]); + except utils.ParseMaintError, msg: + reject("%s: Maintainer field ('%s') failed to parse: %s" \ + % (filename, changes["maintainer"], msg)); + + # ...likewise for the Changed-By: field if it exists. + try: + (changes["changedby822"], changes["changedby2047"], + changes["changedbyname"], changes["changedbyemail"]) = \ + utils.fix_maintainer (changes.get("changed-by", "")); + except utils.ParseMaintError, msg: + (changes["changedby822"], changes["changedby2047"], + changes["changedbyname"], changes["changedbyemail"]) = \ + ("", "", "", "") + reject("%s: Changed-By field ('%s') failed to parse: %s" \ + % (filename, changes["changed-by"], msg)); # Ensure all the values in Closes: are numbers if changes.has_key("closes"): for i in changes["closes"].keys(): if katie.re_isanum.match (i) == None: - reject("`%s' from Closes field isn't a number." % (i)); + reject("%s: `%s' from Closes field isn't a number." % (filename, i)); # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. 
for .orig.tar.gz comparison) @@ -411,7 +252,12 @@ def check_changes(): base_filename = os.path.basename(filename); for dir in [ "Accepted", "Byhand", "Done", "New" ]: if os.path.exists(Cnf["Dir::Queue::%s" % (dir) ]+'/'+base_filename): - reject("a changes file with the same name already exists in the %s directory." % (dir)); + reject("%s: a file with this name already exists in the %s directory." % (base_filename, dir)); + + # Check the .changes is non-empty + if not files: + reject("%s: nothing to do (Files field is empty)." % (base_filename)) + return 0; return 1; @@ -422,7 +268,7 @@ def check_distributions(): # Handle suite mappings for map in Cnf.ValueList("SuiteMappings"): - args = string.split(map); + args = map.split(); type = args[0]; if type == "map" or type == "silent-map": (source, dest) = args[1:3]; @@ -431,6 +277,9 @@ def check_distributions(): changes["distribution"][dest] = 1; if type != "silent-map": reject("Mapping %s to %s." % (source, dest),""); + if changes.has_key("distribution-version"): + if changes["distribution-version"].has_key(source): + changes["distribution-version"][source]=dest elif type == "map-unreleased": (source, dest) = args[1:3]; if changes["distribution"].has_key(source): @@ -445,6 +294,17 @@ def check_distributions(): if changes["distribution"].has_key(suite): del changes["distribution"][suite]; reject("Ignoring %s as a target suite." % (suite), "Warning: "); + elif type == "reject": + suite = args[1]; + if changes["distribution"].has_key(suite): + reject("Uploads to %s are not accepted." 
% (suite)); + elif type == "propup-version": + # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes" + # + # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'} + if changes["distribution"].has_key(args[1]): + changes.setdefault("distribution-version", {}) + for suite in args[2:]: changes["distribution-version"][suite]=suite # Ensure there is (still) a target distribution if changes["distribution"].keys() == []: @@ -457,6 +317,48 @@ def check_distributions(): ################################################################################ +def check_deb_ar(filename, control): + """Sanity check the ar of a .deb, i.e. that there is: + + o debian-binary + o control.tar.gz + o data.tar.gz or data.tar.bz2 + +in that order, and nothing else. If the third member is a +data.tar.bz2, an additional check is performed for the required +Pre-Depends on dpkg (>= 1.10.24).""" + cmd = "ar t %s" % (filename) + (result, output) = commands.getstatusoutput(cmd) + if result != 0: + reject("%s: 'ar t' invocation failed." % (filename)) + reject(utils.prefix_multi_line_string(output, " [ar output:] "), "") + chunks = output.split('\n') + if len(chunks) != 3: + reject("%s: found %d chunks, expected 3." % (filename, len(chunks))) + if chunks[0] != "debian-binary": + reject("%s: first chunk is '%s', expected 'debian-binary'." % (filename, chunks[0])) + if chunks[1] != "control.tar.gz": + reject("%s: second chunk is '%s', expected 'control.tar.gz'." % (filename, chunks[1])) + if chunks[2] == "data.tar.bz2": + # Packages using bzip2 compression must have a Pre-Depends on dpkg >= 1.10.24. 
+ found_needed_predep = 0 + for parsed_dep in apt_pkg.ParseDepends(control.Find("Pre-Depends", "")): + for atom in parsed_dep: + (dep, version, constraint) = atom + if dep != "dpkg" or (constraint != ">=" and constraint != ">>") or \ + len(parsed_dep) > 1: # or'ed deps don't count + continue + if (constraint == ">=" and apt_pkg.VersionCompare(version, "1.10.24") < 0) or \ + (constraint == ">>" and apt_pkg.VersionCompare(version, "1.10.23") < 0): + continue + found_needed_predep = 1 + if not found_needed_predep: + reject("%s: uses bzip2 compression, but doesn't Pre-Depend on dpkg (>= 1.10.24)" % (filename)) + elif chunks[2] != "data.tar.gz": + reject("%s: third chunk is '%s', expected 'data.tar.gz' or 'data.tar.bz2'." % (filename, chunks[2])) + +################################################################################ + def check_files(): global reprocess @@ -473,7 +375,27 @@ def check_files(): copy_to_holding(file); os.chdir(cwd); + # Check there isn't already a .changes or .katie file of the same name in + # the proposed-updates "CopyChanges" or "CopyKatie" storage directories. 
+ # [NB: this check must be done post-suite mapping] + base_filename = os.path.basename(pkg.changes_file); + katie_filename = base_filename[:-8]+".katie" + for suite in changes["distribution"].keys(): + copychanges = "Suite::%s::CopyChanges" % (suite); + if Cnf.has_key(copychanges) and \ + os.path.exists(Cnf[copychanges]+"/"+base_filename): + reject("%s: a file with this name already exists in %s" \ + % (base_filename, Cnf[copychanges])); + + copykatie = "Suite::%s::CopyKatie" % (suite); + if Cnf.has_key(copykatie) and \ + os.path.exists(Cnf[copykatie]+"/"+katie_filename): + reject("%s: a file with this name already exists in %s" \ + % (katie_filename, Cnf[copykatie])); + reprocess = 0; + has_binaries = 0; + has_source = 0; for file in file_keys: # Ensure the file does not already exist in one of the accepted directories @@ -494,11 +416,12 @@ def check_files(): files[file]["type"] = "unreadable"; continue; # If it's byhand skip remaining checks - if files[file]["section"] == "byhand": + if files[file]["section"] == "byhand" or files[file]["section"] == "raw-installer": files[file]["byhand"] = 1; files[file]["type"] = "byhand"; # Checks for a binary package... - elif utils.re_isadeb.match(file) != None: + elif utils.re_isadeb.match(file): + has_binaries = 1; files[file]["type"] = "deb"; # Extract package control information @@ -544,30 +467,35 @@ def check_files(): if not changes["architecture"].has_key(architecture): reject("%s: control file lists arch as `%s', which isn't in changes file." % (file, architecture)); + # Sanity-check the Depends field + depends = control.Find("Depends"); + if depends == '': + reject("%s: Depends field is empty." 
% (file)); + # Check the section & priority match those given in the .changes (non-fatal) - if control.Find("Section") != None and files[file]["section"] != "" and files[file]["section"] != control.Find("Section"): + if control.Find("Section") and files[file]["section"] != "" and files[file]["section"] != control.Find("Section"): reject("%s control file lists section as `%s', but changes file has `%s'." % (file, control.Find("Section", ""), files[file]["section"]), "Warning: "); - if control.Find("Priority") != None and files[file]["priority"] != "" and files[file]["priority"] != control.Find("Priority"): + if control.Find("Priority") and files[file]["priority"] != "" and files[file]["priority"] != control.Find("Priority"): reject("%s control file lists priority as `%s', but changes file has `%s'." % (file, control.Find("Priority", ""), files[file]["priority"]),"Warning: "); files[file]["package"] = package; files[file]["architecture"] = architecture; files[file]["version"] = version; files[file]["maintainer"] = control.Find("Maintainer", ""); - if file[-5:] == ".udeb": + if file.endswith(".udeb"): files[file]["dbtype"] = "udeb"; - elif file[-4:] == ".deb": + elif file.endswith(".deb"): files[file]["dbtype"] = "deb"; else: reject("%s is neither a .deb or a .udeb." % (file)); files[file]["source"] = control.Find("Source", files[file]["package"]); # Get the source version source = files[file]["source"]; - source_version = "" - if string.find(source, "(") != -1: - m = utils.re_extract_src_version.match(source) - source = m.group(1) - source_version = m.group(2) + source_version = ""; + if source.find("(") != -1: + m = utils.re_extract_src_version.match(source); + source = m.group(1); + source_version = m.group(2); if not source_version: source_version = files[file]["version"]; files[file]["source package"] = source; @@ -597,7 +525,7 @@ def check_files(): reject("source version (%s) for %s doesn't match changes version %s." 
% (source_version, file, changes["version"])); else: # Check in the SQL database - if not Katie.source_exists(source_package, source_version): + if not Katie.source_exists(source_package, source_version, changes["distribution"].keys()): # Check in one of the other directories source_epochless_version = utils.re_no_epoch.sub('', source_version); dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version); @@ -610,10 +538,13 @@ def check_files(): # Check the version and for file overwrites reject(Katie.check_binary_against_db(file),""); + check_deb_ar(file, control) + # Checks for a source package... else: m = utils.re_issource.match(file); - if m != None: + if m: + has_source = 1; files[file]["package"] = m.group(1); files[file]["version"] = m.group(2); files[file]["type"] = m.group(3); @@ -636,7 +567,7 @@ def check_files(): # Check the signature of a .dsc file if files[file]["type"] == "dsc": - dsc["fingerprint"] = check_signature(file); + dsc["fingerprint"] = utils.check_signature(file, reject); files[file]["architecture"] = "source"; @@ -654,19 +585,16 @@ def check_files(): # Handle component mappings for map in Cnf.ValueList("ComponentMappings"): - (source, dest) = string.split(map); + (source, dest) = map.split(); if files[file]["component"] == source: files[file]["original component"] = source; files[file]["component"] = dest; + # Ensure the component is valid for the target suite if Cnf.has_key("Suite:%s::Components" % (suite)) and \ files[file]["component"] not in Cnf.ValueList("Suite::%s::Components" % (suite)): reject("unknown component `%s' for suite `%s'." % (files[file]["component"], suite)); - continue - - # See if the package is NEW - if not Katie.in_override_p(files[file]["package"], files[file]["component"], suite, files[file].get("dbtype",""), file): - files[file]["new"] = 1; + continue; # Validate the component component = files[file]["component"]; @@ -675,8 +603,12 @@ def check_files(): reject("file '%s' has unknown component '%s'." 
% (file, component)); continue; + # See if the package is NEW + if not Katie.in_override_p(files[file]["package"], files[file]["component"], suite, files[file].get("dbtype",""), file): + files[file]["new"] = 1; + # Validate the priority - if string.find(files[file]["priority"],'/') != -1: + if files[file]["priority"].find('/') != -1: reject("file '%s' has invalid priority '%s' [contains '/']." % (file, files[file]["priority"])); # Determine the location @@ -711,116 +643,249 @@ SELECT c.name FROM binaries b, bin_associations ba, suite s, location l, # If the .changes file says it has source, it must have source. if changes["architecture"].has_key("source"): - has_source = 0; - for file in file_keys: - if files[file]["type"] == "dsc": - has_source = 1; if not has_source: reject("no source found and Architecture line in changes mention source."); + if not has_binaries and Cnf.FindB("Dinstall::Reject::NoSourceOnly"): + reject("source only uploads are not supported."); + ############################################################################### -def check_dsc (): +def check_dsc(): global reprocess; + # Ensure there is source to check + if not changes["architecture"].has_key("source"): + return 1; + + # Find the .dsc + dsc_filename = None; for file in files.keys(): - # The .orig.tar.gz can disappear out from under us is it's a - # duplicate of one in the archive. - if not files.has_key(file): - continue; if files[file]["type"] == "dsc": - # Parse the .dsc file - try: - dsc.update(utils.parse_changes(file, dsc_whitespace_rules=1)); - except utils.cant_open_exc: - # if not -n copy_to_holding() will have done this for us... - if Options["No-Action"]: - reject("can't read .dsc file '%s'." % (file)); - except utils.changes_parse_error_exc, line: - reject("error parsing .dsc file '%s', can't grok: %s." % (file, line)); - except utils.invalid_dsc_format_exc, line: - reject("syntax error in .dsc file '%s', line %s." 
% (file, line)); - # Build up the file list of files mentioned by the .dsc + if dsc_filename: + reject("can not process a .changes file with multiple .dsc's."); + return 0; + else: + dsc_filename = file; + + # If there isn't one, we have nothing to do. (We have reject()ed the upload already) + if not dsc_filename: + reject("source uploads must contain a dsc file"); + return 0; + + # Parse the .dsc file + try: + dsc.update(utils.parse_changes(dsc_filename, signing_rules=1)); + except utils.cant_open_exc: + # if not -n copy_to_holding() will have done this for us... + if Options["No-Action"]: + reject("%s: can't read file." % (dsc_filename)); + except utils.changes_parse_error_exc, line: + reject("%s: parse error, can't grok: %s." % (dsc_filename, line)); + except utils.invalid_dsc_format_exc, line: + reject("%s: syntax error on line %s." % (dsc_filename, line)); + # Build up the file list of files mentioned by the .dsc + try: + dsc_files.update(utils.build_file_list(dsc, is_a_dsc=1)); + except utils.no_files_exc: + reject("%s: no Files: field." % (dsc_filename)); + return 0; + except utils.changes_parse_error_exc, line: + reject("%s: parse error, can't grok: %s." % (dsc_filename, line)); + return 0; + + # Enforce mandatory fields + for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"): + if not dsc.has_key(i): + reject("%s: missing mandatory field `%s'." % (dsc_filename, i)); + return 0; + + # Validate the source and version fields + if not re_valid_pkg_name.match(dsc["source"]): + reject("%s: invalid source name '%s'." % (dsc_filename, dsc["source"])); + if not re_valid_version.match(dsc["version"]): + reject("%s: invalid version number '%s'." % (dsc_filename, dsc["version"])); + + # Bumping the version number of the .dsc breaks extraction by stable's + # dpkg-source. So let's not do that... + if dsc["format"] != "1.0": + reject("%s: incompatible 'Format' version produced by a broken version of dpkg-dev 1.9.1{3,4}." 
% (dsc_filename)); + + # Validate the Maintainer field + try: + utils.fix_maintainer (dsc["maintainer"]); + except utils.ParseMaintError, msg: + reject("%s: Maintainer field ('%s') failed to parse: %s" \ + % (dsc_filename, dsc["maintainer"], msg)); + + # Validate the build-depends field(s) + for field_name in [ "build-depends", "build-depends-indep" ]: + field = dsc.get(field_name); + if field: + # Check for broken dpkg-dev lossage... + if field.startswith("ARRAY"): + reject("%s: invalid %s field produced by a broken version of dpkg-dev (1.10.11)" % (dsc_filename, field_name.title())); + + # Have apt try to parse them... try: - dsc_files.update(utils.build_file_list(dsc, is_a_dsc=1)); - except utils.no_files_exc: - reject("no Files: field in .dsc file."); - continue; - except utils.changes_parse_error_exc, line: - reject("error parsing .dsc file '%s', can't grok: %s." % (file, line)); - continue; + apt_pkg.ParseSrcDepends(field); + except: + reject("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title())); + pass; + + # Ensure the version number in the .dsc matches the version number in the .changes + epochless_dsc_version = utils.re_no_epoch.sub('', dsc["version"]); + changes_version = files[dsc_filename]["version"]; + if epochless_dsc_version != files[dsc_filename]["version"]: + reject("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version)); + + # Ensure there is a .tar.gz in the .dsc file + has_tar = 0; + for f in dsc_files.keys(): + m = utils.re_issource.match(f); + if not m: + reject("%s: %s in Files field not recognised as source." % (dsc_filename, f)); + type = m.group(3); + if type == "orig.tar.gz" or type == "tar.gz": + has_tar = 1; + if not has_tar: + reject("%s: no .tar.gz or .orig.tar.gz in 'Files' field." 
% (dsc_filename)); + + # Ensure source is newer than existing source in target suites + reject(Katie.check_source_against_db(dsc_filename),""); + + (reject_msg, is_in_incoming) = Katie.check_dsc_against_db(dsc_filename); + reject(reject_msg, ""); + if is_in_incoming: + if not Options["No-Action"]: + copy_to_holding(is_in_incoming); + orig_tar_gz = os.path.basename(is_in_incoming); + files[orig_tar_gz] = {}; + files[orig_tar_gz]["size"] = os.stat(orig_tar_gz)[stat.ST_SIZE]; + files[orig_tar_gz]["md5sum"] = dsc_files[orig_tar_gz]["md5sum"]; + files[orig_tar_gz]["section"] = files[dsc_filename]["section"]; + files[orig_tar_gz]["priority"] = files[dsc_filename]["priority"]; + files[orig_tar_gz]["component"] = files[dsc_filename]["component"]; + files[orig_tar_gz]["type"] = "orig.tar.gz"; + reprocess = 2; - # Enforce mandatory fields - for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"): - if not dsc.has_key(i): - reject("Missing field `%s' in dsc file." % (i)); - - # Validate the source and version fields - if dsc.has_key("source") and not re_valid_pkg_name.match(dsc["source"]): - reject("%s: invalid source name '%s'." % (file, dsc["source"])); - if dsc.has_key("version") and not re_valid_version.match(dsc["version"]): - reject("%s: invalid version number '%s'." % (file, dsc["version"])); - - # The dpkg maintainer from hell strikes again! Bumping the - # version number of the .dsc breaks extraction by stable's - # dpkg-source. 
- if dsc["format"] != "1.0": - reject("""[dpkg-sucks] source package was produced by a broken version - of dpkg-dev 1.9.1{3,4}; please rebuild with >= 1.9.15 version - installed."""); - - # Ensure the version number in the .dsc matches the version number in the .changes - epochless_dsc_version = utils.re_no_epoch.sub('', dsc.get("version")); - changes_version = files[file]["version"]; - if epochless_dsc_version != files[file]["version"]: - reject("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version)); - - # Ensure there is a .tar.gz in the .dsc file - has_tar = 0; - for f in dsc_files.keys(): - m = utils.re_issource.match(f); - if not m: - reject("%s mentioned in the Files field of %s not recognised as source." % (f, file)); - type = m.group(3); - if type == "orig.tar.gz" or type == "tar.gz": - has_tar = 1; - if not has_tar: - reject("no .tar.gz or .orig.tar.gz listed in the Files field of %s." % (file)); - - # Ensure source is newer than existing source in target suites - reject(Katie.check_source_against_db(file),""); - - (reject_msg, is_in_incoming) = Katie.check_dsc_against_db(file); - reject(reject_msg, ""); - if is_in_incoming: - if not Options["No-Action"]: - copy_to_holding(is_in_incoming); - orig_tar_gz = os.path.basename(is_in_incoming); - files[orig_tar_gz] = {}; - files[orig_tar_gz]["size"] = os.stat(orig_tar_gz)[stat.ST_SIZE]; - files[orig_tar_gz]["md5sum"] = dsc_files[orig_tar_gz]["md5sum"]; - files[orig_tar_gz]["section"] = files[file]["section"]; - files[orig_tar_gz]["priority"] = files[file]["priority"]; - files[orig_tar_gz]["component"] = files[file]["component"]; - files[orig_tar_gz]["type"] = "orig.tar.gz"; - reprocess = 2; + return 1; ################################################################################ -# Some cunning stunt broke dpkg-source in dpkg 1.8{,.1}; detect the -# resulting bad source packages and reject them. 
+def get_changelog_versions(source_dir):
+    """Extracts the source package and (optionally) grabs the
+    version history out of debian/changelog for the BTS."""
+
+    # Find the .dsc (again)
+    dsc_filename = None;
+    for file in files.keys():
+        if files[file]["type"] == "dsc":
+            dsc_filename = file;
 
-# Even more amusingly the fix in 1.8.1.1 didn't actually fix the
-# problem just changed the symptoms.
+    # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
+    if not dsc_filename:
+        return;
 
-def check_diff ():
-    for filename in files.keys():
-        if files[filename]["type"] == "diff.gz":
-            file = gzip.GzipFile(filename, 'r');
-            for line in file.readlines():
-                if re_bad_diff.search(line):
-                    reject("[dpkg-sucks] source package was produced by a broken version of dpkg-dev 1.8.x; please rebuild with >= 1.8.3 version installed.");
-                    break;
+    # Create a symlink mirror of the source files in our temporary directory
+    for f in files.keys():
+        m = utils.re_issource.match(f);
+        if m:
+            src = os.path.join(source_dir, f);
+            # If a file is missing for whatever reason, give up.
+            if not os.path.exists(src):
+                return;
+            type = m.group(3);
+            if type == "orig.tar.gz" and pkg.orig_tar_gz:
+                continue;
+            dest = os.path.join(os.getcwd(), f);
+            os.symlink(src, dest);
+
+    # If the orig.tar.gz is not a part of the upload, create a symlink to the
+    # existing copy.
+    if pkg.orig_tar_gz:
+        dest = os.path.join(os.getcwd(), os.path.basename(pkg.orig_tar_gz));
+        os.symlink(pkg.orig_tar_gz, dest);
+
+    # Extract the source
+    cmd = "dpkg-source -sn -x %s" % (dsc_filename);
+    (result, output) = commands.getstatusoutput(cmd);
+    if (result != 0):
+        reject("'dpkg-source -x' failed for %s [return code: %s]." 
% (dsc_filename, result)); + reject(utils.prefix_multi_line_string(output, " [dpkg-source output:] "), ""); + return; + + if not Cnf.Find("Dir::Queue::BTSVersionTrack"): + return; + + # Get the upstream version + upstr_version = utils.re_no_epoch.sub('', dsc["version"]); + if re_strip_revision.search(upstr_version): + upstr_version = re_strip_revision.sub('', upstr_version); + + # Ensure the changelog file exists + changelog_filename = "%s-%s/debian/changelog" % (dsc["source"], upstr_version); + if not os.path.exists(changelog_filename): + reject("%s: debian/changelog not found in extracted source." % (dsc_filename)); + return; + + # Parse the changelog + dsc["bts changelog"] = ""; + changelog_file = utils.open_file(changelog_filename); + for line in changelog_file.readlines(): + m = re_changelog_versions.match(line); + if m: + dsc["bts changelog"] += line; + changelog_file.close(); + + # Check we found at least one revision in the changelog + if not dsc["bts changelog"]: + reject("%s: changelog format not recognised (empty version tree)." % (dsc_filename)); + +######################################## + +def check_source(): + # Bail out if: + # a) there's no source + # or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files' + # or c) the orig.tar.gz is MIA + if not changes["architecture"].has_key("source") or reprocess == 2 \ + or pkg.orig_tar_gz == -1: + return; + + # Create a temporary directory to extract the source into + if Options["No-Action"]: + tmpdir = tempfile.mktemp(); + else: + # We're in queue/holding and can create a random directory. 
+ tmpdir = "%s" % (os.getpid()); + os.mkdir(tmpdir); + + # Move into the temporary directory + cwd = os.getcwd(); + os.chdir(tmpdir); + + # Get the changelog version history + get_changelog_versions(cwd); + + # Move back and cleanup the temporary tree + os.chdir(cwd); + try: + shutil.rmtree(tmpdir); + except OSError, e: + if errno.errorcode[e.errno] != 'EACCES': + utils.fubar("%s: couldn't remove tmp dir for source tree." % (dsc["source"])); + + reject("%s: source tree could not be cleanly removed." % (dsc["source"])); + # We probably have u-r or u-w directories so chmod everything + # and try again. + cmd = "chmod -R u+rwx %s" % (tmpdir) + result = os.system(cmd) + if result != 0: + utils.fubar("'%s' failed with result %s." % (cmd, result)); + shutil.rmtree(tmpdir); + except: + utils.fubar("%s: couldn't remove tmp dir for source tree." % (dsc["source"])); ################################################################################ @@ -833,32 +898,44 @@ def check_urgency (): if changes["urgency"] not in Cnf.ValueList("Urgency::Valid"): reject("%s is not a valid urgency; it will be treated as %s by testing." % (changes["urgency"], Cnf["Urgency::Default"]), "Warning: "); changes["urgency"] = Cnf["Urgency::Default"]; - changes["urgency"] = string.lower(changes["urgency"]); + changes["urgency"] = changes["urgency"].lower(); ################################################################################ -def md5sum_size_check(file, orig_file): - try: - file_handle = utils.open_file(file); - except utils.cant_open_exc: - return; - - # Check md5sum - if apt_pkg.md5sum(file_handle) != files[file]["md5sum"]: - reject("%s: md5sum check failed." 
% (file)); - file_handle.close(); - # Check size - actual_size = os.stat(file)[stat.ST_SIZE]; - size = int(files[file]["size"]); - if size != actual_size: - reject("%s: actual file size (%s) does not match size (%s) in %s" - % (file, actual_size, size, orig_file)); - def check_md5sums (): for file in files.keys(): - md5sum_size_check(file, ".changes"); + try: + file_handle = utils.open_file(file); + except utils.cant_open_exc: + continue; + + # Check md5sum + if apt_pkg.md5sum(file_handle) != files[file]["md5sum"]: + reject("%s: md5sum check failed." % (file)); + file_handle.close(); + # Check size + actual_size = os.stat(file)[stat.ST_SIZE]; + size = int(files[file]["size"]); + if size != actual_size: + reject("%s: actual file size (%s) does not match size (%s) in .changes" + % (file, actual_size, size)); + for file in dsc_files.keys(): - md5sum_size_check(file, ".dsc"); + try: + file_handle = utils.open_file(file); + except utils.cant_open_exc: + continue; + + # Check md5sum + if apt_pkg.md5sum(file_handle) != dsc_files[file]["md5sum"]: + reject("%s: md5sum check failed." % (file)); + file_handle.close(); + # Check size + actual_size = os.stat(file)[stat.ST_SIZE]; + size = int(dsc_files[file]["size"]); + if size != actual_size: + reject("%s: actual file size (%s) does not match size (%s) in .dsc" + % (file, actual_size, size)); ################################################################################ @@ -894,7 +971,14 @@ def check_timestamps(): deb_file = utils.open_file(filename); apt_inst.debExtract(deb_file,tar.callback,"control.tar.gz"); deb_file.seek(0); - apt_inst.debExtract(deb_file,tar.callback,"data.tar.gz"); + try: + apt_inst.debExtract(deb_file,tar.callback,"data.tar.gz") + except SystemError, e: + # If we can't find a data.tar.gz, look for data.tar.bz2 instead. 
+ if not re.match(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)): + raise + deb_file.seek(0) + apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2") deb_file.close(); # future_files = tar.future_files.keys(); @@ -915,7 +999,7 @@ def check_timestamps(): % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date))); except: - reject("%s: timestamp check failed; caught %s" % (filename, sys.exc_type)); + reject("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value)); ################################################################################ ################################################################################ @@ -952,18 +1036,24 @@ def action (): (summary, short_summary) = Katie.build_summaries(); - byhand = new = ""; - for file in files.keys(): - if files[file].has_key("byhand"): - byhand = 1 - elif files[file].has_key("new"): - new = 1 + # q-unapproved hax0ring + queue_info = { + "New": { "is": is_new, "process": acknowledge_new }, + "Byhand" : { "is": is_byhand, "process": do_byhand }, + "Unembargo" : { "is": is_unembargo, "process": queue_unembargo }, + "Embargo" : { "is": is_embargo, "process": queue_embargo }, + } + queues = [ "New", "Byhand" ] + if Cnf.FindB("Dinstall::SecurityQueueHandling"): + queues += [ "Unembargo", "Embargo" ] (prompt, answer) = ("", "XXX") if Options["No-Action"] or Options["Automatic"]: answer = 'S' - if string.find(reject_message, "Rejected") != -1: + queuekey = '' + + if reject_message.find("Rejected") != -1: if upload_too_new(): print "SKIP (too new)\n" + reject_message,; prompt = "[S]kip, Quit ?"; @@ -972,80 +1062,146 @@ def action (): prompt = "[R]eject, Skip, Quit ?"; if Options["Automatic"]: answer = 'R'; - elif new: - print "NEW to %s\n%s%s" % (string.join(changes["distribution"].keys(), ", "), reject_message, summary),; - prompt = "[N]ew, Skip, Quit ?"; - if Options["Automatic"]: - answer = 'N'; - elif byhand: - print "BYHAND\n" + reject_message + summary,; - 
prompt = "[B]yhand, Skip, Quit ?"; - if Options["Automatic"]: - answer = 'B'; else: - print "ACCEPT\n" + reject_message + summary,; - prompt = "[A]ccept, Skip, Quit ?"; - if Options["Automatic"]: - answer = 'A'; + queue = None + for q in queues: + if queue_info[q]["is"](): + queue = q + break + if queue: + print "%s for %s\n%s%s" % ( + queue.upper(), ", ".join(changes["distribution"].keys()), + reject_message, summary), + queuekey = queue[0].upper() + if queuekey in "RQSA": + queuekey = "D" + prompt = "[D]ivert, Skip, Quit ?" + else: + prompt = "[%s]%s, Skip, Quit ?" % (queuekey, queue[1:].lower()) + if Options["Automatic"]: + answer = queuekey + else: + print "ACCEPT\n" + reject_message + summary,; + prompt = "[A]ccept, Skip, Quit ?"; + if Options["Automatic"]: + answer = 'A'; - while string.find(prompt, answer) == -1: + while prompt.find(answer) == -1: answer = utils.our_raw_input(prompt); m = katie.re_default_answer.match(prompt); if answer == "": answer = m.group(1); - answer = string.upper(answer[:1]); + answer = answer[:1].upper(); if answer == 'R': os.chdir (pkg.directory); Katie.do_reject(0, reject_message); elif answer == 'A': accept(summary, short_summary); - elif answer == 'B': - do_byhand(summary); - elif answer == 'N': - acknowledge_new (summary); + remove_from_unchecked() + elif answer == queuekey: + queue_info[queue]["process"](summary) + remove_from_unchecked() elif answer == 'Q': sys.exit(0) +def remove_from_unchecked(): + os.chdir (pkg.directory); + for file in files.keys(): + os.unlink(file); + os.unlink(pkg.changes_file); + ################################################################################ def accept (summary, short_summary): Katie.accept(summary, short_summary); Katie.check_override(); - # Finally, remove the originals from the unchecked directory - os.chdir (pkg.directory); - for file in files.keys(): - os.unlink(file); - os.unlink(pkg.changes_file); 
+################################################################################ + +def move_to_dir (dest, perms=0660, changesperms=0664): + utils.move (pkg.changes_file, dest, perms=changesperms); + file_keys = files.keys(); + for file in file_keys: + utils.move (file, dest, perms=perms); + +################################################################################ + +def is_unembargo (): + q = Katie.projectB.query( + "SELECT package FROM disembargo WHERE package = '%s' AND version = '%s'" % + (changes["source"], changes["version"])) + ql = q.getresult() + if ql: + return 1 + + if pkg.directory == Cnf["Dir::Queue::Disembargo"].rstrip("/"): + if changes["architecture"].has_key("source"): + if Options["No-Action"]: return 1 + + Katie.projectB.query( + "INSERT INTO disembargo (package, version) VALUES ('%s', '%s')" % + (changes["source"], changes["version"])) + return 1 + + return 0 + +def queue_unembargo (summary): + print "Moving to UNEMBARGOED holding area." + Logger.log(["Moving to unembargoed", pkg.changes_file]); + + Katie.dump_vars(Cnf["Dir::Queue::Unembargoed"]); + move_to_dir(Cnf["Dir::Queue::Unembargoed"]) + Katie.queue_build("unembargoed", Cnf["Dir::Queue::Unembargoed"]) + + # Check for override disparities + Katie.Subst["__SUMMARY__"] = summary; + Katie.check_override(); ################################################################################ +def is_embargo (): + return 0 + +def queue_embargo (summary): + print "Moving to EMBARGOED holding area." 
+ Logger.log(["Moving to embargoed", pkg.changes_file]); + + Katie.dump_vars(Cnf["Dir::Queue::Embargoed"]); + move_to_dir(Cnf["Dir::Queue::Embargoed"]) + Katie.queue_build("embargoed", Cnf["Dir::Queue::Embargoed"]) + + # Check for override disparities + Katie.Subst["__SUMMARY__"] = summary; + Katie.check_override(); + +################################################################################ + +def is_byhand (): + for file in files.keys(): + if files[file].has_key("byhand"): + return 1 + return 0 + def do_byhand (summary): print "Moving to BYHAND holding area." Logger.log(["Moving to byhand", pkg.changes_file]); Katie.dump_vars(Cnf["Dir::Queue::Byhand"]); - - file_keys = files.keys(); - - # Move all the files into the byhand directory - utils.move (pkg.changes_file, Cnf["Dir::Queue::Byhand"]); - for file in file_keys: - utils.move (file, Cnf["Dir::Queue::Byhand"], perms=0660); + move_to_dir(Cnf["Dir::Queue::Byhand"]) # Check for override disparities Katie.Subst["__SUMMARY__"] = summary; Katie.check_override(); - # Finally remove the originals. 
- os.chdir (pkg.directory); - for file in file_keys: - os.unlink(file); - os.unlink(pkg.changes_file); - ################################################################################ +def is_new (): + for file in files.keys(): + if files[file].has_key("new"): + return 1 + return 0 + def acknowledge_new (summary): Subst = Katie.Subst; @@ -1053,25 +1209,13 @@ def acknowledge_new (summary): Logger.log(["Moving to new", pkg.changes_file]); Katie.dump_vars(Cnf["Dir::Queue::New"]); - - file_keys = files.keys(); - - # Move all the files into the 'new' directory - utils.move (pkg.changes_file, Cnf["Dir::Queue::New"]); - for file in file_keys: - utils.move (file, Cnf["Dir::Queue::New"], perms=0660); + move_to_dir(Cnf["Dir::Queue::New"]) if not Options["No-Mail"]: print "Sending new ack."; Subst["__SUMMARY__"] = summary; new_ack_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.new"); - utils.send_mail(new_ack_message,""); - - # Finally remove the originals. - os.chdir (pkg.directory); - for file in file_keys: - os.unlink(file); - os.unlink(pkg.changes_file); + utils.send_mail(new_ack_message); ################################################################################ @@ -1090,6 +1234,9 @@ def process_it (changes_file): # Reset some globals reprocess = 1; Katie.init_vars(); + # Some defaults in case we can't fully process the .changes file + changes["maintainer2047"] = Cnf["Dinstall::MyEmailAddress"]; + changes["changedby2047"] = Cnf["Dinstall::MyEmailAddress"]; reject_message = ""; # Absolutize the filename to avoid the requirement of being in the @@ -1109,14 +1256,18 @@ def process_it (changes_file): # Relativize the filename so we use the copy in holding # rather than the original... 
pkg.changes_file = os.path.basename(pkg.changes_file); - changes["fingerprint"] = check_signature(pkg.changes_file); - changes_valid = check_changes(); - if changes_valid: + changes["fingerprint"] = utils.check_signature(pkg.changes_file, reject); + if changes["fingerprint"]: + valid_changes_p = check_changes(); + else: + valid_changes_p = 0; + if valid_changes_p: while reprocess: check_distributions(); check_files(); - check_dsc(); - check_diff(); + valid_dsc_p = check_dsc(); + if valid_dsc_p: + check_source(); check_md5sums(); check_urgency(); check_timestamps(); @@ -1135,24 +1286,17 @@ def process_it (changes_file): ############################################################################### def main(): - global Cnf, Options, Logger, nmu; + global Cnf, Options, Logger; changes_files = init(); - if Options["Help"]: - usage(); - - if Options["Version"]: - print "jennifer %s" % (jennifer_version); - sys.exit(0); - # -n/--dry-run invalidates some other options which would involve things happening if Options["No-Action"]: Options["Automatic"] = ""; # Ensure all the arguments we were given are .changes files for file in changes_files: - if file[-8:] != ".changes": + if not file.endswith(".changes"): utils.warn("Ignoring '%s' because it's not a .changes file." % (file)); changes_files.remove(file); @@ -1161,14 +1305,20 @@ def main(): # Check that we aren't going to clash with the daily cron job - if not Options["No-Action"] and os.path.exists("%s/Archive_Maintenance_In_Progress" % (Cnf["Dir::Root"])) and not Options["No-Lock"]: + if not Options["No-Action"] and os.path.exists("%s/daily.lock" % (Cnf["Dir::Lock"])) and not Options["No-Lock"]: utils.fubar("Archive maintenance in progress. 
Try again later."); # Obtain lock if not in no-action mode and initialize the log if not Options["No-Action"]: lock_fd = os.open(Cnf["Dinstall::LockFile"], os.O_RDWR | os.O_CREAT); - fcntl.lockf(lock_fd, FCNTL.F_TLOCK); + try: + fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB); + except IOError, e: + if errno.errorcode[e.errno] == 'EACCES' or errno.errorcode[e.errno] == 'EAGAIN': + utils.fubar("Couldn't obtain lock; assuming another jennifer is already running."); + else: + raise; Logger = Katie.Logger = logging.Logger(Cnf, "jennifer"); # debian-{devel-,}-changes@lists.debian.org toggles writes access based on this header @@ -1196,7 +1346,7 @@ def main(): if accept_count: sets = "set" if accept_count > 1: - sets = "sets" + sets = "sets"; print "Accepted %d package %s, %s." % (accept_count, sets, utils.size_type(int(accept_bytes))); Logger.log(["total",accept_count,accept_bytes]);