X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=jennifer;h=c451a0538280ad911fe44c3bb2aeda9411b27c23;hb=3686a00f1001f2d5692fa5e706b898053e39191a;hp=a567e1ef9cd919400d8c4b1a190e4e976fb8c9c0;hpb=763770541a41d6497105a2c533c93a83095b17ea;p=dak.git diff --git a/jennifer b/jennifer index a567e1ef..c451a053 100755 --- a/jennifer +++ b/jennifer @@ -1,8 +1,8 @@ #!/usr/bin/env python # Checks Debian packages from Incoming -# Copyright (C) 2000, 2001, 2002 James Troup -# $Id: jennifer,v 1.24 2002-06-22 22:34:35 troup Exp $ +# Copyright (C) 2000, 2001, 2002, 2003, 2004 James Troup +# $Id: jennifer,v 1.46 2004-04-01 17:14:25 troup Exp $ # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -29,7 +29,7 @@ ################################################################################ -import FCNTL, errno, fcntl, gzip, os, re, select, shutil, stat, string, sys, time, traceback; +import commands, errno, fcntl, os, re, shutil, stat, sys, time, tempfile, traceback; import apt_inst, apt_pkg; import db_access, katie, logging, utils; @@ -41,11 +41,13 @@ re_bad_diff = re.compile("^[\-\+][\-\+][\-\+] /dev/null"); re_is_changes = re.compile(r"(.+?)_(.+?)_(.+?)\.changes$"); re_valid_version = re.compile(r"^([0-9]+:)?[0-9A-Za-z\.\-\+:]+$"); re_valid_pkg_name = re.compile(r"^[\dA-Za-z][\dA-Za-z\+\-\.]+$"); +re_changelog_versions = re.compile(r"^\w[-+0-9a-z.]+ \([^\(\) \t]+\)"); +re_strip_revision = re.compile(r"-([^-]+)$"); ################################################################################ # Globals -jennifer_version = "$Revision: 1.24 $"; +jennifer_version = "$Revision: 1.46 $"; Cnf = None; Options = None; @@ -87,6 +89,12 @@ def init(): changes_files = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv); Options = Cnf.SubTree("Dinstall::Options") + if Options["Help"]: + usage(); + elif Options["Version"]: + print "jennifer %s" % (jennifer_version); + sys.exit(0); + Katie = katie.Katie(Cnf); changes = Katie.pkg.changes; @@ -97,7 +105,7 @@ def init(): return changes_files; -######################################################################################### +################################################################################ def usage (exit_code=0): print """Usage: dinstall [OPTION]... [CHANGES]... @@ -109,187 +117,12 @@ def usage (exit_code=0): -V, --version display the version number and exit""" sys.exit(exit_code) -######################################################################################### - -# Our very own version of commands.getouputstatus(), hacked to support -# gpgv's status fd. 
-def get_status_output(cmd, status_read, status_write): - cmd = ['/bin/sh', '-c', cmd]; - p2cread, p2cwrite = os.pipe(); - c2pread, c2pwrite = os.pipe(); - errout, errin = os.pipe(); - pid = os.fork(); - if pid == 0: - # Child - os.close(0); - os.close(1); - os.dup(p2cread); - os.dup(c2pwrite); - os.close(2); - os.dup(errin); - for i in range(3, 256): - if i != status_write: - try: - os.close(i); - except: - pass; - try: - os.execvp(cmd[0], cmd); - finally: - os._exit(1); - - # parent - os.close(p2cread) - os.dup2(c2pread, c2pwrite); - os.dup2(errout, errin); - - output = status = ""; - while 1: - i, o, e = select.select([c2pwrite, errin, status_read], [], []); - more_data = []; - for fd in i: - r = os.read(fd, 8196); - if len(r) > 0: - more_data.append(fd); - if fd == c2pwrite or fd == errin: - output = output + r; - elif fd == status_read: - status = status + r; - else: - utils.fubar("Unexpected file descriptor [%s] returned from select\n" % (fd)); - if not more_data: - pid, exit_status = os.waitpid(pid, 0) - try: - os.close(status_write); - os.close(status_read); - os.close(c2pread); - os.close(c2pwrite); - os.close(p2cwrite); - os.close(errin); - os.close(errout); - except: - pass; - break; - - return output, status, exit_status; - -######################################################################################### - -def Dict(**dict): return dict +################################################################################ def reject (str, prefix="Rejected: "): global reject_message; if str: - reject_message = reject_message + prefix + str + "\n"; - -######################################################################################### - -def check_signature (filename): - if not utils.re_taint_free.match(os.path.basename(filename)): - reject("!!WARNING!! tainted filename: '%s'." % (filename)); - return 0; - - status_read, status_write = os.pipe(); - cmd = "gpgv --status-fd %s --keyring %s --keyring %s %s" \ - % (status_write, Cnf["Dinstall::PGPKeyring"], Cnf["Dinstall::GPGKeyring"], filename); - (output, status, exit_status) = get_status_output(cmd, status_read, status_write); - - # Process the status-fd output - keywords = {}; - bad = internal_error = ""; - for line in string.split(status, '\n'): - line = string.strip(line); - if line == "": - continue; - split = string.split(line); - if len(split) < 2: - internal_error = internal_error + "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line); - continue; - (gnupg, keyword) = split[:2]; - if gnupg != "[GNUPG:]": - internal_error = internal_error + "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg); - continue; - args = split[2:]; - if keywords.has_key(keyword) and keyword != "NODATA": - internal_error = internal_error + "found duplicate status token ('%s').\n" % (keyword); - continue; - else: - keywords[keyword] = args; - - # If we failed to parse the status-fd output, let's just whine and bail now - if internal_error: - reject("internal error while performing signature check on %s." % (filename)); - reject(internal_error, ""); - reject("Please report the above errors to the Archive maintainers by replying to this mail.", ""); - return None; - - # Now check for obviously bad things in the processed output - if keywords.has_key("SIGEXPIRED"): - reject("key used to sign %s has expired." % (filename)); - bad = 1; - if keywords.has_key("KEYREVOKED"): - reject("key used to sign %s has been revoked." % (filename)); - bad = 1; - if keywords.has_key("BADSIG"): - reject("bad signature on %s." 
% (filename)); - bad = 1; - if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"): - reject("failed to check signature on %s." % (filename)); - bad = 1; - if keywords.has_key("NO_PUBKEY"): - reject("key used to sign %s not found in keyring." % (filename)); - bad = 1; - if keywords.has_key("BADARMOR"): - reject("ascii armour of signature was corrupt in %s." % (filename)); - bad = 1; - if keywords.has_key("NODATA"): - reject("no signature found in %s." % (filename)); - bad = 1; - - if bad: - return None; - - # Next check gpgv exited with a zero return code - if exit_status: - reject("gpgv failed while checking %s." % (filename)); - if string.strip(status): - reject(utils.prefix_multi_line_string(status, " [GPG status-fd output:] "), ""); - else: - reject(utils.prefix_multi_line_string(output, " [GPG output:] "), ""); - return None; - - # Sanity check the good stuff we expect - if not keywords.has_key("VALIDSIG"): - reject("signature on %s does not appear to be valid [No VALIDSIG]." % (filename)); - bad = 1; - else: - args = keywords["VALIDSIG"]; - if len(args) < 1: - reject("internal error while checking signature on %s." % (filename)); - bad = 1; - else: - fingerprint = args[0]; - if not keywords.has_key("GOODSIG"): - reject("signature on %s does not appear to be valid [No GOODSIG]." % (filename)); - bad = 1; - if not keywords.has_key("SIG_ID"): - reject("signature on %s does not appear to be valid [No SIG_ID]." % (filename)); - bad = 1; - - # Finally ensure there's not something we don't recognise - known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="", - SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="", - NODATA=""); - - for keyword in keywords.keys(): - if not known_keywords.has_key(keyword): - reject("found unknown status token '%s' from gpgv with args '%s' in %s." % (keyword, repr(keywords[keyword]), filename)); - bad = 1; - - if bad: - return None; - else: - return fingerprint; + reject_message += prefix + str + "\n"; ################################################################################ @@ -306,7 +139,7 @@ def copy_to_holding(filename): # Shouldn't happen, but will if, for example, someone lists a # file twice in the .changes. if errno.errorcode[e.errno] == 'EEXIST': - reject("%s already exists in holding area; can not overwrite." % (base_filename)); + reject("%s: already exists in holding area; can not overwrite." % (base_filename)); return; raise; @@ -317,17 +150,16 @@ def copy_to_holding(filename): # O_CREAT | O_EXCLed ghost file, so add the file to the list # of 'in holding' even if it's not the real file. if errno.errorcode[e.errno] == 'ENOENT': - reject("can not copy %s to holding area: file not found." % (base_filename)); + reject("%s: can not copy to holding area: file not found." % (base_filename)); os.unlink(dest); return; elif errno.errorcode[e.errno] == 'EACCES': - reject("can not copy %s to holding area: read permission denied." % (base_filename)); + reject("%s: can not copy to holding area: read permission denied." % (base_filename)); os.unlink(dest); return; raise; in_holding[base_filename] = ""; - return dest; ################################################################################ @@ -338,7 +170,7 @@ def clean_holding(): os.chdir(Cnf["Dir::Queue::Holding"]); for file in in_holding.keys(): if os.path.exists(file): - if string.find(file, '/') != -1: + if file.find('/') != -1: utils.fubar("WTF? clean_holding() got a file ('%s') with / in it!" 
% (file)); else: os.unlink(file); @@ -359,25 +191,25 @@ def check_changes(): try: changes.update(utils.parse_changes(filename)); except utils.cant_open_exc: - reject("can't read changes file '%s'." % (filename)); + reject("%s: can't read file." % (filename)); return 0; except utils.changes_parse_error_exc, line: - reject("error parsing changes file '%s', can't grok: %s." % (filename, line)); + reject("%s: parse error, can't grok: %s." % (filename, line)); return 0; # Parse the Files field from the .changes into another dictionary try: files.update(utils.build_file_list(changes)); except utils.changes_parse_error_exc, line: - reject("error parsing changes file '%s', can't grok: %s." % (filename, line)); + reject("%s: parse error, can't grok: %s." % (filename, line)); except utils.nk_format_exc, format: - reject("unknown format '%s' of changes file '%s'." % (format, filename)); + reject("%s: unknown format '%s'." % (filename, format)); return 0; # Check for mandatory fields for i in ("source", "binary", "architecture", "version", "distribution", "maintainer", "files"): if not changes.has_key(i): - reject("Missing field `%s' in changes file." % (i)); + reject("%s: Missing mandatory field `%s'." % (filename, i)); return 0 # Avoid errors during later tests # Split multi-value fields into a lower-level dictionary @@ -386,7 +218,7 @@ def check_changes(): if o != "": del changes[i] changes[i] = {} - for j in string.split(o): + for j in o.split(): changes[i][j] = 1 # Fix the Maintainer: field to be RFC822 compatible @@ -399,7 +231,7 @@ def check_changes(): if changes.has_key("closes"): for i in changes["closes"].keys(): if katie.re_isanum.match (i) == None: - reject("`%s' from Closes field isn't a number." % (i)); + reject("%s: `%s' from Closes field isn't a number." % (filename, i)); # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison) @@ -411,7 +243,12 @@ def check_changes(): base_filename = os.path.basename(filename); for dir in [ "Accepted", "Byhand", "Done", "New" ]: if os.path.exists(Cnf["Dir::Queue::%s" % (dir) ]+'/'+base_filename): - reject("a changes file with the same name already exists in the %s directory." % (dir)); + reject("%s: a file with this name already exists in the %s directory." % (base_filename, dir)); + + # Check the .changes is non-empty + if not files: + reject("%s: nothing to do (Files field is empty)." % (base_filename)) + return 0; return 1; @@ -422,7 +259,7 @@ def check_distributions(): # Handle suite mappings for map in Cnf.ValueList("SuiteMappings"): - args = string.split(map); + args = map.split(); type = args[0]; if type == "map" or type == "silent-map": (source, dest) = args[1:3]; @@ -474,6 +311,8 @@ def check_files(): os.chdir(cwd); reprocess = 0; + has_binaries = 0; + has_source = 0; for file in file_keys: # Ensure the file does not already exist in one of the accepted directories @@ -498,7 +337,8 @@ def check_files(): files[file]["byhand"] = 1; files[file]["type"] = "byhand"; # Checks for a binary package... - elif utils.re_isadeb.match(file) != None: + elif utils.re_isadeb.match(file): + has_binaries = 1; files[file]["type"] = "deb"; # Extract package control information @@ -544,30 +384,35 @@ def check_files(): if not changes["architecture"].has_key(architecture): reject("%s: control file lists arch as `%s', which isn't in changes file." % (file, architecture)); + # Sanity-check the Depends field + depends = control.Find("Depends"); + if depends == '': + reject("%s: Depends field is empty." 
% (file)); + # Check the section & priority match those given in the .changes (non-fatal) - if control.Find("Section") != None and files[file]["section"] != "" and files[file]["section"] != control.Find("Section"): + if control.Find("Section") and files[file]["section"] != "" and files[file]["section"] != control.Find("Section"): reject("%s control file lists section as `%s', but changes file has `%s'." % (file, control.Find("Section", ""), files[file]["section"]), "Warning: "); - if control.Find("Priority") != None and files[file]["priority"] != "" and files[file]["priority"] != control.Find("Priority"): + if control.Find("Priority") and files[file]["priority"] != "" and files[file]["priority"] != control.Find("Priority"): reject("%s control file lists priority as `%s', but changes file has `%s'." % (file, control.Find("Priority", ""), files[file]["priority"]),"Warning: "); files[file]["package"] = package; files[file]["architecture"] = architecture; files[file]["version"] = version; files[file]["maintainer"] = control.Find("Maintainer", ""); - if file[-5:] == ".udeb": + if file.endswith(".udeb"): files[file]["dbtype"] = "udeb"; - elif file[-4:] == ".deb": + elif file.endswith(".deb"): files[file]["dbtype"] = "deb"; else: reject("%s is neither a .deb or a .udeb." % (file)); files[file]["source"] = control.Find("Source", files[file]["package"]); # Get the source version source = files[file]["source"]; - source_version = "" - if string.find(source, "(") != -1: - m = utils.re_extract_src_version.match(source) - source = m.group(1) - source_version = m.group(2) + source_version = ""; + if source.find("(") != -1: + m = utils.re_extract_src_version.match(source); + source = m.group(1); + source_version = m.group(2); if not source_version: source_version = files[file]["version"]; files[file]["source package"] = source; @@ -597,7 +442,7 @@ def check_files(): reject("source version (%s) for %s doesn't match changes version %s." % (source_version, file, changes["version"])); else: # Check in the SQL database - if not Katie.source_exists(source_package, source_version): + if not Katie.source_exists(source_package, source_version, changes["distribution"].keys()): # Check in one of the other directories source_epochless_version = utils.re_no_epoch.sub('', source_version); dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version); @@ -613,7 +458,8 @@ def check_files(): # Checks for a source package... else: m = utils.re_issource.match(file); - if m != None: + if m: + has_source = 1; files[file]["package"] = m.group(1); files[file]["version"] = m.group(2); files[file]["type"] = m.group(3); @@ -636,7 +482,7 @@ def check_files(): # Check the signature of a .dsc file if files[file]["type"] == "dsc": - dsc["fingerprint"] = check_signature(file); + dsc["fingerprint"] = utils.check_signature(file, reject); files[file]["architecture"] = "source"; @@ -654,7 +500,7 @@ def check_files(): # Handle component mappings for map in Cnf.ValueList("ComponentMappings"): - (source, dest) = string.split(map); + (source, dest) = map.split(); if files[file]["component"] == source: files[file]["original component"] = source; files[file]["component"] = dest; @@ -662,7 +508,7 @@ def check_files(): if Cnf.has_key("Suite:%s::Components" % (suite)) and \ files[file]["component"] not in Cnf.ValueList("Suite::%s::Components" % (suite)): reject("unknown component `%s' for suite `%s'." 
% (files[file]["component"], suite)); - continue + continue; # See if the package is NEW if not Katie.in_override_p(files[file]["package"], files[file]["component"], suite, files[file].get("dbtype",""), file): @@ -676,7 +522,7 @@ def check_files(): continue; # Validate the priority - if string.find(files[file]["priority"],'/') != -1: + if files[file]["priority"].find('/') != -1: reject("file '%s' has invalid priority '%s' [contains '/']." % (file, files[file]["priority"])); # Determine the location @@ -711,116 +557,224 @@ SELECT c.name FROM binaries b, bin_associations ba, suite s, location l, # If the .changes file says it has source, it must have source. if changes["architecture"].has_key("source"): - has_source = 0; - for file in file_keys: - if files[file]["type"] == "dsc": - has_source = 1; if not has_source: reject("no source found and Architecture line in changes mention source."); + if not has_binaries and Cnf.FindB("Dinstall::Reject::NoSourceOnly"): + reject("source only uploads are not supported."); + ############################################################################### -def check_dsc (): +def check_dsc(): global reprocess; + # Ensure there is source to check + if not changes["architecture"].has_key("source"): + return; + + # Find the .dsc + dsc_filename = None; for file in files.keys(): - # The .orig.tar.gz can disappear out from under us is it's a - # duplicate of one in the archive. - if not files.has_key(file): - continue; if files[file]["type"] == "dsc": - # Parse the .dsc file - try: - dsc.update(utils.parse_changes(file, dsc_whitespace_rules=1)); - except utils.cant_open_exc: - # if not -n copy_to_holding() will have done this for us... - if Options["No-Action"]: - reject("can't read .dsc file '%s'." % (file)); - except utils.changes_parse_error_exc, line: - reject("error parsing .dsc file '%s', can't grok: %s." % (file, line)); - except utils.invalid_dsc_format_exc, line: - reject("syntax error in .dsc file '%s', line %s." % (file, line)); - # Build up the file list of files mentioned by the .dsc + if dsc_filename: + reject("can not process a .changes file with multiple .dsc's."); + return; + else: + dsc_filename = file; + + # If there isn't one, we have nothing to do. (We have reject()ed the upload already) + if not dsc_filename: + return; + + # Parse the .dsc file + try: + dsc.update(utils.parse_changes(dsc_filename, dsc_whitespace_rules=1)); + except utils.cant_open_exc: + # if not -n copy_to_holding() will have done this for us... + if Options["No-Action"]: + reject("%s: can't read file." % (dsc_filename)); + except utils.changes_parse_error_exc, line: + reject("%s: parse error, can't grok: %s." % (dsc_filename, line)); + except utils.invalid_dsc_format_exc, line: + reject("%s: syntax error on line %s." % (dsc_filename, line)); + # Build up the file list of files mentioned by the .dsc + try: + dsc_files.update(utils.build_file_list(dsc, is_a_dsc=1)); + except utils.no_files_exc: + reject("%s: no Files: field." % (dsc_filename)); + return; + except utils.changes_parse_error_exc, line: + reject("%s: parse error, can't grok: %s." % (dsc_filename, line)); + return; + + # Enforce mandatory fields + for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"): + if not dsc.has_key(i): + reject("%s: missing mandatory field `%s'." % (dsc_filename, i)); + + # Validate the source and version fields + if dsc.has_key("source") and not re_valid_pkg_name.match(dsc["source"]): + reject("%s: invalid source name '%s'." 
% (dsc_filename, dsc["source"])); + if dsc.has_key("version") and not re_valid_version.match(dsc["version"]): + reject("%s: invalid version number '%s'." % (dsc_filename, dsc["version"])); + + # Bumping the version number of the .dsc breaks extraction by stable's + # dpkg-source. So let's not do that... + if dsc["format"] != "1.0": + reject("%s: incompatible 'Format' version produced by a broken version of dpkg-dev 1.9.1{3,4}." % (dsc_filename)); + + # Validate the build-depends field(s) + for field_name in [ "build-depends", "build-depends-indep" ]: + field = dsc.get(field_name); + if field: + # Check for broken dpkg-dev lossage... + if field.startswith("ARRAY"): + reject("%s: invalid %s field produced by a broken version of dpkg-dev (1.10.11)" % (dsc_filename, field_name.title())); + + # Have apt try to parse them... try: - dsc_files.update(utils.build_file_list(dsc, is_a_dsc=1)); - except utils.no_files_exc: - reject("no Files: field in .dsc file."); - continue; - except utils.changes_parse_error_exc, line: - reject("error parsing .dsc file '%s', can't grok: %s." % (file, line)); - continue; + apt_pkg.ParseSrcDepends(field); + except: + reject("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title())); + pass; - # Enforce mandatory fields - for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"): - if not dsc.has_key(i): - reject("Missing field `%s' in dsc file." % (i)); - - # Validate the source and version fields - if dsc.has_key("source") and not re_valid_pkg_name.match(dsc["source"]): - reject("%s: invalid source name '%s'." % (file, dsc["source"])); - if dsc.has_key("version") and not re_valid_version.match(dsc["version"]): - reject("%s: invalid version number '%s'." % (file, dsc["version"])); - - # The dpkg maintainer from hell strikes again! Bumping the - # version number of the .dsc breaks extraction by stable's - # dpkg-source. - if dsc["format"] != "1.0": - reject("""[dpkg-sucks] source package was produced by a broken version - of dpkg-dev 1.9.1{3,4}; please rebuild with >= 1.9.15 version - installed."""); - - # Ensure the version number in the .dsc matches the version number in the .changes - epochless_dsc_version = utils.re_no_epoch.sub('', dsc.get("version")); - changes_version = files[file]["version"]; - if epochless_dsc_version != files[file]["version"]: - reject("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version)); - - # Ensure there is a .tar.gz in the .dsc file - has_tar = 0; - for f in dsc_files.keys(): - m = utils.re_issource.match(f); - if not m: - reject("%s mentioned in the Files field of %s not recognised as source." % (f, file)); - type = m.group(3); - if type == "orig.tar.gz" or type == "tar.gz": - has_tar = 1; - if not has_tar: - reject("no .tar.gz or .orig.tar.gz listed in the Files field of %s." 
% (file)); - - # Ensure source is newer than existing source in target suites - reject(Katie.check_source_against_db(file),""); - - (reject_msg, is_in_incoming) = Katie.check_dsc_against_db(file); - reject(reject_msg, ""); - if is_in_incoming: - if not Options["No-Action"]: - copy_to_holding(is_in_incoming); - orig_tar_gz = os.path.basename(is_in_incoming); - files[orig_tar_gz] = {}; - files[orig_tar_gz]["size"] = os.stat(orig_tar_gz)[stat.ST_SIZE]; - files[orig_tar_gz]["md5sum"] = dsc_files[orig_tar_gz]["md5sum"]; - files[orig_tar_gz]["section"] = files[file]["section"]; - files[orig_tar_gz]["priority"] = files[file]["priority"]; - files[orig_tar_gz]["component"] = files[file]["component"]; - files[orig_tar_gz]["type"] = "orig.tar.gz"; - reprocess = 2; + # Ensure the version number in the .dsc matches the version number in the .changes + epochless_dsc_version = utils.re_no_epoch.sub('', dsc.get("version")); + changes_version = files[dsc_filename]["version"]; + if epochless_dsc_version != files[dsc_filename]["version"]: + reject("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version)); + + # Ensure there is a .tar.gz in the .dsc file + has_tar = 0; + for f in dsc_files.keys(): + m = utils.re_issource.match(f); + if not m: + reject("%s: %s in Files field not recognised as source." % (dsc_filename, f)); + type = m.group(3); + if type == "orig.tar.gz" or type == "tar.gz": + has_tar = 1; + if not has_tar: + reject("%s: no .tar.gz or .orig.tar.gz in 'Files' field." % (dsc_filename)); + + # Ensure source is newer than existing source in target suites + reject(Katie.check_source_against_db(dsc_filename),""); + + (reject_msg, is_in_incoming) = Katie.check_dsc_against_db(dsc_filename); + reject(reject_msg, ""); + if is_in_incoming: + if not Options["No-Action"]: + copy_to_holding(is_in_incoming); + orig_tar_gz = os.path.basename(is_in_incoming); + files[orig_tar_gz] = {}; + files[orig_tar_gz]["size"] = os.stat(orig_tar_gz)[stat.ST_SIZE]; + files[orig_tar_gz]["md5sum"] = dsc_files[orig_tar_gz]["md5sum"]; + files[orig_tar_gz]["section"] = files[dsc_filename]["section"]; + files[orig_tar_gz]["priority"] = files[dsc_filename]["priority"]; + files[orig_tar_gz]["component"] = files[dsc_filename]["component"]; + files[orig_tar_gz]["type"] = "orig.tar.gz"; + reprocess = 2; ################################################################################ -# Some cunning stunt broke dpkg-source in dpkg 1.8{,.1}; detect the -# resulting bad source packages and reject them. +def get_changelog_versions(source_dir): + """Extracts a the source package and (optionally) grabs the + version history out of debian/changelog for the BTS.""" + + # Find the .dsc (again) + dsc_filename = None; + for file in files.keys(): + if files[file]["type"] == "dsc": + dsc_filename = file; + + # If there isn't one, we have nothing to do. (We have reject()ed the upload already) + if not dsc_filename: + return; + + # Create a symlink mirror of the source files in our temporary directory + for f in files.keys(): + m = utils.re_issource.match(f); + if m: + src = os.path.join(source_dir, f); + # If a file is missing for whatever reason, give up. + if not os.path.exists(src): + return; + type = m.group(3); + if type == "orig.tar.gz" and pkg.orig_tar_gz: + continue; + else: + dest = os.path.join(os.getcwd(), f); + os.symlink(src, dest); + + # If the orig.tar.gz is not a part of the upload, create a symlink to the + # existing copy. 
+ if pkg.orig_tar_gz: + dest = os.path.join(os.getcwd(), os.path.basename(pkg.orig_tar_gz)); + os.symlink(pkg.orig_tar_gz, dest); + + # Extract the source + cmd = "dpkg-source -sn -x %s" % (dsc_filename); + (result, output) = commands.getstatusoutput(cmd); + if (result != 0): + reject("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result)); + reject(utils.prefix_multi_line_string(output, " [dpkg-source output:] "), ""); + return; + + if not Cnf.Find("Dir::Queue::BTSVersionTrack"): + return; + + # Get the upstream version + upstr_version = utils.re_no_epoch.sub('', dsc["version"]); + if re_strip_revision.search(upstr_version): + upstr_version = re_strip_revision.sub('', upstr_version); + + # Ensure the changelog file exists + changelog_filename = "%s-%s/debian/changelog" % (dsc["source"], upstr_version); + if not os.path.exists(changelog_filename): + reject("%s: debian/changelog not found in extracted source." % (dsc_filename)); + return; + + # Parse the changelog + dsc["bts changelog"] = ""; + changelog_file = utils.open_file(changelog_filename); + for line in changelog_file.readlines(): + m = re_changelog_versions.match(line); + if m: + dsc["bts changelog"] += line; + changelog_file.close(); + + # Check we found at least one revision in the changelog + if not dsc["bts changelog"]: + reject("%s: changelog format not recognised (empty version tree)." % (dsc_filename)); + +######################################## + +def check_source(): + # Bail out if: + # a) there's no source + # or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files' + # or c) the orig.tar.gz is MIA + if not changes["architecture"].has_key("source") or reprocess == 2 \ + or pkg.orig_tar_gz == -1: + return; + + # Create a temporary directory to extract the source into + if Options["No-Action"]: + tmpdir = tempfile.mktemp(); + else: + # We're in queue/holding and can create a random directory. + tmpdir = "%s" % (os.getpid()); + os.mkdir(tmpdir); -# Even more amusingly the fix in 1.8.1.1 didn't actually fix the -# problem just changed the symptoms. + # Move into the temporary directory + cwd = os.getcwd(); + os.chdir(tmpdir); -def check_diff (): - for filename in files.keys(): - if files[filename]["type"] == "diff.gz": - file = gzip.GzipFile(filename, 'r'); - for line in file.readlines(): - if re_bad_diff.search(line): - reject("[dpkg-sucks] source package was produced by a broken version of dpkg-dev 1.8.x; please rebuild with >= 1.8.3 version installed."); - break; + # Get the changelog version history + get_changelog_versions(cwd); + + # Move back and cleanup the temporary tree + os.chdir(cwd); + shutil.rmtree(tmpdir); ################################################################################ @@ -833,7 +787,7 @@ def check_urgency (): if changes["urgency"] not in Cnf.ValueList("Urgency::Valid"): reject("%s is not a valid urgency; it will be treated as %s by testing." % (changes["urgency"], Cnf["Urgency::Default"]), "Warning: "); changes["urgency"] = Cnf["Urgency::Default"]; - changes["urgency"] = string.lower(changes["urgency"]); + changes["urgency"] = changes["urgency"].lower(); ################################################################################ @@ -842,11 +796,35 @@ def check_md5sums (): try: file_handle = utils.open_file(file); except utils.cant_open_exc: - pass; - else: - if apt_pkg.md5sum(file_handle) != files[file]["md5sum"]: - reject("md5sum check failed for %s." 
% (file)); - file_handle.close(); + continue; + + # Check md5sum + if apt_pkg.md5sum(file_handle) != files[file]["md5sum"]: + reject("%s: md5sum check failed." % (file)); + file_handle.close(); + # Check size + actual_size = os.stat(file)[stat.ST_SIZE]; + size = int(files[file]["size"]); + if size != actual_size: + reject("%s: actual file size (%s) does not match size (%s) in .changes" + % (file, actual_size, size)); + + for file in dsc_files.keys(): + try: + file_handle = utils.open_file(file); + except utils.cant_open_exc: + continue; + + # Check md5sum + if apt_pkg.md5sum(file_handle) != dsc_files[file]["md5sum"]: + reject("%s: md5sum check failed." % (file)); + file_handle.close(); + # Check size + actual_size = os.stat(file)[stat.ST_SIZE]; + size = int(dsc_files[file]["size"]); + if size != actual_size: + reject("%s: actual file size (%s) does not match size (%s) in .dsc" + % (file, actual_size, size)); ################################################################################ @@ -903,7 +881,7 @@ def check_timestamps(): % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date))); except: - reject("%s: timestamp check failed; caught %s" % (filename, sys.exc_type)); + reject("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value)); ################################################################################ ################################################################################ @@ -951,7 +929,7 @@ def action (): if Options["No-Action"] or Options["Automatic"]: answer = 'S' - if string.find(reject_message, "Rejected") != -1: + if reject_message.find("Rejected") != -1: if upload_too_new(): print "SKIP (too new)\n" + reject_message,; prompt = "[S]kip, Quit ?"; @@ -961,7 +939,7 @@ def action (): if Options["Automatic"]: answer = 'R'; elif new: - print "NEW to %s\n%s%s" % (string.join(changes["distribution"].keys(), ", "), reject_message, summary),; + print "NEW to %s\n%s%s" % (", ".join(changes["distribution"].keys()), reject_message, summary),; prompt = "[N]ew, Skip, Quit ?"; if Options["Automatic"]: answer = 'N'; @@ -976,12 +954,12 @@ def action (): if Options["Automatic"]: answer = 'A'; - while string.find(prompt, answer) == -1: + while prompt.find(answer) == -1: answer = utils.our_raw_input(prompt); m = katie.re_default_answer.match(prompt); if answer == "": answer = m.group(1); - answer = string.upper(answer[:1]); + answer = answer[:1].upper(); if answer == 'R': os.chdir (pkg.directory); @@ -1053,7 +1031,7 @@ def acknowledge_new (summary): print "Sending new ack."; Subst["__SUMMARY__"] = summary; new_ack_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.new"); - utils.send_mail(new_ack_message,""); + utils.send_mail(new_ack_message); # Finally remove the originals. os.chdir (pkg.directory); @@ -1097,15 +1075,15 @@ def process_it (changes_file): # Relativize the filename so we use the copy in holding # rather than the original... 
pkg.changes_file = os.path.basename(pkg.changes_file); - changes["fingerprint"] = check_signature(pkg.changes_file); + changes["fingerprint"] = utils.check_signature(pkg.changes_file, reject); changes_valid = check_changes(); if changes_valid: while reprocess: check_distributions(); check_files(); - check_md5sums(); check_dsc(); - check_diff(); + check_source(); + check_md5sums(); check_urgency(); check_timestamps(); Katie.update_subst(reject_message); @@ -1123,24 +1101,17 @@ def process_it (changes_file): ############################################################################### def main(): - global Cnf, Options, Logger, nmu; + global Cnf, Options, Logger; changes_files = init(); - if Options["Help"]: - usage(); - - if Options["Version"]: - print "jennifer %s" % (jennifer_version); - sys.exit(0); - # -n/--dry-run invalidates some other options which would involve things happening if Options["No-Action"]: Options["Automatic"] = ""; # Ensure all the arguments we were given are .changes files for file in changes_files: - if file[-8:] != ".changes": + if not file.endswith(".changes"): utils.warn("Ignoring '%s' because it's not a .changes file." % (file)); changes_files.remove(file); @@ -1156,7 +1127,13 @@ def main(): if not Options["No-Action"]: lock_fd = os.open(Cnf["Dinstall::LockFile"], os.O_RDWR | os.O_CREAT); - fcntl.lockf(lock_fd, FCNTL.F_TLOCK); + try: + fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB); + except IOError, e: + if errno.errorcode[e.errno] == 'EACCES' or errno.errorcode[e.errno] == 'EAGAIN': + utils.fubar("Couldn't obtain lock; assuming another jennifer is already running."); + else: + raise; Logger = Katie.Logger = logging.Logger(Cnf, "jennifer"); # debian-{devel-,}-changes@lists.debian.org toggles writes access based on this header @@ -1184,7 +1161,7 @@ def main(): if accept_count: sets = "set" if accept_count > 1: - sets = "sets" + sets = "sets"; print "Accepted %d package %s, %s." % (accept_count, sets, utils.size_type(int(accept_bytes))); Logger.log(["total",accept_count,accept_bytes]);
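
The new get_changelog_versions() extracts the source package and keeps only the "package (version)" header lines of debian/changelog, selected with the re_changelog_versions regexp added at the top of the file, and stores them as dsc["bts changelog"] for BTS version tracking. A minimal stand-alone sketch of that filtering step follows, in the script's own Python 2 style; the helper name changelog_version_lines() is illustrative only and is not part of dak, while the regexp is copied verbatim from the patch.

    import re

    re_changelog_versions = re.compile(r"^\w[-+0-9a-z.]+ \([^\(\) \t]+\)")

    def changelog_version_lines(changelog_path):
        # Collect the changelog entry header lines (e.g. "dak (1.1-1) unstable; ...")
        # in file order; jennifer concatenates these into dsc["bts changelog"].
        versions = []
        changelog = open(changelog_path)
        for line in changelog.readlines():
            if re_changelog_versions.match(line):
                versions.append(line)
        changelog.close()
        return versions

If no header line at all matches, jennifer treats the changelog as unparseable, which is the "empty version tree" rejection in the hunk above.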
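
The locking change in main(), which replaces the old FCNTL.F_TLOCK call with fcntl.lockf() in non-blocking mode plus explicit EACCES/EAGAIN handling, boils down to the pattern sketched below, again in the script's Python 2 style. The helper name acquire_lock() is illustrative and not part of dak, and it compares e.errno against errno constants directly rather than through the errno.errorcode lookup the patch itself uses; the two checks are equivalent.

    import errno, fcntl, os

    def acquire_lock(lock_path):
        # Open (creating if necessary) the lock file and take an exclusive,
        # non-blocking POSIX lock on it.  Returns the open fd on success,
        # or None if another process already holds the lock.
        fd = os.open(lock_path, os.O_RDWR | os.O_CREAT)
        try:
            fcntl.lockf(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except IOError, e:
            if e.errno in (errno.EACCES, errno.EAGAIN):
                os.close(fd)
                return None
            raise
        return fd

A caller would then do something like lock_fd = acquire_lock(Cnf["Dinstall::LockFile"]) and call utils.fubar() if it gets None back, which mirrors what the new code in main() does inline.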