X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=dak%2Fprocess_unchecked.py;h=2a479ced62e457ff29a897b08901dcf0270cba32;hb=d36e2e1219a6c9ad9110bbdb779ca61dfb444c2c;hp=a958d23eb98bca3a92ba92e76fd3ca46e509ab59;hpb=7a7563b8e19f99783505669ba603ef2d534c6603;p=dak.git diff --git a/dak/process_unchecked.py b/dak/process_unchecked.py index a958d23e..2a479ced 100755 --- a/dak/process_unchecked.py +++ b/dak/process_unchecked.py @@ -30,14 +30,13 @@ import commands, errno, fcntl, os, re, shutil, stat, sys, time, tempfile, traceback import apt_inst, apt_pkg -import daklib.database -import daklib.logging -import daklib.queue -import daklib.utils +from daklib import database +from daklib import logging +from daklib import queue +from daklib import utils +from daklib.dak_exceptions import * from types import * -from syck import * - ################################################################################ @@ -46,6 +45,7 @@ re_valid_pkg_name = re.compile(r"^[\dA-Za-z][\dA-Za-z\+\-\.]+$") re_changelog_versions = re.compile(r"^\w[-+0-9a-z.]+ \([^\(\) \t]+\)") re_strip_revision = re.compile(r"-([^-]+)$") re_strip_srcver = re.compile(r"\s+\(\S+\)$") +re_spacestrip = re.compile('(\s)') ################################################################################ @@ -74,7 +74,7 @@ def init(): apt_pkg.init() Cnf = apt_pkg.newConfiguration() - apt_pkg.ReadConfigFileISC(Cnf,daklib.utils.which_conf_file()) + apt_pkg.ReadConfigFileISC(Cnf,utils.which_conf_file()) Arguments = [('a',"automatic","Dinstall::Options::Automatic"), ('h',"help","Dinstall::Options::Help"), @@ -92,7 +92,7 @@ def init(): if Options["Help"]: usage() - Upload = daklib.queue.Upload(Cnf) + Upload = queue.Upload(Cnf) changes = Upload.pkg.changes dsc = Upload.pkg.dsc @@ -165,12 +165,12 @@ def clean_holding(): cwd = os.getcwd() os.chdir(Cnf["Dir::Queue::Holding"]) - for file in in_holding.keys(): - if os.path.exists(file): - if file.find('/') != -1: - daklib.utils.fubar("WTF? clean_holding() got a file ('%s') with / in it!" % (file)) + for f in in_holding.keys(): + if os.path.exists(f): + if f.find('/') != -1: + utils.fubar("WTF? clean_holding() got a file ('%s') with / in it!" % (f)) else: - os.unlink(file) + os.unlink(f) in_holding = {} os.chdir(cwd) @@ -181,20 +181,20 @@ def check_changes(): # Parse the .changes field into a dictionary try: - changes.update(daklib.utils.parse_changes(filename)) - except daklib.utils.cant_open_exc: + changes.update(utils.parse_changes(filename)) + except CantOpenError: reject("%s: can't read file." % (filename)) return 0 - except daklib.utils.changes_parse_error_exc, line: + except ParseChangesError, line: reject("%s: parse error, can't grok: %s." % (filename, line)) return 0 # Parse the Files field from the .changes into another dictionary try: - files.update(daklib.utils.build_file_list(changes)) - except daklib.utils.changes_parse_error_exc, line: + files.update(utils.build_file_list(changes)) + except ParseChangesError, line: reject("%s: parse error, can't grok: %s." % (filename, line)) - except daklib.utils.nk_format_exc, format: + except UnknownFormatError, format: reject("%s: unknown format '%s'." % (filename, format)) return 0 @@ -207,7 +207,7 @@ def check_changes(): # Strip a source version in brackets from the source field if re_strip_srcver.search(changes["source"]): - changes["source"] = re_strip_srcver.sub('', changes["source"]) + changes["source"] = re_strip_srcver.sub('', changes["source"]) # Ensure the source field is a valid package name. 
if not re_valid_pkg_name.match(changes["source"]): @@ -226,8 +226,8 @@ def check_changes(): try: (changes["maintainer822"], changes["maintainer2047"], changes["maintainername"], changes["maintaineremail"]) = \ - daklib.utils.fix_maintainer (changes["maintainer"]) - except daklib.utils.ParseMaintError, msg: + utils.fix_maintainer (changes["maintainer"]) + except ParseMaintError, msg: reject("%s: Maintainer field ('%s') failed to parse: %s" \ % (filename, changes["maintainer"], msg)) @@ -235,31 +235,31 @@ def check_changes(): try: (changes["changedby822"], changes["changedby2047"], changes["changedbyname"], changes["changedbyemail"]) = \ - daklib.utils.fix_maintainer (changes.get("changed-by", "")) - except daklib.utils.ParseMaintError, msg: + utils.fix_maintainer (changes.get("changed-by", "")) + except ParseMaintError, msg: (changes["changedby822"], changes["changedby2047"], changes["changedbyname"], changes["changedbyemail"]) = \ - ("", "", "", "") + ("", "", "", "") reject("%s: Changed-By field ('%s') failed to parse: %s" \ % (filename, changes["changed-by"], msg)) # Ensure all the values in Closes: are numbers if changes.has_key("closes"): for i in changes["closes"].keys(): - if daklib.queue.re_isanum.match (i) == None: + if queue.re_isanum.match (i) == None: reject("%s: `%s' from Closes field isn't a number." % (filename, i)) # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison) - changes["chopversion"] = daklib.utils.re_no_epoch.sub('', changes["version"]) - changes["chopversion2"] = daklib.utils.re_no_revision.sub('', changes["chopversion"]) + changes["chopversion"] = utils.re_no_epoch.sub('', changes["version"]) + changes["chopversion2"] = utils.re_no_revision.sub('', changes["chopversion"]) # Check there isn't already a changes file of the same name in one # of the queue directories. base_filename = os.path.basename(filename) - for dir in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]: - if os.path.exists(Cnf["Dir::Queue::%s" % (dir) ]+'/'+base_filename): - reject("%s: a file with this name already exists in the %s directory." % (base_filename, dir)) + for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]: + if os.path.exists(Cnf["Dir::Queue::%s" % (d) ]+'/'+base_filename): + reject("%s: a file with this name already exists in the %s directory." % (base_filename, d)) # Check the .changes is non-empty if not files: @@ -274,20 +274,20 @@ def check_distributions(): "Check and map the Distribution field of a .changes file." # Handle suite mappings - for map in Cnf.ValueList("SuiteMappings"): - args = map.split() - type = args[0] - if type == "map" or type == "silent-map": + for m in Cnf.ValueList("SuiteMappings"): + args = m.split() + mtype = args[0] + if mtype == "map" or mtype == "silent-map": (source, dest) = args[1:3] if changes["distribution"].has_key(source): del changes["distribution"][source] changes["distribution"][dest] = 1 - if type != "silent-map": + if mtype != "silent-map": reject("Mapping %s to %s." 
% (source, dest),"") if changes.has_key("distribution-version"): if changes["distribution-version"].has_key(source): changes["distribution-version"][source]=dest - elif type == "map-unreleased": + elif mtype == "map-unreleased": (source, dest) = args[1:3] if changes["distribution"].has_key(source): for arch in changes["architecture"].keys(): @@ -296,16 +296,16 @@ def check_distributions(): del changes["distribution"][source] changes["distribution"][dest] = 1 break - elif type == "ignore": + elif mtype == "ignore": suite = args[1] if changes["distribution"].has_key(suite): del changes["distribution"][suite] reject("Ignoring %s as a target suite." % (suite), "Warning: ") - elif type == "reject": + elif mtype == "reject": suite = args[1] if changes["distribution"].has_key(suite): reject("Uploads to %s are not accepted." % (suite)) - elif type == "propup-version": + elif mtype == "propup-version": # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes" # # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'} @@ -324,7 +324,7 @@ def check_distributions(): ################################################################################ -def check_deb_ar(filename, control): +def check_deb_ar(filename): """Sanity check the ar of a .deb, i.e. that there is: o debian-binary @@ -336,7 +336,7 @@ in that order, and nothing else.""" (result, output) = commands.getstatusoutput(cmd) if result != 0: reject("%s: 'ar t' invocation failed." % (filename)) - reject(daklib.utils.prefix_multi_line_string(output, " [ar output:] "), "") + reject(utils.prefix_multi_line_string(output, " [ar output:] "), "") chunks = output.split('\n') if len(chunks) != 3: reject("%s: found %d chunks, expected 3." % (filename, len(chunks))) @@ -352,7 +352,7 @@ in that order, and nothing else.""" def check_files(): global reprocess - archive = daklib.utils.where_am_i() + archive = utils.where_am_i() file_keys = files.keys() # if reprocess is 2 we've already done this and we're checking @@ -361,8 +361,8 @@ def check_files(): if not Options["No-Action"] and reprocess < 2: cwd = os.getcwd() os.chdir(pkg.directory) - for file in file_keys: - copy_to_holding(file) + for f in file_keys: + copy_to_holding(f) os.chdir(cwd) # Check there isn't already a .changes or .dak file of the same name in @@ -387,40 +387,40 @@ def check_files(): has_binaries = 0 has_source = 0 - for file in file_keys: + for f in file_keys: # Ensure the file does not already exist in one of the accepted directories - for dir in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]: - if not Cnf.has_key("Dir::Queue::%s" % (dir)): continue - if os.path.exists(Cnf["Dir::Queue::%s" % (dir) ]+'/'+file): - reject("%s file already exists in the %s directory." % (file, dir)) - if not daklib.utils.re_taint_free.match(file): - reject("!!WARNING!! tainted filename: '%s'." % (file)) + for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]: + if not Cnf.has_key("Dir::Queue::%s" % (d)): continue + if os.path.exists(Cnf["Dir::Queue::%s" % (d) ] + '/' + f): + reject("%s file already exists in the %s directory." % (f, d)) + if not utils.re_taint_free.match(f): + reject("!!WARNING!! tainted filename: '%s'." % (f)) # Check the file is readable - if os.access(file,os.R_OK) == 0: + if os.access(f, os.R_OK) == 0: # When running in -n, copy_to_holding() won't have # generated the reject_message, so we need to. 
if Options["No-Action"]: - if os.path.exists(file): - reject("Can't read `%s'. [permission denied]" % (file)) + if os.path.exists(f): + reject("Can't read `%s'. [permission denied]" % (f)) else: - reject("Can't read `%s'. [file not found]" % (file)) - files[file]["type"] = "unreadable" + reject("Can't read `%s'. [file not found]" % (f)) + files[f]["type"] = "unreadable" continue # If it's byhand skip remaining checks - if files[file]["section"] == "byhand" or files[file]["section"][:4] == "raw-": - files[file]["byhand"] = 1 - files[file]["type"] = "byhand" + if files[f]["section"] == "byhand" or files[f]["section"][:4] == "raw-": + files[f]["byhand"] = 1 + files[f]["type"] = "byhand" # Checks for a binary package... - elif daklib.utils.re_isadeb.match(file): + elif utils.re_isadeb.match(f): has_binaries = 1 - files[file]["type"] = "deb" + files[f]["type"] = "deb" # Extract package control information - deb_file = daklib.utils.open_file(file) + deb_file = utils.open_file(f) try: control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file)) except: - reject("%s: debExtractControl() raised %s." % (file, sys.exc_type)) + reject("%s: debExtractControl() raised %s." % (f, sys.exc_type)) deb_file.close() # Can't continue, none of the checks on control would work. continue @@ -429,201 +429,214 @@ def check_files(): # Check for mandatory fields for field in [ "Package", "Architecture", "Version" ]: if control.Find(field) == None: - reject("%s: No %s field in control." % (file, field)) + reject("%s: No %s field in control." % (f, field)) # Can't continue continue # Ensure the package name matches the one give in the .changes if not changes["binary"].has_key(control.Find("Package", "")): - reject("%s: control file lists name as `%s', which isn't in changes file." % (file, control.Find("Package", ""))) + reject("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", ""))) # Validate the package field package = control.Find("Package") if not re_valid_pkg_name.match(package): - reject("%s: invalid package name '%s'." % (file, package)) + reject("%s: invalid package name '%s'." % (f, package)) # Validate the version field version = control.Find("Version") if not re_valid_version.match(version): - reject("%s: invalid version number '%s'." % (file, version)) + reject("%s: invalid version number '%s'." % (f, version)) # Ensure the architecture of the .deb is one we know about. default_suite = Cnf.get("Dinstall::DefaultSuite", "Unstable") architecture = control.Find("Architecture") - if architecture not in Cnf.ValueList("Suite::%s::Architectures" % (default_suite)): + upload_suite = changes["distribution"].keys()[0] + if architecture not in Cnf.ValueList("Suite::%s::Architectures" % (default_suite)) and architecture not in Cnf.ValueList("Suite::%s::Architectures" % (upload_suite)): reject("Unknown architecture '%s'." % (architecture)) # Ensure the architecture of the .deb is one of the ones # listed in the .changes. if not changes["architecture"].has_key(architecture): - reject("%s: control file lists arch as `%s', which isn't in changes file." % (file, architecture)) + reject("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture)) # Sanity-check the Depends field depends = control.Find("Depends") if depends == '': - reject("%s: Depends field is empty." % (file)) + reject("%s: Depends field is empty." 
% (f)) + + # Sanity-check the Provides field + provides = control.Find("Provides") + if provides: + provide = re_spacestrip.sub('', provides) + if provide == '': + reject("%s: Provides field is empty." % (f)) + prov_list = provide.split(",") + for prov in prov_list: + if not re_valid_pkg_name.match(prov): + reject("%s: Invalid Provides field content %s." % (f, prov)) + # Check the section & priority match those given in the .changes (non-fatal) - if control.Find("Section") and files[file]["section"] != "" and files[file]["section"] != control.Find("Section"): - reject("%s control file lists section as `%s', but changes file has `%s'." % (file, control.Find("Section", ""), files[file]["section"]), "Warning: ") - if control.Find("Priority") and files[file]["priority"] != "" and files[file]["priority"] != control.Find("Priority"): - reject("%s control file lists priority as `%s', but changes file has `%s'." % (file, control.Find("Priority", ""), files[file]["priority"]),"Warning: ") - - files[file]["package"] = package - files[file]["architecture"] = architecture - files[file]["version"] = version - files[file]["maintainer"] = control.Find("Maintainer", "") - if file.endswith(".udeb"): - files[file]["dbtype"] = "udeb" - elif file.endswith(".deb"): - files[file]["dbtype"] = "deb" + if control.Find("Section") and files[f]["section"] != "" and files[f]["section"] != control.Find("Section"): + reject("%s control file lists section as `%s', but changes file has `%s'." % (f, control.Find("Section", ""), files[f]["section"]), "Warning: ") + if control.Find("Priority") and files[f]["priority"] != "" and files[f]["priority"] != control.Find("Priority"): + reject("%s control file lists priority as `%s', but changes file has `%s'." % (f, control.Find("Priority", ""), files[f]["priority"]),"Warning: ") + + files[f]["package"] = package + files[f]["architecture"] = architecture + files[f]["version"] = version + files[f]["maintainer"] = control.Find("Maintainer", "") + if f.endswith(".udeb"): + files[f]["dbtype"] = "udeb" + elif f.endswith(".deb"): + files[f]["dbtype"] = "deb" else: - reject("%s is neither a .deb or a .udeb." % (file)) - files[file]["source"] = control.Find("Source", files[file]["package"]) + reject("%s is neither a .deb or a .udeb." % (f)) + files[f]["source"] = control.Find("Source", files[f]["package"]) # Get the source version - source = files[file]["source"] + source = files[f]["source"] source_version = "" if source.find("(") != -1: - m = daklib.utils.re_extract_src_version.match(source) + m = utils.re_extract_src_version.match(source) source = m.group(1) source_version = m.group(2) if not source_version: - source_version = files[file]["version"] - files[file]["source package"] = source - files[file]["source version"] = source_version + source_version = files[f]["version"] + files[f]["source package"] = source + files[f]["source version"] = source_version # Ensure the filename matches the contents of the .deb - m = daklib.utils.re_isadeb.match(file) + m = utils.re_isadeb.match(f) # package name file_package = m.group(1) - if files[file]["package"] != file_package: - reject("%s: package part of filename (%s) does not match package name in the %s (%s)." % (file, file_package, files[file]["dbtype"], files[file]["package"])) - epochless_version = daklib.utils.re_no_epoch.sub('', control.Find("Version")) + if files[f]["package"] != file_package: + reject("%s: package part of filename (%s) does not match package name in the %s (%s)." 
% (f, file_package, files[f]["dbtype"], files[f]["package"])) + epochless_version = utils.re_no_epoch.sub('', control.Find("Version")) # version file_version = m.group(2) if epochless_version != file_version: - reject("%s: version part of filename (%s) does not match package version in the %s (%s)." % (file, file_version, files[file]["dbtype"], epochless_version)) + reject("%s: version part of filename (%s) does not match package version in the %s (%s)." % (f, file_version, files[f]["dbtype"], epochless_version)) # architecture file_architecture = m.group(3) - if files[file]["architecture"] != file_architecture: - reject("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % (file, file_architecture, files[file]["dbtype"], files[file]["architecture"])) + if files[f]["architecture"] != file_architecture: + reject("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % (f, file_architecture, files[f]["dbtype"], files[f]["architecture"])) # Check for existent source - source_version = files[file]["source version"] - source_package = files[file]["source package"] + source_version = files[f]["source version"] + source_package = files[f]["source package"] if changes["architecture"].has_key("source"): if source_version != changes["version"]: - reject("source version (%s) for %s doesn't match changes version %s." % (source_version, file, changes["version"])) + reject("source version (%s) for %s doesn't match changes version %s." % (source_version, f, changes["version"])) else: # Check in the SQL database if not Upload.source_exists(source_package, source_version, changes["distribution"].keys()): # Check in one of the other directories - source_epochless_version = daklib.utils.re_no_epoch.sub('', source_version) + source_epochless_version = utils.re_no_epoch.sub('', source_version) dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version) if os.path.exists(Cnf["Dir::Queue::Byhand"] + '/' + dsc_filename): - files[file]["byhand"] = 1 + files[f]["byhand"] = 1 elif os.path.exists(Cnf["Dir::Queue::New"] + '/' + dsc_filename): - files[file]["new"] = 1 + files[f]["new"] = 1 else: - dsc_file_exists = 0 + dsc_file_exists = 0 for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]: - if Cnf.has_key("Dir::Queue::%s" % (myq)): - if os.path.exists(Cnf["Dir::Queue::"+myq] + '/' + dsc_filename): - dsc_file_exists = 1 - break - if not dsc_file_exists: - reject("no source found for %s %s (%s)." % (source_package, source_version, file)) + if Cnf.has_key("Dir::Queue::%s" % (myq)): + if os.path.exists(Cnf["Dir::Queue::"+myq] + '/' + dsc_filename): + dsc_file_exists = 1 + break + if not dsc_file_exists: + reject("no source found for %s %s (%s)." % (source_package, source_version, f)) # Check the version and for file overwrites - reject(Upload.check_binary_against_db(file),"") + reject(Upload.check_binary_against_db(f),"") - check_deb_ar(file, control) + check_deb_ar(f) # Checks for a source package... 
else: - m = daklib.utils.re_issource.match(file) + m = utils.re_issource.match(f) if m: has_source = 1 - files[file]["package"] = m.group(1) - files[file]["version"] = m.group(2) - files[file]["type"] = m.group(3) + files[f]["package"] = m.group(1) + files[f]["version"] = m.group(2) + files[f]["type"] = m.group(3) # Ensure the source package name matches the Source filed in the .changes - if changes["source"] != files[file]["package"]: - reject("%s: changes file doesn't say %s for Source" % (file, files[file]["package"])) + if changes["source"] != files[f]["package"]: + reject("%s: changes file doesn't say %s for Source" % (f, files[f]["package"])) # Ensure the source version matches the version in the .changes file - if files[file]["type"] == "orig.tar.gz": + if files[f]["type"] == "orig.tar.gz": changes_version = changes["chopversion2"] else: changes_version = changes["chopversion"] - if changes_version != files[file]["version"]: - reject("%s: should be %s according to changes file." % (file, changes_version)) + if changes_version != files[f]["version"]: + reject("%s: should be %s according to changes file." % (f, changes_version)) # Ensure the .changes lists source in the Architecture field if not changes["architecture"].has_key("source"): - reject("%s: changes file doesn't list `source' in Architecture field." % (file)) + reject("%s: changes file doesn't list `source' in Architecture field." % (f)) # Check the signature of a .dsc file - if files[file]["type"] == "dsc": - dsc["fingerprint"] = daklib.utils.check_signature(file, reject) + if files[f]["type"] == "dsc": + dsc["fingerprint"] = utils.check_signature(f, reject) - files[file]["architecture"] = "source" + files[f]["architecture"] = "source" # Not a binary or source package? Assume byhand... else: - files[file]["byhand"] = 1 - files[file]["type"] = "byhand" + files[f]["byhand"] = 1 + files[f]["type"] = "byhand" # Per-suite file checks - files[file]["oldfiles"] = {} + files[f]["oldfiles"] = {} for suite in changes["distribution"].keys(): # Skip byhand - if files[file].has_key("byhand"): + if files[f].has_key("byhand"): continue # Handle component mappings - for map in Cnf.ValueList("ComponentMappings"): - (source, dest) = map.split() - if files[file]["component"] == source: - files[file]["original component"] = source - files[file]["component"] = dest + for m in Cnf.ValueList("ComponentMappings"): + (source, dest) = m.split() + if files[f]["component"] == source: + files[f]["original component"] = source + files[f]["component"] = dest # Ensure the component is valid for the target suite if Cnf.has_key("Suite:%s::Components" % (suite)) and \ - files[file]["component"] not in Cnf.ValueList("Suite::%s::Components" % (suite)): - reject("unknown component `%s' for suite `%s'." % (files[file]["component"], suite)) + files[f]["component"] not in Cnf.ValueList("Suite::%s::Components" % (suite)): + reject("unknown component `%s' for suite `%s'." % (files[f]["component"], suite)) continue # Validate the component - component = files[file]["component"] - component_id = daklib.database.get_component_id(component) + component = files[f]["component"] + component_id = database.get_component_id(component) if component_id == -1: - reject("file '%s' has unknown component '%s'." % (file, component)) + reject("file '%s' has unknown component '%s'." 
% (f, component)) continue # See if the package is NEW - if not Upload.in_override_p(files[file]["package"], files[file]["component"], suite, files[file].get("dbtype",""), file): - files[file]["new"] = 1 + if not Upload.in_override_p(files[f]["package"], files[f]["component"], suite, files[f].get("dbtype",""), f): + files[f]["new"] = 1 # Validate the priority - if files[file]["priority"].find('/') != -1: - reject("file '%s' has invalid priority '%s' [contains '/']." % (file, files[file]["priority"])) + if files[f]["priority"].find('/') != -1: + reject("file '%s' has invalid priority '%s' [contains '/']." % (f, files[f]["priority"])) # Determine the location location = Cnf["Dir::Pool"] - location_id = daklib.database.get_location_id (location, component, archive) + location_id = database.get_location_id (location, component, archive) if location_id == -1: reject("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (component, archive)) - files[file]["location id"] = location_id + files[f]["location id"] = location_id # Check the md5sum & size against existing files (if any) - files[file]["pool name"] = daklib.utils.poolify (changes["source"], files[file]["component"]) - files_id = daklib.database.get_files_id(files[file]["pool name"] + file, files[file]["size"], files[file]["md5sum"], files[file]["location id"]) + files[f]["pool name"] = utils.poolify (changes["source"], files[f]["component"]) + files_id = database.get_files_id(files[f]["pool name"] + f, files[f]["size"], files[f]["md5sum"], files[f]["location id"]) if files_id == -1: - reject("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (file)) + reject("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f)) elif files_id == -2: - reject("md5sum and/or size mismatch on existing copy of %s." % (file)) - files[file]["files id"] = files_id + reject("md5sum and/or size mismatch on existing copy of %s." % (f)) + files[f]["files id"] = files_id # Check for packages that have moved from one component to another q = Upload.projectB.query(""" @@ -633,11 +646,11 @@ SELECT c.name FROM binaries b, bin_associations ba, suite s, location l, AND (a.arch_string = '%s' OR a.arch_string = 'all') AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id AND f.location = l.id AND l.component = c.id AND b.file = f.id""" - % (files[file]["package"], suite, - files[file]["architecture"])) + % (files[f]["package"], suite, + files[f]["architecture"])) ql = q.getresult() if ql: - files[file]["othercomponents"] = ql[0][0] + files[f]["othercomponents"] = ql[0][0] # If the .changes file says it has source, it must have source. if changes["architecture"].has_key("source"): @@ -658,13 +671,13 @@ def check_dsc(): # Find the .dsc dsc_filename = None - for file in files.keys(): - if files[file]["type"] == "dsc": + for f in files.keys(): + if files[f]["type"] == "dsc": if dsc_filename: reject("can not process a .changes file with multiple .dsc's.") return 0 else: - dsc_filename = file + dsc_filename = f # If there isn't one, we have nothing to do. (We have reject()ed the upload already) if not dsc_filename: @@ -673,22 +686,25 @@ def check_dsc(): # Parse the .dsc file try: - dsc.update(daklib.utils.parse_changes(dsc_filename, signing_rules=1)) - except daklib.utils.cant_open_exc: + dsc.update(utils.parse_changes(dsc_filename, signing_rules=1)) + except CantOpenError: # if not -n copy_to_holding() will have done this for us... if Options["No-Action"]: reject("%s: can't read file." 
% (dsc_filename)) - except daklib.utils.changes_parse_error_exc, line: + except ParseChangesError, line: reject("%s: parse error, can't grok: %s." % (dsc_filename, line)) - except daklib.utils.invalid_dsc_format_exc, line: + except InvalidDscError, line: reject("%s: syntax error on line %s." % (dsc_filename, line)) # Build up the file list of files mentioned by the .dsc try: - dsc_files.update(daklib.utils.build_file_list(dsc, is_a_dsc=1)) - except daklib.utils.no_files_exc: + dsc_files.update(utils.build_file_list(dsc, is_a_dsc=1)) + except NoFilesFieldError: reject("%s: no Files: field." % (dsc_filename)) return 0 - except daklib.utils.changes_parse_error_exc, line: + except UnknownFormatError, format: + reject("%s: unknown format '%s'." % (dsc_filename, format)) + return 0 + except ParseChangesError, line: reject("%s: parse error, can't grok: %s." % (dsc_filename, line)) return 0 @@ -711,8 +727,8 @@ def check_dsc(): # Validate the Maintainer field try: - daklib.utils.fix_maintainer (dsc["maintainer"]) - except daklib.utils.ParseMaintError, msg: + utils.fix_maintainer (dsc["maintainer"]) + except ParseMaintError, msg: reject("%s: Maintainer field ('%s') failed to parse: %s" \ % (dsc_filename, dsc["maintainer"], msg)) @@ -732,7 +748,7 @@ def check_dsc(): pass # Ensure the version number in the .dsc matches the version number in the .changes - epochless_dsc_version = daklib.utils.re_no_epoch.sub('', dsc["version"]) + epochless_dsc_version = utils.re_no_epoch.sub('', dsc["version"]) changes_version = files[dsc_filename]["version"] if epochless_dsc_version != files[dsc_filename]["version"]: reject("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version)) @@ -740,12 +756,12 @@ def check_dsc(): # Ensure there is a .tar.gz in the .dsc file has_tar = 0 for f in dsc_files.keys(): - m = daklib.utils.re_issource.match(f) + m = utils.re_issource.match(f) if not m: reject("%s: %s in Files field not recognised as source." % (dsc_filename, f)) - continue - type = m.group(3) - if type == "orig.tar.gz" or type == "tar.gz": + continue + ftype = m.group(3) + if ftype == "orig.tar.gz" or ftype == "tar.gz": has_tar = 1 if not has_tar: reject("%s: no .tar.gz or .orig.tar.gz in 'Files' field." % (dsc_filename)) @@ -762,6 +778,8 @@ def check_dsc(): files[orig_tar_gz] = {} files[orig_tar_gz]["size"] = os.stat(orig_tar_gz)[stat.ST_SIZE] files[orig_tar_gz]["md5sum"] = dsc_files[orig_tar_gz]["md5sum"] + files[orig_tar_gz]["sha1sum"] = dsc_files[orig_tar_gz]["sha1sum"] + files[orig_tar_gz]["sha256sum"] = dsc_files[orig_tar_gz]["sha256sum"] files[orig_tar_gz]["section"] = files[dsc_filename]["section"] files[orig_tar_gz]["priority"] = files[dsc_filename]["priority"] files[orig_tar_gz]["component"] = files[dsc_filename]["component"] @@ -778,9 +796,9 @@ def get_changelog_versions(source_dir): # Find the .dsc (again) dsc_filename = None - for file in files.keys(): - if files[file]["type"] == "dsc": - dsc_filename = file + for f in files.keys(): + if files[f]["type"] == "dsc": + dsc_filename = f # If there isn't one, we have nothing to do. (We have reject()ed the upload already) if not dsc_filename: @@ -788,14 +806,14 @@ def get_changelog_versions(source_dir): # Create a symlink mirror of the source files in our temporary directory for f in files.keys(): - m = daklib.utils.re_issource.match(f) + m = utils.re_issource.match(f) if m: src = os.path.join(source_dir, f) # If a file is missing for whatever reason, give up. 
if not os.path.exists(src): return - type = m.group(3) - if type == "orig.tar.gz" and pkg.orig_tar_gz: + ftype = m.group(3) + if ftype == "orig.tar.gz" and pkg.orig_tar_gz: continue dest = os.path.join(os.getcwd(), f) os.symlink(src, dest) @@ -811,14 +829,14 @@ def get_changelog_versions(source_dir): (result, output) = commands.getstatusoutput(cmd) if (result != 0): reject("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result)) - reject(daklib.utils.prefix_multi_line_string(output, " [dpkg-source output:] "), "") + reject(utils.prefix_multi_line_string(output, " [dpkg-source output:] "), "") return if not Cnf.Find("Dir::Queue::BTSVersionTrack"): return # Get the upstream version - upstr_version = daklib.utils.re_no_epoch.sub('', dsc["version"]) + upstr_version = utils.re_no_epoch.sub('', dsc["version"]) if re_strip_revision.search(upstr_version): upstr_version = re_strip_revision.sub('', upstr_version) @@ -830,7 +848,7 @@ def get_changelog_versions(source_dir): # Parse the changelog dsc["bts changelog"] = "" - changelog_file = daklib.utils.open_file(changelog_filename) + changelog_file = utils.open_file(changelog_filename) for line in changelog_file.readlines(): m = re_changelog_versions.match(line) if m: @@ -845,7 +863,7 @@ def get_changelog_versions(source_dir): def check_source(): # Bail out if: - # a) there's no source + # a) there's no source # or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files' # or c) the orig.tar.gz is MIA if not changes["architecture"].has_key("source") or reprocess == 2 \ @@ -854,11 +872,11 @@ def check_source(): # Create a temporary directory to extract the source into if Options["No-Action"]: - tmpdir = tempfile.mktemp() + tmpdir = tempfile.mkdtemp() else: # We're in queue/holding and can create a random directory. tmpdir = "%s" % (os.getpid()) - os.mkdir(tmpdir) + os.mkdir(tmpdir) # Move into the temporary directory cwd = os.getcwd() @@ -873,7 +891,7 @@ def check_source(): shutil.rmtree(tmpdir) except OSError, e: if errno.errorcode[e.errno] != 'EACCES': - daklib.utils.fubar("%s: couldn't remove tmp dir for source tree." % (dsc["source"])) + utils.fubar("%s: couldn't remove tmp dir for source tree." % (dsc["source"])) reject("%s: source tree could not be cleanly removed." % (dsc["source"])) # We probably have u-r or u-w directories so chmod everything @@ -881,10 +899,10 @@ def check_source(): cmd = "chmod -R u+rwx %s" % (tmpdir) result = os.system(cmd) if result != 0: - daklib.utils.fubar("'%s' failed with result %s." % (cmd, result)) + utils.fubar("'%s' failed with result %s." % (cmd, result)) shutil.rmtree(tmpdir) except: - daklib.utils.fubar("%s: couldn't remove tmp dir for source tree." % (dsc["source"])) + utils.fubar("%s: couldn't remove tmp dir for source tree." % (dsc["source"])) ################################################################################ @@ -894,47 +912,23 @@ def check_urgency (): if changes["architecture"].has_key("source"): if not changes.has_key("urgency"): changes["urgency"] = Cnf["Urgency::Default"] + changes["urgency"] = changes["urgency"].lower() if changes["urgency"] not in Cnf.ValueList("Urgency::Valid"): reject("%s is not a valid urgency; it will be treated as %s by testing." 
% (changes["urgency"], Cnf["Urgency::Default"]), "Warning: ") changes["urgency"] = Cnf["Urgency::Default"] - changes["urgency"] = changes["urgency"].lower() ################################################################################ -def check_md5sums (): - for file in files.keys(): - try: - file_handle = daklib.utils.open_file(file) - except daklib.utils.cant_open_exc: - continue - - # Check md5sum - if apt_pkg.md5sum(file_handle) != files[file]["md5sum"]: - reject("%s: md5sum check failed." % (file)) - file_handle.close() - # Check size - actual_size = os.stat(file)[stat.ST_SIZE] - size = int(files[file]["size"]) - if size != actual_size: - reject("%s: actual file size (%s) does not match size (%s) in .changes" - % (file, actual_size, size)) - - for file in dsc_files.keys(): - try: - file_handle = daklib.utils.open_file(file) - except daklib.utils.cant_open_exc: - continue +def check_hashes (): + utils.check_hash(".changes", files, "md5", apt_pkg.md5sum) + utils.check_size(".changes", files) + utils.check_hash(".dsc", dsc_files, "md5", apt_pkg.md5sum) + utils.check_size(".dsc", dsc_files) - # Check md5sum - if apt_pkg.md5sum(file_handle) != dsc_files[file]["md5sum"]: - reject("%s: md5sum check failed." % (file)) - file_handle.close() - # Check size - actual_size = os.stat(file)[stat.ST_SIZE] - size = int(dsc_files[file]["size"]) - if size != actual_size: - reject("%s: actual file size (%s) does not match size (%s) in .dsc" - % (file, actual_size, size)) + # This is stupid API, but it'll have to do for now until + # we actually have proper abstraction + for m in utils.ensure_hashes(changes, dsc, files, dsc_files): + reject(m) ################################################################################ @@ -967,7 +961,7 @@ def check_timestamps(): if files[filename]["type"] == "deb": tar.reset() try: - deb_file = daklib.utils.open_file(filename) + deb_file = utils.open_file(filename) apt_inst.debExtract(deb_file,tar.callback,"control.tar.gz") deb_file.seek(0) try: @@ -1000,59 +994,6 @@ def check_timestamps(): except: reject("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value)) -################################################################################ -################################################################################ - -# We reject packages if the release team defined a transition for them -def check_transition(sourcepkg): - - # Only check if there is a file defined (and existant) with checks. It's a little bit - # specific to Debian, not much use for others, so return early there. - if not Cnf.has_key("Dinstall::Reject::ReleaseTransitions") or not os.path.exists("%s" % (Cnf["Dinstall::Reject::ReleaseTransitions"])): - return - - # Parse the yaml file - sourcefile = file(Cnf["Dinstall::Reject::ReleaseTransitions"], 'r') - try: - transitions = load(sourcefile) - except error, msg: - # This shouldn't happen, the release team has a wrapper to check the file, but better - # safe then sorry - daklib.utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg)) - return - - # Now look through all defined transitions - for trans in transition: - t = transition[trans] - source = t["source"] - new_vers = t["new"] - - # Will be None if nothing is in testing. 
- curvers = daklib.database.get_testing_version(source) - - if curvers and apt_pkg.VersionCompare(new_vers, curvers) == 1: - # This is still valid, the current version in database is older than - # the new version we wait for - - # Check if the source we look at is affected by this. - if sourcepkg in t['packages']: - # The source is affected, lets reject it. - reject("""%s: part of the %s transition. - - Your package is part of a testing transition to get %s migrated. - - Transition description: %s - - This transition will finish when %s, version %s, reaches testing (it currently - has version %s). - This transition is managed by the Release Team and %s - is the Release-Team member responsible for it. - Please contact them or debian-release@lists.debian.org if you - need further assistance. - """ - % (sourcepkg, trans, source, t["reason"], source, new_vers, curvers, t["rm"])) - return 0 - ################################################################################ def lookup_uid_from_fingerprint(fpr): @@ -1074,8 +1015,8 @@ def check_signed_by_key(): if uid == None: uid, uid_email = changes["fingerprint"], uid may_nmu, may_sponsor = 1, 1 - # XXX by default new dds don't have a fingerprint/uid in the db atm, - # and can't get one in there if we don't allow nmu/sponsorship + # XXX by default new dds don't have a fingerprint/uid in the db atm, + # and can't get one in there if we don't allow nmu/sponsorship elif uid[:3] == "dm:": uid_email = uid[3:] may_nmu, may_sponsor = 0, 0 @@ -1090,28 +1031,32 @@ def check_signed_by_key(): if uid_name == "": sponsored = 1 else: sponsored = 1 - - if sponsored and not may_sponsor: + if ("source" in changes["architecture"] and + uid_email and utils.is_email_alias(uid_email)): + sponsor_addresses = utils.gpg_get_key_addresses(changes["fingerprint"]) + if (changes["maintaineremail"] not in sponsor_addresses and + changes["changedbyemail"] not in sponsor_addresses): + changes["sponsoremail"] = uid_email + + if sponsored and not may_sponsor: reject("%s is not authorised to sponsor uploads" % (uid)) if not sponsored and not may_nmu: source_ids = [] - check_suites = changes["distribution"].keys() - if "unstable" not in check_suites: check_suites.append("unstable") + check_suites = changes["distribution"].keys() + if "unstable" not in check_suites: check_suites.append("unstable") for suite in check_suites: - suite_id = daklib.database.get_suite_id(suite) + suite_id = database.get_suite_id(suite) q = Upload.projectB.query("SELECT s.id FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = '%s' AND sa.suite = %d" % (changes["source"], suite_id)) for si in q.getresult(): if si[0] not in source_ids: source_ids.append(si[0]) - print "source_ids: %s" % (",".join([str(x) for x in source_ids])) - is_nmu = 1 for si in source_ids: is_nmu = 1 - q = Upload.projectB.query("SELECT m.name FROM maintainer m WHERE m.id IN (SELECT maintainer FROM src_uploaders WHERE src_uploaders.source = %s)" % (si)) + q = Upload.projectB.query("SELECT m.name FROM maintainer m WHERE m.id IN (SELECT su.maintainer FROM src_uploaders su JOIN source s ON (s.id = su.source) WHERE su.source = %s AND s.dm_upload_allowed = 'yes')" % (si)) for m in q.getresult(): - (rfc822, rfc2047, name, email) = daklib.utils.fix_maintainer(m[0]) + (rfc822, rfc2047, name, email) = utils.fix_maintainer(m[0]) if email == uid_email or name == uid_name: is_nmu=0 break @@ -1120,54 +1065,18 @@ def check_signed_by_key(): for b in changes["binary"].keys(): for suite in changes["distribution"].keys(): - 
suite_id = daklib.database.get_suite_id(suite) - q = Upload.projectB.query("SELECT DISTINCT s.source FROM source s JOIN binaries b ON (s.id = b.source) JOIN bin_associations ba On (b.id = ba.bin) WHERE b.package = '%s' AND ba.suite = %s" % (b, suite_id)) - for s in q.getresult(): + suite_id = database.get_suite_id(suite) + q = Upload.projectB.query("SELECT DISTINCT s.source FROM source s JOIN binaries b ON (s.id = b.source) JOIN bin_associations ba On (b.id = ba.bin) WHERE b.package = '%s' AND ba.suite = %s" % (b, suite_id)) + for s in q.getresult(): if s[0] != changes["source"]: reject("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite)) - for file in files.keys(): - if files[file].has_key("byhand"): - reject("%s may not upload BYHAND file %s" % (uid, file)) - if files[file].has_key("new"): - reject("%s may not upload NEW file %s" % (uid, file)) - - # The remaining checks only apply to binary-only uploads right now - if changes["architecture"].has_key("source"): - return - - if not Cnf.Exists("Binary-Upload-Restrictions"): - return - - restrictions = Cnf.SubTree("Binary-Upload-Restrictions") - - # If the restrictions only apply to certain components make sure - # that the upload is actual targeted there. - if restrictions.Exists("Components"): - restricted_components = restrictions.SubTree("Components").ValueList() - is_restricted = False - for file in files: - if files[file]["component"] in restricted_components: - is_restricted = True - break - if not is_restricted: - return + for f in files.keys(): + if files[f].has_key("byhand"): + reject("%s may not upload BYHAND file %s" % (uid, f)) + if files[f].has_key("new"): + reject("%s may not upload NEW file %s" % (uid, f)) - # Assuming binary only upload restrictions are in place we then - # iterate over suite and architecture checking the key is in the - # allowed list. If no allowed list exists for a given suite or - # architecture it's assumed to be open to anyone. 
- for suite in changes["distribution"].keys(): - if not restrictions.Exists(suite): - continue - for arch in changes["architecture"].keys(): - if not restrictions.SubTree(suite).Exists(arch): - continue - allowed_keys = restrictions.SubTree("%s::%s" % (suite, arch)).ValueList() - if changes["fingerprint"] not in allowed_keys: - base_filename = os.path.basename(pkg.changes_file) - reject("%s: not signed by authorised uploader for %s/%s" - % (base_filename, suite, arch)) ################################################################################ ################################################################################ @@ -1183,9 +1092,9 @@ def upload_too_new(): file_list = pkg.files.keys() file_list.extend(pkg.dsc_files.keys()) file_list.append(pkg.changes_file) - for file in file_list: + for f in file_list: try: - last_modified = time.time()-os.path.getmtime(file) + last_modified = time.time()-os.path.getmtime(f) if last_modified < int(Cnf["Dinstall::SkipTime"]): too_new = 1 break @@ -1207,10 +1116,10 @@ def action (): # q-unapproved hax0ring queue_info = { "New": { "is": is_new, "process": acknowledge_new }, - "Autobyhand" : { "is" : is_autobyhand, "process": do_autobyhand }, + "Autobyhand" : { "is" : is_autobyhand, "process": do_autobyhand }, "Byhand" : { "is": is_byhand, "process": do_byhand }, - "OldStableUpdate" : { "is": is_oldstableupdate, - "process": do_oldstableupdate }, + "OldStableUpdate" : { "is": is_oldstableupdate, + "process": do_oldstableupdate }, "StableUpdate" : { "is": is_stableupdate, "process": do_stableupdate }, "Unembargo" : { "is": is_unembargo, "process": queue_unembargo }, "Embargo" : { "is": is_embargo, "process": queue_embargo }, @@ -1237,21 +1146,21 @@ def action (): if Options["Automatic"]: answer = 'R' else: - queue = None + qu = None for q in queues: if queue_info[q]["is"](): - queue = q + qu = q break - if queue: + if qu: print "%s for %s\n%s%s" % ( - queue.upper(), ", ".join(changes["distribution"].keys()), + qu.upper(), ", ".join(changes["distribution"].keys()), reject_message, summary), - queuekey = queue[0].upper() + queuekey = qu[0].upper() if queuekey in "RQSA": queuekey = "D" prompt = "[D]ivert, Skip, Quit ?" else: - prompt = "[%s]%s, Skip, Quit ?" % (queuekey, queue[1:].lower()) + prompt = "[%s]%s, Skip, Quit ?" 
% (queuekey, qu[1:].lower()) if Options["Automatic"]: answer = queuekey else: @@ -1261,8 +1170,8 @@ def action (): answer = 'A' while prompt.find(answer) == -1: - answer = daklib.utils.our_raw_input(prompt) - m = daklib.queue.re_default_answer.match(prompt) + answer = utils.our_raw_input(prompt) + m = queue.re_default_answer.match(prompt) if answer == "": answer = m.group(1) answer = answer[:1].upper() @@ -1274,15 +1183,15 @@ def action (): accept(summary, short_summary) remove_from_unchecked() elif answer == queuekey: - queue_info[queue]["process"](summary, short_summary) + queue_info[qu]["process"](summary, short_summary) remove_from_unchecked() elif answer == 'Q': sys.exit(0) def remove_from_unchecked(): os.chdir (pkg.directory) - for file in files.keys(): - os.unlink(file) + for f in files.keys(): + os.unlink(f) os.unlink(pkg.changes_file) ################################################################################ @@ -1294,16 +1203,16 @@ def accept (summary, short_summary): ################################################################################ def move_to_dir (dest, perms=0660, changesperms=0664): - daklib.utils.move (pkg.changes_file, dest, perms=changesperms) + utils.move (pkg.changes_file, dest, perms=changesperms) file_keys = files.keys() - for file in file_keys: - daklib.utils.move (file, dest, perms=perms) + for f in file_keys: + utils.move (f, dest, perms=perms) ################################################################################ def is_unembargo (): q = Upload.projectB.query( - "SELECT package FROM disembargo WHERE package = '%s' AND version = '%s'" % + "SELECT package FROM disembargo WHERE package = '%s' AND version = '%s'" % (changes["source"], changes["version"])) ql = q.getresult() if ql: @@ -1319,7 +1228,7 @@ def is_unembargo (): if Options["No-Action"]: return 1 Upload.projectB.query( - "INSERT INTO disembargo (package, version) VALUES ('%s', '%s')" % + "INSERT INTO disembargo (package, version) VALUES ('%s', '%s')" % (changes["source"], changes["version"])) return 1 @@ -1337,6 +1246,14 @@ def queue_unembargo (summary, short_summary): Upload.Subst["__SUMMARY__"] = summary Upload.check_override() + # Send accept mail, announce to lists, close bugs and check for + # override disparities + if not Cnf["Dinstall::Options::No-Mail"]: + Upload.Subst["__SUITE__"] = "" + mail_message = utils.TemplateSubst(Upload.Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted") + utils.send_mail(mail_message) + Upload.announce(short_summary, 1) + ################################################################################ def is_embargo (): @@ -1355,16 +1272,24 @@ def queue_embargo (summary, short_summary): Upload.Subst["__SUMMARY__"] = summary Upload.check_override() + # Send accept mail, announce to lists, close bugs and check for + # override disparities + if not Cnf["Dinstall::Options::No-Mail"]: + Upload.Subst["__SUITE__"] = "" + mail_message = utils.TemplateSubst(Upload.Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted") + utils.send_mail(mail_message) + Upload.announce(short_summary, 1) + ################################################################################ def is_stableupdate (): if not changes["distribution"].has_key("proposed-updates"): - return 0 + return 0 if not changes["architecture"].has_key("source"): - pusuite = daklib.database.get_suite_id("proposed-updates") + pusuite = database.get_suite_id("proposed-updates") q = Upload.projectB.query( - "SELECT S.source FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE 
s.source = '%s' AND s.version = '%s' AND sa.suite = %d" % + "SELECT S.source FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = '%s' AND s.version = '%s' AND sa.suite = %d" % (changes["source"], changes["version"], pusuite)) ql = q.getresult() if ql: @@ -1378,7 +1303,7 @@ def do_stableupdate (summary, short_summary): Logger.log(["Moving to proposed-updates", pkg.changes_file]); Upload.dump_vars(Cnf["Dir::Queue::ProposedUpdates"]); - move_to_dir(Cnf["Dir::Queue::ProposedUpdates"]) + move_to_dir(Cnf["Dir::Queue::ProposedUpdates"], perms=0664) # Check for override disparities Upload.Subst["__SUMMARY__"] = summary; @@ -1388,12 +1313,12 @@ def do_stableupdate (summary, short_summary): def is_oldstableupdate (): if not changes["distribution"].has_key("oldstable-proposed-updates"): - return 0 + return 0 if not changes["architecture"].has_key("source"): - pusuite = daklib.database.get_suite_id("oldstable-proposed-updates") + pusuite = database.get_suite_id("oldstable-proposed-updates") q = Upload.projectB.query( - "SELECT S.source FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = '%s' AND s.version = '%s' AND sa.suite = %d" % + "SELECT S.source FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = '%s' AND s.version = '%s' AND sa.suite = %d" % (changes["source"], changes["version"], pusuite)) ql = q.getresult() if ql: @@ -1407,7 +1332,7 @@ def do_oldstableupdate (summary, short_summary): Logger.log(["Moving to oldstable-proposed-updates", pkg.changes_file]); Upload.dump_vars(Cnf["Dir::Queue::OldProposedUpdates"]); - move_to_dir(Cnf["Dir::Queue::OldProposedUpdates"]) + move_to_dir(Cnf["Dir::Queue::OldProposedUpdates"], perms=0664) # Check for override disparities Upload.Subst["__SUMMARY__"] = summary; @@ -1418,61 +1343,61 @@ def do_oldstableupdate (summary, short_summary): def is_autobyhand (): all_auto = 1 any_auto = 0 - for file in files.keys(): - if files[file].has_key("byhand"): - any_auto = 1 - - # filename is of form "PKG_VER_ARCH.EXT" where PKG, VER and ARCH - # don't contain underscores, and ARCH doesn't contain dots. - # further VER matches the .changes Version:, and ARCH should be in - # the .changes Architecture: list. - if file.count("_") < 2: - all_auto = 0 - continue - - (pkg, ver, archext) = file.split("_", 2) - if archext.count(".") < 1 or changes["version"] != ver: - all_auto = 0 - continue - - ABH = Cnf.SubTree("AutomaticByHandPackages") - if not ABH.has_key(pkg) or \ - ABH["%s::Source" % (pkg)] != changes["source"]: - print "not match %s %s" % (pkg, changes["source"]) - all_auto = 0 - continue - - (arch, ext) = archext.split(".", 1) - if arch not in changes["architecture"]: - all_auto = 0 - continue - - files[file]["byhand-arch"] = arch - files[file]["byhand-script"] = ABH["%s::Script" % (pkg)] + for f in files.keys(): + if files[f].has_key("byhand"): + any_auto = 1 + + # filename is of form "PKG_VER_ARCH.EXT" where PKG, VER and ARCH + # don't contain underscores, and ARCH doesn't contain dots. + # further VER matches the .changes Version:, and ARCH should be in + # the .changes Architecture: list. 
+ if f.count("_") < 2: + all_auto = 0 + continue + + (pckg, ver, archext) = f.split("_", 2) + if archext.count(".") < 1 or changes["version"] != ver: + all_auto = 0 + continue + + ABH = Cnf.SubTree("AutomaticByHandPackages") + if not ABH.has_key(pckg) or \ + ABH["%s::Source" % (pckg)] != changes["source"]: + print "not match %s %s" % (pckg, changes["source"]) + all_auto = 0 + continue + + (arch, ext) = archext.split(".", 1) + if arch not in changes["architecture"]: + all_auto = 0 + continue + + files[f]["byhand-arch"] = arch + files[f]["byhand-script"] = ABH["%s::Script" % (pckg)] return any_auto and all_auto def do_autobyhand (summary, short_summary): print "Attempting AUTOBYHAND." byhandleft = 0 - for file in files.keys(): - byhandfile = file - if not files[file].has_key("byhand"): + for f in files.keys(): + byhandfile = f + if not files[f].has_key("byhand"): continue - if not files[file].has_key("byhand-script"): + if not files[f].has_key("byhand-script"): byhandleft = 1 continue os.system("ls -l %s" % byhandfile) result = os.system("%s %s %s %s %s" % ( - files[file]["byhand-script"], byhandfile, - changes["version"], files[file]["byhand-arch"], + files[f]["byhand-script"], byhandfile, + changes["version"], files[f]["byhand-arch"], os.path.abspath(pkg.changes_file))) if result == 0: os.unlink(byhandfile) - del files[file] + del files[f] else: - print "Error processing %s, left as byhand." % (file) + print "Error processing %s, left as byhand." % (f) byhandleft = 1 if byhandleft: @@ -1483,8 +1408,8 @@ def do_autobyhand (summary, short_summary): ################################################################################ def is_byhand (): - for file in files.keys(): - if files[file].has_key("byhand"): + for f in files.keys(): + if files[f].has_key("byhand"): return 1 return 0 @@ -1502,8 +1427,8 @@ def do_byhand (summary, short_summary): ################################################################################ def is_new (): - for file in files.keys(): - if files[file].has_key("new"): + for f in files.keys(): + if files[f].has_key("new"): return 1 return 0 @@ -1519,8 +1444,8 @@ def acknowledge_new (summary, short_summary): if not Options["No-Mail"]: print "Sending new ack." Subst["__SUMMARY__"] = summary - new_ack_message = daklib.utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.new") - daklib.utils.send_mail(new_ack_message) + new_ack_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.new") + utils.send_mail(new_ack_message) ################################################################################ @@ -1561,7 +1486,7 @@ def process_it (changes_file): # Relativize the filename so we use the copy in holding # rather than the original... 
pkg.changes_file = os.path.basename(pkg.changes_file) - changes["fingerprint"] = daklib.utils.check_signature(pkg.changes_file, reject) + changes["fingerprint"] = utils.check_signature(pkg.changes_file, reject) if changes["fingerprint"]: valid_changes_p = check_changes() else: @@ -1573,19 +1498,17 @@ def process_it (changes_file): valid_dsc_p = check_dsc() if valid_dsc_p: check_source() - check_md5sums() + check_hashes() check_urgency() check_timestamps() check_signed_by_key() - if changes["architecture"].has_key("source"): - check_transition(changes["source"]) Upload.update_subst(reject_message) action() except SystemExit: raise except: print "ERROR" - traceback.print_exc(file=sys.stderr) + traceback.print_exc(file=sys.stderr) pass # Restore previous WD @@ -1603,18 +1526,18 @@ def main(): Options["Automatic"] = "" # Ensure all the arguments we were given are .changes files - for file in changes_files: - if not file.endswith(".changes"): - daklib.utils.warn("Ignoring '%s' because it's not a .changes file." % (file)) - changes_files.remove(file) + for f in changes_files: + if not f.endswith(".changes"): + utils.warn("Ignoring '%s' because it's not a .changes file." % (f)) + changes_files.remove(f) if changes_files == []: - daklib.utils.fubar("Need at least one .changes file as an argument.") + utils.fubar("Need at least one .changes file as an argument.") # Check that we aren't going to clash with the daily cron job if not Options["No-Action"] and os.path.exists("%s/daily.lock" % (Cnf["Dir::Lock"])) and not Options["No-Lock"]: - daklib.utils.fubar("Archive maintenance in progress. Try again later.") + utils.fubar("Archive maintenance in progress. Try again later.") # Obtain lock if not in no-action mode and initialize the log @@ -1624,10 +1547,10 @@ def main(): fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB) except IOError, e: if errno.errorcode[e.errno] == 'EACCES' or errno.errorcode[e.errno] == 'EAGAIN': - daklib.utils.fubar("Couldn't obtain lock; assuming another 'dak process-unchecked' is already running.") + utils.fubar("Couldn't obtain lock; assuming another 'dak process-unchecked' is already running.") else: raise - Logger = Upload.Logger = daklib.logging.Logger(Cnf, "process-unchecked") + Logger = Upload.Logger = logging.Logger(Cnf, "process-unchecked") # debian-{devel-,}-changes@lists.debian.org toggles writes access based on this header bcc = "X-DAK: dak process-unchecked\nX-Katie: $Revision: 1.65 $" @@ -1638,7 +1561,7 @@ def main(): # Sort the .changes files so that we process sourceful ones first - changes_files.sort(daklib.utils.changes_compare) + changes_files.sort(utils.changes_compare) # Process the changes files for changes_file in changes_files: @@ -1655,7 +1578,7 @@ def main(): sets = "set" if accept_count > 1: sets = "sets" - print "Accepted %d package %s, %s." % (accept_count, sets, daklib.utils.size_type(int(accept_bytes))) + print "Accepted %d package %s, %s." % (accept_count, sets, utils.size_type(int(accept_bytes))) Logger.log(["total",accept_count,accept_bytes]) if not Options["No-Action"]: @@ -1665,4 +1588,3 @@ def main(): if __name__ == '__main__': main() -
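
A note on the hash-checking consolidation in this diff: the old per-file check_md5sums() loops are replaced by a single check_hashes() that delegates to utils.check_hash(), utils.check_size() and utils.ensure_hashes(). The sketch below is a minimal, self-contained approximation of that consolidation — the dictionary layout (files[f]["md5sum"], files[f]["size"]) and the call shapes are taken from the diff itself, but the helper bodies are illustrative stand-ins, not daklib's actual implementation: md5_hex substitutes for apt_pkg.md5sum, and reject is passed in explicitly rather than being a module global.

    import hashlib
    import os

    def md5_hex(data):
        # Stand-in for apt_pkg.md5sum(); hashes a byte string, returns hex.
        return hashlib.md5(data).hexdigest()

    def check_hash(label, file_dict, hash_name, hash_func, reject):
        # Compare the recorded "<hash>sum" entry of every file against the
        # file on disk, mirroring the files/dsc_files structures in dak.
        for f, meta in file_dict.items():
            try:
                with open(f, "rb") as fh:
                    actual = hash_func(fh.read())
            except IOError:
                continue  # unreadable files are reported by check_files()
            if actual != meta["%ssum" % hash_name]:
                reject("%s: %s check failed for %s." % (f, hash_name, label))

    def check_size(label, file_dict, reject):
        # Compare the recorded size of every file against the size on disk.
        for f, meta in file_dict.items():
            try:
                actual_size = os.stat(f).st_size
            except OSError:
                continue
            if actual_size != int(meta["size"]):
                reject("%s: actual file size (%s) does not match size (%s) in %s"
                       % (f, actual_size, meta["size"], label))

    def check_hashes(files, dsc_files, reject):
        # One entry point for both the .changes and .dsc file lists, shaped
        # like the new check_hashes() in this diff (minus ensure_hashes()).
        check_hash(".changes", files, "md5", md5_hex, reject)
        check_size(".changes", files, reject)
        check_hash(".dsc", dsc_files, "md5", md5_hex, reject)
        check_size(".dsc", dsc_files, reject)

Hiding the loops behind these helpers is presumably what lets utils.ensure_hashes() also cover the sha1sum/sha256sum entries this diff starts carrying for the .orig.tar.gz file, without the callers in process_it() having to change again.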