X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=daklib%2Fqueue.py;h=d0772276f20158f9ce3206d319ad3857222c5c8d;hb=5ae0dac7c89af97eab8ba4fb34e1099a3b9dddc7;hp=c5cd4a68b8595992bfeeafd2ebdd2fc30bdf12cb;hpb=e39ac4bb9c19e58585b64fba058fb6218a24d9e3;p=dak.git

diff --git a/daklib/queue.py b/daklib/queue.py
old mode 100755
new mode 100644
index c5cd4a68..d0772276
--- a/daklib/queue.py
+++ b/daklib/queue.py
@@ -51,7 +51,7 @@ from holding import Holding
 from urgencylog import UrgencyLog
 from dbconn import *
 from summarystats import SummaryStats
-from utils import parse_changes, check_dsc_files, build_package_set
+from utils import parse_changes, check_dsc_files, build_package_list
 from textutils import fix_maintainer
 from lintian import parse_lintian_output, generate_reject_messages
 from contents import UnpackedSource
@@ -133,8 +133,8 @@ def determine_new(filename, changes, files, warn=1, session = None, dsc = None,
 
     # Try to get the Package-Set field from an included .dsc file (if possible).
     if dsc:
-        for package, entry in build_package_set(dsc, session).items():
-            if not new.has_key(package):
+        for package, entry in build_package_list(dsc, session).items():
+            if package not in new:
                 new[package] = entry
 
     # Build up a list of potentially new things
@@ -290,9 +290,9 @@ class TarTime(object):
 
     def callback(self, member, data):
         if member.mtime > self.future_cutoff:
-            self.future_files[Name] = MTime
+            self.future_files[Name] = member.mtime
         if member.mtime < self.past_cutoff:
-            self.ancient_files[Name] = MTime
+            self.ancient_files[Name] = member.mtime
 
 ###############################################################################
@@ -386,8 +386,9 @@ def edit_note(note, upload, session, trainee=False):
 
 ###############################################################################
 
+# FIXME: Should move into the database
 # suite names DMs can upload to
-dm_suites = ['unstable', 'experimental']
+dm_suites = ['unstable', 'experimental', 'squeeze-backports']
 
 def get_newest_source(source, session):
     'returns the newest DBSource object in dm_suites'
@@ -557,7 +558,7 @@ class Upload(object):
         except CantOpenError:
             self.rejects.append("%s: can't read file." % (filename))
             return False
-        except ParseChangesError, line:
+        except ParseChangesError as line:
             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
             return False
         except ChangesUnicodeError:
@@ -567,10 +568,10 @@ class Upload(object):
         # Parse the Files field from the .changes into another dictionary
         try:
             self.pkg.files.update(utils.build_file_list(self.pkg.changes))
-        except ParseChangesError, line:
+        except ParseChangesError as line:
             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
             return False
-        except UnknownFormatError, format:
+        except UnknownFormatError as format:
             self.rejects.append("%s: unknown format '%s'." % (filename, format))
             return False
@@ -608,7 +609,7 @@ class Upload(object):
              self.pkg.changes["maintainername"], self.pkg.changes["maintaineremail"]) = \
                    fix_maintainer (self.pkg.changes["maintainer"])
-        except ParseMaintError, msg:
+        except ParseMaintError as msg:
             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                    % (filename, self.pkg.changes["maintainer"], msg))
@@ -619,7 +620,7 @@ class Upload(object):
              self.pkg.changes["changedbyname"], self.pkg.changes["changedbyemail"]) = \
                    fix_maintainer (self.pkg.changes.get("changed-by", ""))
-        except ParseMaintError, msg:
+        except ParseMaintError as msg:
             self.pkg.changes["changedby822"] = ""
             self.pkg.changes["changedby2047"] = ""
             self.pkg.changes["changedbyname"] = ""
@@ -654,7 +655,7 @@ class Upload(object):
         Cnf = Config()
 
         # Handle suite mappings
-        for m in Cnf.ValueList("SuiteMappings"):
+        for m in Cnf.value_list("SuiteMappings"):
             args = m.split()
             mtype = args[0]
             if mtype == "map" or mtype == "silent-map":
@@ -700,7 +701,7 @@ class Upload(object):
 
         # Ensure target distributions exist
         for suite in self.pkg.changes["distribution"].keys():
-            if not Cnf.has_key("Suite::%s" % (suite)):
+            if not get_suite(suite.lower()):
                 self.rejects.append("Unknown distribution `%s'." % (suite))
 
    ###########################################################################
@@ -712,47 +713,39 @@ class Upload(object):
         # Extract package control information
         deb_file = utils.open_file(f)
         try:
-            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
+            control = apt_pkg.TagSection(utils.deb_extract_control(deb_file))
         except:
-            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
+            self.rejects.append("%s: deb_extract_control() raised %s." % (f, sys.exc_info()[0]))
             deb_file.close()
             # Can't continue, none of the checks on control would work.
             return
 
-        # Check for mandantory "Description:"
-        deb_file.seek(0)
-        try:
-            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
-        except:
-            self.rejects.append("%s: Missing Description in binary package" % (f))
-            return
-
         deb_file.close()
 
         # Check for mandatory fields
-        for field in [ "Package", "Architecture", "Version" ]:
-            if control.Find(field) == None:
+        for field in [ "Package", "Architecture", "Version", "Description" ]:
+            if field not in control:
                 # Can't continue
                 self.rejects.append("%s: No %s field in control." % (f, field))
                 return
 
         # Ensure the package name matches the one give in the .changes
-        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
-            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
+        if not self.pkg.changes["binary"].has_key(control.find("Package", "")):
+            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.find("Package", "")))
 
         # Validate the package field
-        package = control.Find("Package")
+        package = control["Package"]
         if not re_valid_pkg_name.match(package):
             self.rejects.append("%s: invalid package name '%s'." % (f, package))
 
         # Validate the version field
-        version = control.Find("Version")
+        version = control["Version"]
         if not re_valid_version.match(version):
             self.rejects.append("%s: invalid version number '%s'." % (f, version))
 
         # Ensure the architecture of the .deb is one we know about.
         default_suite = cnf.get("Dinstall::DefaultSuite", "unstable")
-        architecture = control.Find("Architecture")
+        architecture = control["Architecture"]
         upload_suite = self.pkg.changes["distribution"].keys()[0]
 
         if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
@@ -765,13 +758,13 @@ class Upload(object):
             self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
 
         # Sanity-check the Depends field
-        depends = control.Find("Depends")
+        depends = control.find("Depends")
         if depends == '':
             self.rejects.append("%s: Depends field is empty." % (f))
 
         # Sanity-check the Provides field
-        provides = control.Find("Provides")
-        if provides:
+        provides = control.find("Provides")
+        if provides is not None:
             provide = re_spacestrip.sub('', provides)
             if provide == '':
                 self.rejects.append("%s: Provides field is empty." % (f))
@@ -782,8 +775,8 @@ class Upload(object):
 
         # If there is a Built-Using field, we need to check we can find the
         # exact source version
-        built_using = control.Find("Built-Using")
-        if built_using:
+        built_using = control.find("Built-Using")
+        if built_using is not None:
             try:
                 entry["built-using"] = []
                 for dep in apt_pkg.parse_depends(built_using):
@@ -800,24 +793,24 @@ class Upload(object):
                     else:
                         entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )
 
-            except ValueError, e:
+            except ValueError as e:
                 self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))
 
         # Check the section & priority match those given in the .changes (non-fatal)
-        if control.Find("Section") and entry["section"] != "" \
-           and entry["section"] != control.Find("Section"):
+        if control.find("Section") and entry["section"] != "" \
+           and entry["section"] != control.find("Section"):
             self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
-                                 (f, control.Find("Section", ""), entry["section"]))
-        if control.Find("Priority") and entry["priority"] != "" \
-           and entry["priority"] != control.Find("Priority"):
+                                 (f, control.find("Section", ""), entry["section"]))
+        if control.find("Priority") and entry["priority"] != "" \
+           and entry["priority"] != control.find("Priority"):
             self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
-                                 (f, control.Find("Priority", ""), entry["priority"]))
+                                 (f, control.find("Priority", ""), entry["priority"]))
 
         entry["package"] = package
         entry["architecture"] = architecture
         entry["version"] = version
-        entry["maintainer"] = control.Find("Maintainer", "")
+        entry["maintainer"] = control.find("Maintainer", "")
 
         if f.endswith(".udeb"):
             self.pkg.files[f]["dbtype"] = "udeb"
@@ -826,7 +819,7 @@ class Upload(object):
         else:
             self.rejects.append("%s is neither a .deb or a .udeb." % (f))
 
-        entry["source"] = control.Find("Source", entry["package"])
+        entry["source"] = control.find("Source", entry["package"])
 
         # Get the source version
         source = entry["source"]
@@ -851,7 +844,7 @@ class Upload(object):
         if entry["package"] != file_package:
             self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                 (f, file_package, entry["dbtype"], entry["package"]))
-        epochless_version = re_no_epoch.sub('', control.Find("Version"))
+        epochless_version = re_no_epoch.sub('', control.find("Version"))
 
         # version
         file_version = m.group(2)
@@ -879,15 +872,22 @@ class Upload(object):
             # Check in one of the other directories
             source_epochless_version = re_no_epoch.sub('', source_version)
             dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
-            if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
+
+            byhand_dir = get_policy_queue('byhand', session).path
+            new_dir = get_policy_queue('new', session).path
+
+            if os.path.exists(os.path.join(byhand_dir, dsc_filename)):
                 entry["byhand"] = 1
-            elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
+            elif os.path.exists(os.path.join(new_dir, dsc_filename)):
                 entry["new"] = 1
             else:
                 dsc_file_exists = False
-                for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
-                    if cnf.has_key("Dir::Queue::%s" % (myq)):
-                        if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
+                # TODO: Don't hardcode this list: use all relevant queues
+                # The question is how to determine what is relevant
+                for queue_name in ["embargoed", "unembargoed", "proposedupdates", "oldproposedupdates"]:
+                    queue = get_policy_queue(queue_name, session)
+                    if queue:
+                        if os.path.exists(os.path.join(queue.path, dsc_filename)):
                             dsc_file_exists = True
                             break
@@ -954,15 +954,14 @@ class Upload(object):
             return
 
         # Handle component mappings
-        for m in cnf.ValueList("ComponentMappings"):
+        for m in cnf.value_list("ComponentMappings"):
             (source, dest) = m.split()
             if entry["component"] == source:
                 entry["original component"] = source
                 entry["component"] = dest
 
         # Ensure the component is valid for the target suite
-        if cnf.has_key("Suite:%s::Components" % (suite)) and \
-           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
+        if entry["component"] not in get_component_names(session):
             self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
             return
@@ -1040,7 +1039,7 @@ class Upload(object):
                    or (dbc.in_queue is not None
                        and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                     self.rejects.append("%s file already known to dak" % base_filename)
-            except NoResultFound, e:
+            except NoResultFound as e:
                 # not known, good
                 pass
@@ -1049,10 +1048,11 @@ class Upload(object):
         for f, entry in self.pkg.files.items():
             # Ensure the file does not already exist in one of the accepted directories
-            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
-                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
-                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
-                    self.rejects.append("%s file already exists in the %s directory." % (f, d))
+            # TODO: Dynamically generate this list
+            for queue_name in [ "byhand", "new", "proposedupdates", "oldproposedupdates", "embargoed", "unembargoed" ]:
+                queue = get_policy_queue(queue_name, session)
+                if queue and os.path.exists(os.path.join(queue.path, f)):
+                    self.rejects.append("%s file already exists in the %s queue." % (f, queue_name))
 
             if not re_taint_free.match(f):
                 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
@@ -1108,7 +1108,7 @@ class Upload(object):
         if not has_source:
             self.rejects.append("no source found and Architecture line in changes mention source.")
 
-        if (not has_binaries) and (not cnf.FindB("Dinstall::AllowSourceOnlyUploads")):
+        if (not has_binaries) and (not cnf.find_b("Dinstall::AllowSourceOnlyUploads")):
            self.rejects.append("source only uploads are not supported.")
 
    ###########################################################################
@@ -1156,9 +1156,9 @@ class Upload(object):
         except CantOpenError:
             if not action:
                 return False, "%s: can't read file." % (dsc_filename)
-        except ParseChangesError, line:
+        except ParseChangesError as line:
             return False, "%s: parse error, can't grok: %s." % (dsc_filename, line)
-        except InvalidDscError, line:
+        except InvalidDscError as line:
             return False, "%s: syntax error on line %s." % (dsc_filename, line)
         except ChangesUnicodeError:
             return False, "%s: dsc file not proper utf-8." % (dsc_filename)
@@ -1173,6 +1173,9 @@ class Upload(object):
         if not self.pkg.changes["architecture"].has_key("source"):
             return True
 
+        if session is None:
+            session = DBConn().session()
+
         (status, reason) = self.load_dsc(action=action)
         if not status:
             self.rejects.append(reason)
@@ -1189,10 +1192,10 @@ class Upload(object):
         except NoFilesFieldError:
             self.rejects.append("%s: no Files: field." % (dsc_filename))
             return False
-        except UnknownFormatError, format:
+        except UnknownFormatError as format:
             self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
             return False
-        except ParseChangesError, line:
+        except ParseChangesError as line:
             self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
             return False
@@ -1210,7 +1213,11 @@ class Upload(object):
 
         # Only a limited list of source formats are allowed in each suite
         for dist in self.pkg.changes["distribution"].keys():
-            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
+            suite = get_suite(dist, session=session)
+            if not suite:
+                self.rejects.append("%s: cannot find suite %s when checking source formats" % (dsc_filename, dist))
+                continue
+            allowed = [ x.format_name for x in suite.srcformats ]
             if self.pkg.dsc["format"] not in allowed:
                 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " %
                                     (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
@@ -1218,7 +1225,7 @@ class Upload(object):
         try:
             # We ignore the return value
             fix_maintainer(self.pkg.dsc["maintainer"])
-        except ParseMaintError, msg:
+        except ParseMaintError as msg:
             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                 % (dsc_filename, self.pkg.dsc["maintainer"], msg))
@@ -1228,7 +1235,7 @@ class Upload(object):
             if field:
                 # Have apt try to parse them...
                 try:
-                    apt_pkg.ParseSrcDepends(field)
+                    apt_pkg.parse_src_depends(field)
                 except:
                     self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
@@ -1311,11 +1318,11 @@ class Upload(object):
         # Extract the source
         try:
             unpacked = UnpackedSource(dsc_filename)
-        except:
-            self.rejects.append("'dpkg-source -x' failed for %s." % dsc_filename)
+        except Exception as e:
+            self.rejects.append("'dpkg-source -x' failed for %s. (%s)" % (dsc_filename, str(e)))
             return
 
-        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
+        if not cnf.find("Dir::BTSVersionTrack"):
             return
 
         # Get the upstream version
@@ -1362,7 +1369,7 @@ class Upload(object):
 
         try:
             shutil.rmtree(tmpdir)
-        except OSError, e:
+        except OSError as e:
             if e.errno != errno.EACCES:
                 print "foobar"
                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
@@ -1375,7 +1382,7 @@ class Upload(object):
             if result != 0:
                 utils.fubar("'%s' failed with result %s." % (cmd, result))
             shutil.rmtree(tmpdir)
-        except Exception, e:
+        except Exception as e:
             print "foobar2 (%s)" % e
             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
@@ -1495,16 +1502,16 @@ class Upload(object):
                 continue
 
             # Look in some other queues for the file
-            queues = ('New', 'Byhand', 'ProposedUpdates',
-                'OldProposedUpdates', 'Embargoed', 'Unembargoed')
+            queue_names = ['new', 'byhand',
+                           'proposedupdates', 'oldproposedupdates',
+                           'embargoed', 'unembargoed']
 
-            for queue in queues:
-                if not cnf.get('Dir::Queue::%s' % queue):
+            for queue_name in queue_names:
+                queue = get_policy_queue(queue_name, session)
+                if not queue:
                     continue
 
-                queuefile_path = os.path.join(
-                    cnf['Dir::Queue::%s' % queue], filename
-                )
+                queuefile_path = os.path.join(queue.path, filename)
 
                 if not os.path.exists(queuefile_path):
                     # Does not exist in this queue
                     continue
@@ -1548,7 +1555,7 @@ class Upload(object):
 
         try:
             lintiantags = yaml.load(sourcecontent)['lintian']
-        except yaml.YAMLError, msg:
+        except yaml.YAMLError as msg:
             utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
             return
@@ -1599,7 +1606,7 @@ class Upload(object):
         if not self.pkg.changes.has_key("urgency"):
             self.pkg.changes["urgency"] = cnf["Urgency::Default"]
         self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
-        if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
+        if self.pkg.changes["urgency"] not in cnf.value_list("Urgency::Valid"):
             self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
                                  (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
             self.pkg.changes["urgency"] = cnf["Urgency::Default"]
@@ -1640,9 +1647,13 @@ class Upload(object):
                 self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])." % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
         except:
-            self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
+            self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_info()[0], sys.exc_info()[1]))
 
     def check_if_upload_is_sponsored(self, uid_email, uid_name):
+        for key in "maintaineremail", "changedbyemail", "maintainername", "changedbyname":
+            if not self.pkg.changes.has_key(key):
+                return False
+        uid_email = '@'.join(uid_email.split('@')[:2])
         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
             sponsored = False
         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
             sponsored = False
@@ -1651,8 +1662,12 @@ class Upload(object):
                 sponsored = True
         else:
             sponsored = True
+        sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
+        debian_emails = filter(lambda addr: addr.endswith('@debian.org'), sponsor_addresses)
+        if uid_email not in debian_emails:
+            if debian_emails:
+                uid_email = debian_emails[0]
         if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
-            sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
             if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
                 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
                 self.pkg.changes["sponsoremail"] = uid_email
@@ -1785,7 +1800,7 @@ class Upload(object):
         r = get_newest_source(self.pkg.changes["source"], session)
         if r is None:
-            rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
+            rej = "Could not find existing source package %s in the DM allowed suites and this is a DM upload" % self.pkg.changes["source"]
             self.rejects.append(rej)
             return
@@ -1840,7 +1855,7 @@ class Upload(object):
 
         # Also only check if there is a file defined (and existant) with
         # checks.
-        transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
+        transpath = cnf.get("Dinstall::ReleaseTransitions", "")
         if transpath == "" or not os.path.exists(transpath):
             return
@@ -1849,7 +1864,7 @@ class Upload(object):
         sourcecontent = sourcefile.read()
         try:
             transitions = yaml.load(sourcecontent)
-        except yaml.YAMLError, msg:
+        except yaml.YAMLError as msg:
             # This shouldn't happen, there is a wrapper to edit the file which
             # checks it, but we prefer to be safe than ending up rejecting
             # everything.
@@ -1865,7 +1880,7 @@ class Upload(object):
             # Will be None if nothing is in testing.
             current = get_source_in_suite(source, "testing", session)
             if current is not None:
-                compare = apt_pkg.VersionCompare(current.version, expected)
+                compare = apt_pkg.version_compare(current.version, expected)
 
             if current is None or compare < 0:
                 # This is still valid, the current version in testing is older than
@@ -1911,8 +1926,7 @@ transition is done."""
 
         # This is for direport's benefit...
         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
 
-        if byhand or new:
-            summary += "Changes: " + f
+        summary += "\n\nChanges:\n" + f
 
         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
@@ -1993,26 +2007,31 @@ distribution."""
         """
         cnf = Config()
-        announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
+
+        # Skip all of this if not sending mail to avoid confusing people
+        if cnf.has_key("Dinstall::Options::No-Mail") and cnf["Dinstall::Options::No-Mail"]:
+            return ""
 
         # Only do announcements for source uploads with a recent dpkg-dev installed
         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
            self.pkg.changes["architecture"].has_key("source"):
             return ""
 
-        lists_done = {}
-        summary = ""
+        announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
 
-        self.Subst["__SHORT_SUMMARY__"] = short_summary
+        lists_todo = {}
+        summary = ""
 
+        # Get a unique list of target lists
         for dist in self.pkg.changes["distribution"].keys():
             suite = get_suite(dist)
             if suite is None: continue
-            announce_list = suite.announce
-            if announce_list == "" or lists_done.has_key(announce_list):
-                continue
+            for tgt in suite.announce:
+                lists_todo[tgt] = 1
 
-            lists_done[announce_list] = 1
+        self.Subst["__SHORT_SUMMARY__"] = short_summary
+
+        for announce_list in lists_todo.keys():
             summary += "Announcing to %s\n" % (announce_list)
 
             if action:
@@ -2028,7 +2047,7 @@ distribution."""
 
             del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
 
-        if cnf.FindB("Dinstall::CloseBugs") and cnf.has_key("Dinstall::BugServer"):
+        if cnf.find_b("Dinstall::CloseBugs") and cnf.has_key("Dinstall::BugServer"):
             summary = self.close_bugs(summary, action)
 
         del self.Subst["__SHORT_SUMMARY__"]
@@ -2174,7 +2193,7 @@ distribution."""
 
         # Move the .changes into the 'done' directory
         ye, mo, da = time.gmtime()[0:3]
-        donedir = os.path.join(cnf["Dir::Queue::Done"], str(ye), "%0.2d" % mo, "%0.2d" % da)
+        donedir = os.path.join(cnf["Dir::Done"], str(ye), "%0.2d" % mo, "%0.2d" % da)
         if not os.path.isdir(donedir):
             os.makedirs(donedir)
@@ -2192,19 +2211,19 @@ distribution."""
         self.announce(short_summary, 1)
 
         ## Helper stuff for DebBugs Version Tracking
-        if cnf.Find("Dir::Queue::BTSVersionTrack"):
+        if cnf.find("Dir::BTSVersionTrack"):
             if self.pkg.changes["architecture"].has_key("source"):
-                (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
+                (fd, temp_filename) = utils.temp_filename(cnf["Dir::BTSVersionTrack"], prefix=".")
                 version_history = os.fdopen(fd, 'w')
                 version_history.write(self.pkg.dsc["bts changelog"])
                 version_history.close()
-                filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
+                filename = "%s/%s" % (cnf["Dir::BTSVersionTrack"],
                                       self.pkg.changes_file[:-8]+".versions")
                 os.rename(temp_filename, filename)
-                os.chmod(filename, 0644)
+                os.chmod(filename, 0o644)
 
             # Write out the binary -> source mapping.
-            (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
+            (fd, temp_filename) = utils.temp_filename(cnf["Dir::BTSVersionTrack"], prefix=".")
             debinfo = os.fdopen(fd, 'w')
             for name, entry in sorted(self.pkg.files.items()):
                 if entry["type"] == "deb":
@@ -2213,10 +2232,10 @@ distribution."""
                                      entry["source version"]])
                     debinfo.write(line+"\n")
             debinfo.close()
-            filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
+            filename = "%s/%s" % (cnf["Dir::BTSVersionTrack"],
                                   self.pkg.changes_file[:-8]+".debinfo")
             os.rename(temp_filename, filename)
-            os.chmod(filename, 0644)
+            os.chmod(filename, 0o644)
 
         session.commit()
@@ -2245,7 +2264,7 @@ distribution."""
         cnf = Config()
 
         # Abandon the check if override disparity checks have been disabled
-        if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
+        if not cnf.find_b("Dinstall::OverrideDisparityCheck"):
             return
 
         summary = self.pkg.check_override()
@@ -2315,11 +2334,11 @@ distribution."""
            if os.access(file_entry, os.R_OK) == 0:
                continue
 
-            dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
+            dest_file = os.path.join(cnf["Dir::Reject"], file_entry)
 
            try:
-                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
-            except OSError, e:
+                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0o644)
+            except OSError as e:
                # File exists? Let's find a new name by adding a number
                if e.errno == errno.EEXIST:
                    try:
@@ -2327,13 +2346,13 @@ distribution."""
                     except NoFreeFilenameError:
                         # Something's either gone badly Pete Tong, or
                         # someone is trying to exploit us.
-                        utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
+                        utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Reject"]))
                         return
 
                     # Make sure we really got it
                     try:
-                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
-                    except OSError, e:
+                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0o644)
+                    except OSError as e:
                         # Likewise
                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                         return
@@ -2341,7 +2360,7 @@ distribution."""
                     raise
             # If we got here, we own the destination file, so we can
            # safely overwrite it.
-            utils.move(file_entry, dest_file, 1, perms=0660)
+            utils.move(file_entry, dest_file, 1, perms=0o660)
            os.close(dest_fd)
@@ -2397,17 +2416,26 @@ distribution."""
         cnf = Config()
 
         reason_filename = self.pkg.changes_file[:-8] + ".reason"
-        reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
+        reason_filename = os.path.join(cnf["Dir::Reject"], reason_filename)
+        changesfile = os.path.join(cnf["Dir::Reject"], self.pkg.changes_file)
 
         # Move all the files into the reject directory
         reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
         self.force_reject(reject_files)
 
+        # Change permissions of the .changes file to be world readable
+        try:
+            os.chmod(changesfile, os.stat(changesfile).st_mode | stat.S_IROTH)
+        except OSError as (errno, strerror):
+            # Ignore 'Operation not permitted' error.
+            if errno != 1:
+                raise
+
         # If we fail here someone is probably trying to exploit the race
         # so let's just raise an exception ...
         if os.path.exists(reason_filename):
             os.unlink(reason_filename)
-        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
+        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0o644)
 
         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
@@ -2512,7 +2540,7 @@ distribution."""
         anysuite = [suite] + [ vc.reference.suite_name for vc in get_version_checks(suite, "Enhances") ]
         for (s, v) in sv_list:
             if s in [ x.lower() for x in anysuite ]:
-                if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
+                if not anyversion or apt_pkg.version_compare(anyversion, v) <= 0:
                     anyversion = v
 
         return anyversion
@@ -2553,7 +2581,7 @@ distribution."""
             must_be_newer_than.append(target_suite)
 
         for (suite, existent_version) in sv_list:
-            vercmp = apt_pkg.VersionCompare(new_version, existent_version)
+            vercmp = apt_pkg.version_compare(new_version, existent_version)
 
             if suite in must_be_newer_than and sourceful and vercmp < 1:
                 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
@@ -2587,12 +2615,12 @@ distribution."""
                         # we could just stick with the "...old version..." REJECT
                         # for this, I think.
                         self.rejects.append("Won't propogate NEW packages.")
-                    elif apt_pkg.VersionCompare(new_version, add_version) < 0:
+                    elif apt_pkg.version_compare(new_version, add_version) < 0:
                         # propogation would be redundant. no need to reject though.
                         self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
                         cansave = 1
-                    elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
-                         apt_pkg.VersionCompare(add_version, target_version) >= 0:
+                    elif apt_pkg.version_compare(new_version, add_version) > 0 and \
+                         apt_pkg.version_compare(add_version, target_version) >= 0:
                         # propogate!!
                         self.warnings.append("Propogating upload to %s" % (addsuite))
                         self.pkg.changes.setdefault("propdistribution", {})
@@ -2747,12 +2775,15 @@ distribution."""
                     orig_files[dsc_name]["path"] = old_file
                     orig_files[dsc_name]["location"] = x.location.location_id
             else:
-                # TODO: Record the queues and info in the DB so we don't hardcode all this crap
+                # TODO: Determine queue list dynamically
                 # Not there? Check the queue directories...
-                for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
-                    if not Cnf.has_key("Dir::Queue::%s" % (directory)):
+                for queue_name in [ "byhand", "new", "proposedupdates", "oldproposedupdates", "embargoed", "unembargoed" ]:
+                    queue = get_policy_queue(queue_name, session)
+                    if not queue:
                         continue
-                    in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
+
+                    in_otherdir = os.path.join(queue.path, dsc_name)
+
                     if os.path.exists(in_otherdir):
                         in_otherdir_fh = utils.open_file(in_otherdir)
                         actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
@@ -2799,10 +2830,10 @@ distribution."""
             source_epochless_version = re_no_epoch.sub('', source_version)
             dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
             found = False
-            for q in ["Embargoed", "Unembargoed", "Newstage"]:
-                if cnf.has_key("Dir::Queue::%s" % (q)):
-                    if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
-                        found = True
+            for queue_name in ["embargoed", "unembargoed", "newstage"]:
+                queue = get_policy_queue(queue_name, session)
+                if queue and os.path.exists(os.path.join(queue.path, dsc_filename)):
+                    found = True
             if not found:
                 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))