X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=daklib%2Fqueue.py;h=03bc7e05d17f4ecf05b1ad8fb3448bd41240ff05;hb=9b3cf7b8bdd96df4e9d34c661fc78f4f552a2dd6;hp=ea7f76592dd1794c12361b560bbc399864cb7272;hpb=27d0120008e5a9e153d82f1f4db4dd0e78f7d88b;p=dak.git

diff --git a/daklib/queue.py b/daklib/queue.py
index ea7f7659..03bc7e05 100755
--- a/daklib/queue.py
+++ b/daklib/queue.py
@@ -50,7 +50,7 @@ from config import Config
 from holding import Holding
 from dbconn import *
 from summarystats import SummaryStats
-from utils import parse_changes
+from utils import parse_changes, check_dsc_files
 from textutils import fix_maintainer
 from binary import Binary
 
@@ -73,7 +73,7 @@ def get_type(f, session):
     # Determine the type
     if f.has_key("dbtype"):
         file_type = file["dbtype"]
-    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
+    elif re_source_ext.match(f["type"]):
         file_type = "dsc"
     else:
         utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
@@ -690,11 +690,12 @@ class Upload(object):
         # Check the version and for file overwrites
         self.check_binary_against_db(f, session)
 
-        b = Binary(f)
-        b.scan_package()
-        if len(b.rejects) > 0:
-            for j in b.rejects:
-                self.rejects.append(j)
+        # Temporarily disable contents generation until we change the table storage layout
+        #b = Binary(f)
+        #b.scan_package()
+        #if len(b.rejects) > 0:
+        #    for j in b.rejects:
+        #        self.rejects.append(j)
 
     def source_file_checks(self, f, session):
         entry = self.pkg.files[f]
@@ -712,7 +713,7 @@ class Upload(object):
             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
 
         # Ensure the source version matches the version in the .changes file
-        if entry["type"] == "orig.tar.gz":
+        if re_is_orig_source.match(f):
             changes_version = self.pkg.changes["chopversion2"]
         else:
             changes_version = self.pkg.changes["chopversion"]
@@ -920,7 +921,7 @@ class Upload(object):
             self.rejects.append("source only uploads are not supported.")
 
     ###########################################################################
-    def check_dsc(self, action=True):
+    def check_dsc(self, action=True, session=None):
         """Returns bool indicating whether or not the source changes are valid"""
         # Ensure there is source to check
         if not self.pkg.changes["architecture"].has_key("source"):
@@ -980,10 +981,11 @@ class Upload(object):
         if not re_valid_version.match(self.pkg.dsc["version"]):
             self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
 
-        # Bumping the version number of the .dsc breaks extraction by stable's
-        # dpkg-source. So let's not do that...
-        if self.pkg.dsc["format"] != "1.0":
-            self.rejects.append("%s: incompatible 'Format' version produced by a broken version of dpkg-dev 1.9.1{3,4}." % (dsc_filename))
+        # Only a limited list of source formats are allowed in each suite
+        for dist in self.pkg.changes["distribution"].keys():
+            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
+            if self.pkg.dsc["format"] not in allowed:
+                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
 
         # Validate the Maintainer field
         try:
@@ -1015,19 +1017,8 @@ class Upload(object):
         if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
             self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes."
                                 % (epochless_dsc_version, changes_version))
 
-        # Ensure there is a .tar.gz in the .dsc file
-        has_tar = False
-        for f in self.pkg.dsc_files.keys():
-            m = re_issource.match(f)
-            if not m:
-                self.rejects.append("%s: %s in Files field not recognised as source." % (dsc_filename, f))
-                continue
-            ftype = m.group(3)
-            if ftype == "orig.tar.gz" or ftype == "tar.gz":
-                has_tar = True
-
-        if not has_tar:
-            self.rejects.append("%s: no .tar.gz or .orig.tar.gz in 'Files' field." % (dsc_filename))
+        # Ensure the Files field contain only what's expected
+        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
 
         # Ensure source is newer than existing source in target suites
         session = DBConn().session()
@@ -1064,16 +1055,19 @@ class Upload(object):
             if not os.path.exists(src):
                 return
             ftype = m.group(3)
-            if ftype == "orig.tar.gz" and self.pkg.orig_tar_gz:
+            if re_is_orig_source.match(f) and pkg.orig_files.has_key(f) and \
+               pkg.orig_files[f].has_key("path"):
                 continue
             dest = os.path.join(os.getcwd(), f)
             os.symlink(src, dest)
 
-        # If the orig.tar.gz is not a part of the upload, create a symlink to the
-        # existing copy.
-        if self.pkg.orig_tar_gz:
-            dest = os.path.join(os.getcwd(), os.path.basename(self.pkg.orig_tar_gz))
-            os.symlink(self.pkg.orig_tar_gz, dest)
+        # If the orig files are not a part of the upload, create symlinks to the
+        # existing copies.
+        for orig_file in self.pkg.orig_files.keys():
+            if not self.pkg.orig_files[orig_file].has_key("path"):
+                continue
+            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
+            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
 
         # Extract the source
         cmd = "dpkg-source -sn -x %s" % (dsc_filename)
@@ -1116,10 +1110,11 @@ class Upload(object):
         # We should probably scrap or rethink the whole reprocess thing
         # Bail out if:
         #    a) there's no source
-        # or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files'
-        # or c) the orig.tar.gz is MIA
+        # or b) reprocess is 2 - we will do this check next time when orig
+        #       tarball is in 'files'
+        # or c) the orig files are MIA
         if not self.pkg.changes["architecture"].has_key("source") or self.reprocess == 2 \
-           or self.pkg.orig_tar_gz == -1:
+           or len(self.pkg.orig_files) == 0:
             return
 
         tmpdir = utils.temp_dirname()
@@ -1320,19 +1315,19 @@ class Upload(object):
                 sourcepkg, trans)
 
             if current is not None:
-                currentlymsg = "at version %s" % (current)
+                currentlymsg = "at version %s" % (current.version)
             else:
                 currentlymsg = "not present in testing"
 
             rejectmsg += "Transition description: %s\n\n" % (t["reason"])
 
             rejectmsg += "\n".join(textwrap.wrap("""Your package
-                is part of a testing transition designed to get %s migrated (it is
-                currently %s, we need version %s). This transition is managed by the
-                Release Team, and %s is the Release-Team member responsible for it.
-                Please mail debian-release@lists.debian.org or contact %s directly if you
-                need further assistance. You might want to upload to experimental until this
-                transition is done."""
+is part of a testing transition designed to get %s migrated (it is
+currently %s, we need version %s). This transition is managed by the
+Release Team, and %s is the Release-Team member responsible for it.
+Please mail debian-release@lists.debian.org or contact %s directly if you
+need further assistance. You might want to upload to experimental until this
+transition is done."""
                 % (source, currentlymsg, expected,t["rm"], t["rm"])))
 
             self.rejects.append(rejectmsg)
@@ -1474,6 +1469,7 @@ class Upload(object):
         for bug in bugs:
             summary += "%s " % (bug)
             if action:
+                self.update_subst()
                 self.Subst["__BUG_NUMBER__"] = bug
                 if self.pkg.changes["distribution"].has_key("stable"):
                     self.Subst["__STABLE_WARNING__"] = """
@@ -1484,8 +1480,8 @@ The update will eventually make its way into the next released Debian
 distribution."""
                 else:
                     self.Subst["__STABLE_WARNING__"] = ""
 
-            mail_message = utils.TemplateSubst(self.Subst, template)
-            utils.send_mail(mail_message)
+                mail_message = utils.TemplateSubst(self.Subst, template)
+                utils.send_mail(mail_message)
 
             # Clear up after ourselves
             del self.Subst["__BUG_NUMBER__"]
@@ -1537,6 +1533,7 @@ distribution."""
             summary += "Announcing to %s\n" % (announce_list)
 
             if action:
+                self.update_subst()
                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
                 if cnf.get("Dinstall::TrackingServer") and \
                    self.pkg.changes["architecture"].has_key("source"):
@@ -1601,6 +1598,7 @@ distribution."""
         # Send accept mail, announce to lists, close bugs and check for
         # override disparities
         if not cnf["Dinstall::Options::No-Mail"]:
+            self.update_subst()
             self.Subst["__SUITE__"] = ""
             self.Subst["__SUMMARY__"] = summary
             mail_message = utils.TemplateSubst(self.Subst, accepttemplate)
@@ -1686,6 +1684,7 @@ distribution."""
         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
 
+        self.update_subst()
         self.Subst["__SUMMARY__"] = summary
         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
         utils.send_mail(mail_message)
@@ -1832,6 +1831,7 @@ distribution."""
         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
 
+        self.update_subst()
         if not manual:
             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
@@ -2051,7 +2051,7 @@ distribution."""
         """
         @warning: NB: this function can remove entries from the 'files' index [if
-        the .orig.tar.gz is a duplicate of the one in the archive]; if
+        the orig tarball is a duplicate of the one in the archive]; if
         you're iterating over 'files' and call this function as part of
         the loop, be sure to add a check to the top of the loop to
         ensure you haven't just tried to dereference the deleted entry.
@@ -2059,7 +2059,8 @@ distribution."""
         """
         Cnf = Config()
-        self.pkg.orig_tar_gz = None
+        self.pkg.orig_files = {} # XXX: do we need to clear it?
+        orig_files = self.pkg.orig_files
 
         # Try and find all files mentioned in the .dsc. This has
         # to work harder to cope with the multiple possible
@@ -2093,7 +2094,7 @@ distribution."""
             if len(ql) > 0:
                 # Ignore exact matches for .orig.tar.gz
                 match = 0
-                if dsc_name.endswith(".orig.tar.gz"):
+                if re_is_orig_source.match(dsc_name):
                     for i in ql:
                         if self.pkg.files.has_key(dsc_name) and \
                            int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
@@ -2103,13 +2104,15 @@ distribution."""
                             # This would fix the stupidity of changing something we often iterate over
                             # whilst we're doing it
                             del self.pkg.files[dsc_name]
-                            self.pkg.orig_tar_gz = os.path.join(i.location.path, i.filename)
+                            if not orig_files.has_key(dsc_name):
+                                orig_files[dsc_name] = {}
+                            orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
                             match = 1
 
                 if not match:
                     self.rejects.append("can not overwrite existing copy of '%s' already in the archive."
                                         % (dsc_name))
-            elif dsc_name.endswith(".orig.tar.gz"):
+            elif re_is_orig_source.match(dsc_name):
                 # Check in the pool
                 ql = get_poolfile_like_name(dsc_name, session)
@@ -2147,9 +2150,11 @@ distribution."""
                     # need this for updating dsc_files in install()
                     dsc_entry["files id"] = x.file_id
                     # See install() in process-accepted...
-                    self.pkg.orig_tar_id = x.file_id
-                    self.pkg.orig_tar_gz = old_file
-                    self.pkg.orig_tar_location = x.location.location_id
+                    if not orig_files.has_key(dsc_name):
+                        orig_files[dsc_name] = {}
+                    orig_files[dsc_name]["id"] = x.file_id
+                    orig_files[dsc_name]["path"] = old_file
+                    orig_files[dsc_name]["location"] = x.location.location_id
                 else:
                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
                     # Not there? Check the queue directories...
@@ -2163,11 +2168,12 @@ distribution."""
                             in_otherdir_fh.close()
                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                             found = in_otherdir
-                            self.pkg.orig_tar_gz = in_otherdir
+                            if not orig_files.has_key(dsc_name):
+                                orig_files[dsc_name] = {}
+                            orig_files[dsc_name]["path"] = in_otherdir
 
                     if not found:
                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_name))
-                        self.pkg.orig_tar_gz = -1
                         continue
             else:
                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (file, dsc_name))
@@ -2243,6 +2249,7 @@ distribution."""
     def do_unaccept(self):
         cnf = Config()
 
+        self.update_subst()
        self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
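
For clarity, a small sketch of the data structure this diff introduces: self.pkg.orig_files replaces the old single orig_tar_gz / orig_tar_id / orig_tar_location attributes with one dict per orig tarball named in the .dsc. The sketch is not part of the commit; the filename, path and values are made up, and only the key names ("path", "id", "location") and the symlinking logic are taken from the hunks above. The helper name link_existing_origs is likewise hypothetical.

import os

# Hypothetical example of the new self.pkg.orig_files layout: keyed by the
# orig tarball filename, with optional "path", "id" and "location" entries
# filled in when an existing copy is found in the archive or the queues.
orig_files = {
    "example_1.0.orig.tar.gz": {
        "path": "/srv/pool/main/e/example/example_1.0.orig.tar.gz",
        "id": 42,         # files id in the database, when known
        "location": 1,    # location id in the database, when known
    },
}

def link_existing_origs(orig_files, workdir):
    # Mirrors the symlink loop above: orig tarballs that are not part of the
    # upload but already have a known path are linked into the work directory
    # before dpkg-source is run.
    for orig_file, info in orig_files.items():
        if "path" not in info:
            continue
        dest = os.path.join(workdir, os.path.basename(orig_file))
        if not os.path.lexists(dest):
            os.symlink(info["path"], dest)

Unlike the old orig_tar_gz attribute (a single path, or -1 when missing), this mapping can describe several upstream tarballs, which newer source formats with additional component tarballs may include.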