X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=daklib%2Fqueue.py;h=03bc7e05d17f4ecf05b1ad8fb3448bd41240ff05;hb=2495bbed9a0904fbe1a15976cad2974b305fb9ce;hp=b11534a0592ac4b7e7f1f8316dbc8a13b7bae9a6;hpb=0dada7c5846e0ae676bbe0b81c1065fe75ca9b86;p=dak.git

diff --git a/daklib/queue.py b/daklib/queue.py
index b11534a0..03bc7e05 100755
--- a/daklib/queue.py
+++ b/daklib/queue.py
@@ -38,8 +38,11 @@ import apt_pkg
 import utils
 import commands
 import shutil
+import textwrap
 from types import *
+import yaml
+
 from dak_exceptions import *
 from changes import *
 from regexes import *
@@ -47,7 +50,7 @@ from config import Config
 from holding import Holding
 from dbconn import *
 from summarystats import SummaryStats
-from utils import parse_changes
+from utils import parse_changes, check_dsc_files
 from textutils import fix_maintainer
 from binary import Binary
 
@@ -70,7 +73,7 @@ def get_type(f, session):
     # Determine the type
     if f.has_key("dbtype"):
         file_type = file["dbtype"]
-    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
+    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
     else:
         utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
@@ -687,11 +690,12 @@ class Upload(object):
         # Check the version and for file overwrites
         self.check_binary_against_db(f, session)
 
-        b = Binary(f)
-        b.scan_package()
-        if len(b.rejects) > 0:
-            for j in b.rejects:
-                self.rejects.append(j)
+        # Temporarily disable contents generation until we change the table storage layout
+        #b = Binary(f)
+        #b.scan_package()
+        #if len(b.rejects) > 0:
+        #    for j in b.rejects:
+        #        self.rejects.append(j)
 
     def source_file_checks(self, f, session):
         entry = self.pkg.files[f]
@@ -709,7 +713,7 @@ class Upload(object):
             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
 
         # Ensure the source version matches the version in the .changes file
-        if entry["type"] == "orig.tar.gz":
+        if re_is_orig_source.match(f):
             changes_version = self.pkg.changes["chopversion2"]
         else:
             changes_version = self.pkg.changes["chopversion"]
@@ -917,7 +921,7 @@ class Upload(object):
             self.rejects.append("source only uploads are not supported.")
 
     ###########################################################################
-    def check_dsc(self, action=True):
+    def check_dsc(self, action=True, session=None):
         """Returns bool indicating whether or not the source changes are valid"""
         # Ensure there is source to check
         if not self.pkg.changes["architecture"].has_key("source"):
@@ -977,10 +981,11 @@ class Upload(object):
         if not re_valid_version.match(self.pkg.dsc["version"]):
             self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
 
-        # Bumping the version number of the .dsc breaks extraction by stable's
-        # dpkg-source. So let's not do that...
-        if self.pkg.dsc["format"] != "1.0":
-            self.rejects.append("%s: incompatible 'Format' version produced by a broken version of dpkg-dev 1.9.1{3,4}." % (dsc_filename))
+        # Only a limited list of source formats are allowed in each suite
+        for dist in self.pkg.changes["distribution"].keys():
+            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
+            if self.pkg.dsc["format"] not in allowed:
+                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
 
         # Validate the Maintainer field
         try:
@@ -1012,19 +1017,8 @@ class Upload(object):
         if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
             self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
 
-        # Ensure there is a .tar.gz in the .dsc file
-        has_tar = False
-        for f in self.pkg.dsc_files.keys():
-            m = re_issource.match(f)
-            if not m:
-                self.rejects.append("%s: %s in Files field not recognised as source." % (dsc_filename, f))
-                continue
-            ftype = m.group(3)
-            if ftype == "orig.tar.gz" or ftype == "tar.gz":
-                has_tar = True
-
-        if not has_tar:
-            self.rejects.append("%s: no .tar.gz or .orig.tar.gz in 'Files' field." % (dsc_filename))
+        # Ensure the Files field contain only what's expected
+        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
 
         # Ensure source is newer than existing source in target suites
         session = DBConn().session()
@@ -1061,16 +1055,19 @@ class Upload(object):
             if not os.path.exists(src):
                 return
             ftype = m.group(3)
-            if ftype == "orig.tar.gz" and self.pkg.orig_tar_gz:
+            if re_is_orig_source.match(f) and pkg.orig_files.has_key(f) and \
+               pkg.orig_files[f].has_key("path"):
                 continue
             dest = os.path.join(os.getcwd(), f)
             os.symlink(src, dest)
 
-        # If the orig.tar.gz is not a part of the upload, create a symlink to the
-        # existing copy.
-        if self.pkg.orig_tar_gz:
-            dest = os.path.join(os.getcwd(), os.path.basename(self.pkg.orig_tar_gz))
-            os.symlink(self.pkg.orig_tar_gz, dest)
+        # If the orig files are not a part of the upload, create symlinks to the
+        # existing copies.
+        for orig_file in self.pkg.orig_files.keys():
+            if not self.pkg.orig_files[orig_file].has_key("path"):
+                continue
+            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
+            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
 
         # Extract the source
         cmd = "dpkg-source -sn -x %s" % (dsc_filename)
@@ -1113,10 +1110,11 @@ class Upload(object):
         # We should probably scrap or rethink the whole reprocess thing
         # Bail out if:
         #    a) there's no source
-        # or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files'
-        # or c) the orig.tar.gz is MIA
+        # or b) reprocess is 2 - we will do this check next time when orig
+        #       tarball is in 'files'
+        # or c) the orig files are MIA
         if not self.pkg.changes["architecture"].has_key("source") or self.reprocess == 2 \
-           or self.pkg.orig_tar_gz == -1:
+           or len(self.pkg.orig_files) == 0:
             return
 
         tmpdir = utils.temp_dirname()
@@ -1263,11 +1261,85 @@ class Upload(object):
         except:
             self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
 
+    ###########################################################################
+    def check_transition(self, session):
+        cnf = Config()
+
+        sourcepkg = self.pkg.changes["source"]
+
+        # No sourceful upload -> no need to do anything else, direct return
+        # We also work with unstable uploads, not experimental or those going to some
+        # proposed-updates queue
+        if "source" not in self.pkg.changes["architecture"] or \
+           "unstable" not in self.pkg.changes["distribution"]:
+            return
+
+        # Also only check if there is a file defined (and existant) with
+        # checks.
+        transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
+        if transpath == "" or not os.path.exists(transpath):
+            return
+
+        # Parse the yaml file
+        sourcefile = file(transpath, 'r')
+        sourcecontent = sourcefile.read()
+        try:
+            transitions = yaml.load(sourcecontent)
+        except yaml.YAMLError, msg:
+            # This shouldn't happen, there is a wrapper to edit the file which
+            # checks it, but we prefer to be safe than ending up rejecting
+            # everything.
+            utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
+            return
+
+        # Now look through all defined transitions
+        for trans in transitions:
+            t = transitions[trans]
+            source = t["source"]
+            expected = t["new"]
+
+            # Will be None if nothing is in testing.
+            current = get_source_in_suite(source, "testing", session)
+            if current is not None:
+                compare = apt_pkg.VersionCompare(current.version, expected)
+
+            if current is None or compare < 0:
+                # This is still valid, the current version in testing is older than
+                # the new version we wait for, or there is none in testing yet
+
+                # Check if the source we look at is affected by this.
+                if sourcepkg in t['packages']:
+                    # The source is affected, lets reject it.
+
+                    rejectmsg = "%s: part of the %s transition.\n\n" % (
+                        sourcepkg, trans)
+
+                    if current is not None:
+                        currentlymsg = "at version %s" % (current.version)
+                    else:
+                        currentlymsg = "not present in testing"
+
+                    rejectmsg += "Transition description: %s\n\n" % (t["reason"])
+
+                    rejectmsg += "\n".join(textwrap.wrap("""Your package
+is part of a testing transition designed to get %s migrated (it is
+currently %s, we need version %s). This transition is managed by the
+Release Team, and %s is the Release-Team member responsible for it.
+Please mail debian-release@lists.debian.org or contact %s directly if you
+need further assistance. You might want to upload to experimental until this
+transition is done."""
+                            % (source, currentlymsg, expected,t["rm"], t["rm"])))
+
+                    self.rejects.append(rejectmsg)
+                    return
+
     ###########################################################################
     def check_signed_by_key(self):
         """Ensure the .changes is signed by an authorized uploader."""
         session = DBConn().session()
 
+        self.check_transition(session)
+
         (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(self.pkg.changes["fingerprint"], session=session)
 
         # match claimed name with actual name:
@@ -1397,6 +1469,7 @@ class Upload(object):
         for bug in bugs:
             summary += "%s " % (bug)
             if action:
+                self.update_subst()
                 self.Subst["__BUG_NUMBER__"] = bug
                 if self.pkg.changes["distribution"].has_key("stable"):
                     self.Subst["__STABLE_WARNING__"] = """
@@ -1407,8 +1480,8 @@ The update will eventually make its way into the next released Debian
 distribution."""
                 else:
                     self.Subst["__STABLE_WARNING__"] = ""
-            mail_message = utils.TemplateSubst(self.Subst, template)
-            utils.send_mail(mail_message)
+                mail_message = utils.TemplateSubst(self.Subst, template)
+                utils.send_mail(mail_message)
 
             # Clear up after ourselves
             del self.Subst["__BUG_NUMBER__"]
@@ -1460,6 +1533,7 @@ distribution."""
             summary += "Announcing to %s\n" % (announce_list)
 
             if action:
+                self.update_subst()
                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
                 if cnf.get("Dinstall::TrackingServer") and \
                    self.pkg.changes["architecture"].has_key("source"):
@@ -1524,6 +1598,7 @@ distribution."""
         # Send accept mail, announce to lists, close bugs and check for
         # override disparities
         if not cnf["Dinstall::Options::No-Mail"]:
+            self.update_subst()
             self.Subst["__SUITE__"] = ""
             self.Subst["__SUMMARY__"] = summary
             mail_message = utils.TemplateSubst(self.Subst, accepttemplate)
@@ -1609,6 +1684,7 @@ distribution."""
 
         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
 
+        self.update_subst()
         self.Subst["__SUMMARY__"] = summary
         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
         utils.send_mail(mail_message)
@@ -1755,6 +1831,7 @@ distribution."""
 
         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
 
+        self.update_subst()
        if not manual:
             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
@@ -1974,7 +2051,7 @@ distribution."""
         """
 
         @warning: NB: this function can remove entries from the 'files' index [if
-                 the .orig.tar.gz is a duplicate of the one in the archive]; if
+                 the orig tarball is a duplicate of the one in the archive]; if
                  you're iterating over 'files' and call this function as part of
                  the loop, be sure to add a check to the top of the loop to
                  ensure you haven't just tried to dereference the deleted entry.
@@ -1982,7 +2059,8 @@ distribution."""
 
         """
         Cnf = Config()
-        self.pkg.orig_tar_gz = None
+        self.pkg.orig_files = {} # XXX: do we need to clear it?
+        orig_files = self.pkg.orig_files
 
         # Try and find all files mentioned in the .dsc. This has
         # to work harder to cope with the multiple possible
@@ -2016,7 +2094,7 @@ distribution."""
             if len(ql) > 0:
                 # Ignore exact matches for .orig.tar.gz
                 match = 0
-                if dsc_name.endswith(".orig.tar.gz"):
+                if re_is_orig_source.match(dsc_name):
                     for i in ql:
                         if self.pkg.files.has_key(dsc_name) and \
                            int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
@@ -2026,13 +2104,15 @@ distribution."""
                             # This would fix the stupidity of changing something we often iterate over
                             # whilst we're doing it
                             del self.pkg.files[dsc_name]
-                            self.pkg.orig_tar_gz = os.path.join(i.location.path, i.filename)
+                            if not orig_files.has_key(dsc_name):
+                                orig_files[dsc_name] = {}
+                            orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
                             match = 1
 
                 if not match:
                     self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
 
-            elif dsc_name.endswith(".orig.tar.gz"):
+            elif re_is_orig_source.match(dsc_name):
                 # Check in the pool
                 ql = get_poolfile_like_name(dsc_name, session)
 
@@ -2070,9 +2150,11 @@ distribution."""
                     # need this for updating dsc_files in install()
                     dsc_entry["files id"] = x.file_id
                     # See install() in process-accepted...
-                    self.pkg.orig_tar_id = x.file_id
-                    self.pkg.orig_tar_gz = old_file
-                    self.pkg.orig_tar_location = x.location.location_id
+                    if not orig_files.has_key(dsc_name):
+                        orig_files[dsc_name] = {}
+                    orig_files[dsc_name]["id"] = x.file_id
+                    orig_files[dsc_name]["path"] = old_file
+                    orig_files[dsc_name]["location"] = x.location.location_id
                 else:
                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
                     # Not there? Check the queue directories...
@@ -2086,11 +2168,12 @@ distribution."""
                         in_otherdir_fh.close()
                         actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                         found = in_otherdir
-                        self.pkg.orig_tar_gz = in_otherdir
+                        if not orig_files.has_key(dsc_name):
+                            orig_files[dsc_name] = {}
+                        orig_files[dsc_name]["path"] = in_otherdir
 
                 if not found:
                     self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_name))
-                    self.pkg.orig_tar_gz = -1
                     continue
             else:
                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (file, dsc_name))
@@ -2166,6 +2249,7 @@ distribution."""
     def do_unaccept(self):
         cnf = Config()
 
+        self.update_subst()
         self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
         self.Subst["__REJECT_MESSAGE__"] = self.package_info()
         self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
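Reading aid, not part of the patch above: the two data structures this diff introduces are easier to follow with a concrete shape in front of you. The sketch below shows, first, the pkg.orig_files dictionary that replaces the old orig_tar_gz / orig_tar_id / orig_tar_location attributes, and second, the kind of mapping check_transition() assumes yaml.load() returns for the Dinstall::Reject::ReleaseTransitions file. Every filename, version, id and person named below is an invented example; only the keys mirror what the patched code actually reads and writes.

# Shape of pkg.orig_files (illustrative values only; Python 2, like the file above).
orig_files = {}

dsc_name = "hello_2.2.orig.tar.gz"                # hypothetical orig tarball
if not orig_files.has_key(dsc_name):              # same guard pattern as in the patch
    orig_files[dsc_name] = {}
orig_files[dsc_name]["path"] = "/srv/pool/main/h/hello/hello_2.2.orig.tar.gz"
orig_files[dsc_name]["id"] = 12345                # files id, later used by install()
orig_files[dsc_name]["location"] = 1              # location id of the pool copy

# Callers now loop over every orig component instead of testing a single attribute:
for name, info in orig_files.items():
    if info.has_key("path"):
        print("%s is already available at %s" % (name, info["path"]))

# Rough shape of the parsed ReleaseTransitions file as consumed by check_transition();
# the transition name, versions and contact are made up.
transitions = {
    "libfoo2": {
        "source": "libfoo",                       # source package driving the transition
        "new": "2.0-1",                           # version we wait for in testing
        "rm": "A. Release-Manager",               # responsible Release Team member
        "reason": "libfoo1 -> libfoo2 library transition",
        "packages": ["libfoo", "bar", "baz"],     # uploads rejected while it runs
    },
}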