Exception handling
diff --git a/daklib/queue.py b/daklib/queue.py
old mode 100644
new mode 100755
index 0a03d1f..f35ee18
--- a/daklib/queue.py
+++ b/daklib/queue.py
@@ -22,6 +22,7 @@
 import cPickle, errno, os, pg, re, stat, sys, time
 import apt_inst, apt_pkg
 import utils, database
+from dak_exceptions import *
 
 from types import *
 
@@ -31,6 +32,110 @@ re_isanum = re.compile (r"^\d+$")
 re_default_answer = re.compile(r"\[(.*)\]")
 re_fdnic = re.compile(r"\n\n")
 re_bin_only_nmu = re.compile(r"\+b\d+$")
+
+################################################################################
+
+# Determine which parts of a .changes upload are NEW
+
+def determine_new(changes, files, projectB, warn=1):
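+    # Returns a dictionary keyed on package name describing the files in
+    # this upload that have no override entry yet, i.e. are NEW.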
+    new = {}
+
+    # Build up a list of potentially new things
+    for file_entry in files.keys():
+        f = files[file_entry]
+        # Skip byhand elements
+        if f["type"] == "byhand":
+            continue
+        pkg = f["package"]
+        priority = f["priority"]
+        section = f["section"]
+        file_type = get_type(f)
+        component = f["component"]
+
+        if file_type == "dsc":
+            priority = "source"
+        if not new.has_key(pkg):
+            new[pkg] = {}
+            new[pkg]["priority"] = priority
+            new[pkg]["section"] = section
+            new[pkg]["type"] = file_type
+            new[pkg]["component"] = component
+            new[pkg]["files"] = []
+        else:
+            old_type = new[pkg]["type"]
+            if old_type != file_type:
+                # source gets trumped by deb or udeb
+                if old_type == "dsc":
+                    new[pkg]["priority"] = priority
+                    new[pkg]["section"] = section
+                    new[pkg]["type"] = file_type
+                    new[pkg]["component"] = component
+        new[pkg]["files"].append(file_entry)
+        if f.has_key("othercomponents"):
+            new[pkg]["othercomponents"] = f["othercomponents"]
+
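+    # Anything that already has an override entry in one of the target
+    # suites is not NEW; drop it and clear the per-file "new" flag.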
+    for suite in changes["suite"].keys():
+        suite_id = database.get_suite_id(suite)
+        for pkg in new.keys():
+            component_id = database.get_component_id(new[pkg]["component"])
+            type_id = database.get_override_type_id(new[pkg]["type"])
+            q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
+            ql = q.getresult()
+            if ql:
+                for file_entry in new[pkg]["files"]:
+                    if files[file_entry].has_key("new"):
+                        del files[file_entry]["new"]
+                del new[pkg]
+
+    if warn:
+        if changes["suite"].has_key("stable"):
+            print "WARNING: overrides will be added for stable!"
+        if changes["suite"].has_key("oldstable"):
+            print "WARNING: overrides will be added for OLDstable!"
+        for pkg in new.keys():
+            if new[pkg].has_key("othercomponents"):
+                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
+
+    return new
+
+################################################################################
+
+def get_type(f):
+    # Determine the type
+    if f.has_key("dbtype"):
+        file_type = f["dbtype"]
+    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
+        file_type = "dsc"
+    else:
+        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))
+
+    # Validate the override type
+    type_id = database.get_override_type_id(file_type)
+    if type_id == -1:
+        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))
+
+    return file_type
+
+################################################################################
+
+# check if section/priority values are valid
+
+def check_valid(new):
+    for pkg in new.keys():
+        section = new[pkg]["section"]
+        priority = new[pkg]["priority"]
+        file_type = new[pkg]["type"]
+        new[pkg]["section id"] = database.get_section_id(section)
+        new[pkg]["priority id"] = database.get_priority_id(priority)
+        # Sanity checks
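+        # debian-installer sections are only valid for udebs (and vice
+        # versa), and the "source" priority is only valid for .dsc files;
+        # flag mismatches by setting the id to -1.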
+        di = section.find("debian-installer") != -1
+        if (di and file_type != "udeb") or (not di and file_type == "udeb"):
+            new[pkg]["section id"] = -1
+        if (priority == "source" and file_type != "dsc") or \
+           (priority != "source" and file_type == "dsc"):
+            new[pkg]["priority id"] = -1
+
+
 ###############################################################################
 
 # Convenience wrapper to carry around all the package information in
@@ -44,56 +149,10 @@ class Pkg:
 
 ###############################################################################
 
-class nmu_p:
-    # Read in the group maintainer override file
-    def __init__ (self, Cnf):
-        self.group_maint = {}
-        self.Cnf = Cnf
-        if Cnf.get("Dinstall::GroupOverrideFilename"):
-            filename = Cnf["Dir::Override"] + Cnf["Dinstall::GroupOverrideFilename"]
-            file = utils.open_file(filename)
-            for line in file.readlines():
-                line = utils.re_comments.sub('', line).lower().strip()
-                if line != "":
-                    self.group_maint[line] = 1
-            file.close()
-
-    def is_an_nmu (self, pkg):
-        Cnf = self.Cnf
-        changes = pkg.changes
-        dsc = pkg.dsc
-
-        i = utils.fix_maintainer (dsc.get("maintainer",
-                                          Cnf["Dinstall::MyEmailAddress"]).lower())
-        (dsc_rfc822, dsc_rfc2047, dsc_name, dsc_email) = i
-        # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
-        if dsc_name == changes["maintainername"].lower() and \
-           (changes["changedby822"] == "" or changes["changedbyname"].lower() == dsc_name):
-            return 0
-
-        if dsc.has_key("uploaders"):
-            uploaders = dsc["uploaders"].lower().split(",")
-            uploadernames = {}
-            for i in uploaders:
-                (rfc822, rfc2047, name, email) = utils.fix_maintainer (i.strip())
-                uploadernames[name] = ""
-            if uploadernames.has_key(changes["changedbyname"].lower()):
-                return 0
-
-        # Some group maintained packages (e.g. Debian QA) are never NMU's
-        if self.group_maint.has_key(changes["maintaineremail"].lower()):
-            return 0
-
-        return 1
-
-###############################################################################
-
 class Upload:
 
     def __init__(self, Cnf):
         self.Cnf = Cnf
-        # Read in the group-maint override file
-        self.nmu = nmu_p(Cnf)
         self.accept_count = 0
         self.accept_bytes = 0L
         self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
@@ -156,15 +215,15 @@ class Upload:
         for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
             exec "%s = {}" % i
         ## files
-        for file in files.keys():
-            d_files[file] = {}
+        for file_entry in files.keys():
+            d_files[file_entry] = {}
             for i in [ "package", "version", "architecture", "type", "size",
                        "md5sum", "component", "location id", "source package",
                        "source version", "maintainer", "dbtype", "files id",
                        "new", "section", "priority", "othercomponents",
                        "pool name", "original component" ]:
-                if files[file].has_key(i):
-                    d_files[file][i] = files[file][i]
+                if files[file_entry].has_key(i):
+                    d_files[file_entry][i] = files[file_entry][i]
         ## changes
         # Mandatory changes fields
         for i in [ "distribution", "source", "architecture", "version",
@@ -174,24 +233,25 @@ class Upload:
                    "closes", "changes" ]:
             d_changes[i] = changes[i]
         # Optional changes fields
-        for i in [ "changed-by", "filecontents", "format", "process-new note", "distribution-version" ]:
+        for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version",
+                   "sponsoremail" ]:
             if changes.has_key(i):
                 d_changes[i] = changes[i]
         ## dsc
         for i in [ "source", "version", "maintainer", "fingerprint",
-                   "uploaders", "bts changelog" ]:
+                   "uploaders", "bts changelog", "dm-upload-allowed" ]:
             if dsc.has_key(i):
                 d_dsc[i] = dsc[i]
         ## dsc_files
-        for file in dsc_files.keys():
-            d_dsc_files[file] = {}
+        for file_entry in dsc_files.keys():
+            d_dsc_files[file_entry] = {}
             # Mandatory dsc_files fields
             for i in [ "size", "md5sum" ]:
-                d_dsc_files[file][i] = dsc_files[file][i]
+                d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
             # Optional dsc_files fields
             for i in [ "files id" ]:
-                if dsc_files[file].has_key(i):
-                    d_dsc_files[file][i] = dsc_files[file][i]
+                if dsc_files[file_entry].has_key(i):
+                    d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
 
         for i in [ d_changes, d_dsc, d_files, d_dsc_files,
                    legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
@@ -226,6 +286,10 @@ class Upload:
             Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
             Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
             Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
+
+        if "sponsoremail" in changes:
+            Subst["__MAINTAINER_TO__"] += ", %s" % changes["sponsoremail"]
+
         if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
             Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
 
@@ -251,25 +315,34 @@ class Upload:
         if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
             changes["distribution"] = {}
 
+        override_summary = ""
         file_keys = files.keys()
         file_keys.sort()
-        for file in file_keys:
-            if files[file].has_key("byhand"):
+        for file_entry in file_keys:
+            if files[file_entry].has_key("byhand"):
                 byhand = 1
-                summary += file + " byhand\n"
-            elif files[file].has_key("new"):
+                summary += file_entry + " byhand\n"
+            elif files[file_entry].has_key("new"):
                 new = 1
-                summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
-                if files[file].has_key("othercomponents"):
-                    summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
-                if files[file]["type"] == "deb":
-                    deb_fh = utils.open_file(file)
+                summary += "(new) %s %s %s\n" % (file_entry, files[file_entry]["priority"], files[file_entry]["section"])
+                if files[file_entry].has_key("othercomponents"):
+                    summary += "WARNING: Already present in %s distribution.\n" % (files[file_entry]["othercomponents"])
+                if files[file_entry]["type"] == "deb":
+                    deb_fh = utils.open_file(file_entry)
                     summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
                     deb_fh.close()
             else:
-                files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
-                destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
-                summary += file + "\n  to " + destination + "\n"
+                files[file_entry]["pool name"] = utils.poolify (changes.get("source",""), files[file_entry]["component"])
+                destination = self.Cnf["Dir::PoolRoot"] + files[file_entry]["pool name"] + file_entry
+                summary += file_entry + "\n  to " + destination + "\n"
+                if not files[file_entry].has_key("type"):
+                    files[file_entry]["type"] = "unknown"
+                if files[file_entry]["type"] in ["deb", "udeb", "dsc"]:
+                    # In queue/unchecked we already have override entries, so use them;
+                    # in process-new we don't, so fall back to the newly generated ones.
+                    override_prio = files[file_entry].get("override priority", files[file_entry]["priority"])
+                    override_sect = files[file_entry].get("override section", files[file_entry]["section"])
+                    override_summary += "%s - %s %s\n" % (file_entry, override_prio, override_sect)
 
         short_summary = summary
 
@@ -279,6 +352,8 @@ class Upload:
         if byhand or new:
             summary += "Changes: " + f
 
+        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
+
         summary += self.announce(short_summary, 0)
 
         return (summary, short_summary)
@@ -296,55 +371,26 @@ class Upload:
             return summary
 
         bugs.sort()
-        if not self.nmu.is_an_nmu(self.pkg):
-            if changes["distribution"].has_key("experimental"):
-               # tag bugs as fixed-in-experimental for uploads to experimental
-               summary += "Setting bugs to severity fixed: "
-               control_message = ""
-               for bug in bugs:
-                   summary += "%s " % (bug)
-                   control_message += "tag %s + fixed-in-experimental\n" % (bug)
-               if action and control_message != "":
-                   Subst["__CONTROL_MESSAGE__"] = control_message
-                   mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-experimental-fixed")
-                   utils.send_mail (mail_message)
-               if action:
-                   self.Logger.log(["setting bugs to fixed"]+bugs)
-
-
-           else:
-               summary += "Closing bugs: "
-               for bug in bugs:
-                   summary += "%s " % (bug)
-                   if action:
-                       Subst["__BUG_NUMBER__"] = bug
-                       if changes["distribution"].has_key("stable"):
-                           Subst["__STABLE_WARNING__"] = """
+        summary += "Closing bugs: "
+        for bug in bugs:
+            summary += "%s " % (bug)
+            if action:
+                Subst["__BUG_NUMBER__"] = bug
+                if changes["distribution"].has_key("stable"):
+                    Subst["__STABLE_WARNING__"] = """
 Note that this package is not part of the released stable Debian
 distribution.  It may have dependencies on other unreleased software,
 or other instabilities.  Please take care if you wish to install it.
 The update will eventually make its way into the next released Debian
 distribution."""
-                       else:
-                           Subst["__STABLE_WARNING__"] = ""
-                           mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
-                           utils.send_mail (mail_message)
-                if action:
-                    self.Logger.log(["closing bugs"]+bugs)
-
-       else:                     # NMU
-            summary += "Setting bugs to severity fixed: "
-            control_message = ""
-            for bug in bugs:
-                summary += "%s " % (bug)
-                control_message += "tag %s + fixed\n" % (bug)
-            if action and control_message != "":
-                Subst["__CONTROL_MESSAGE__"] = control_message
-                mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-nmu-fixed")
-                utils.send_mail (mail_message)
-            if action:
-                self.Logger.log(["setting bugs to fixed"]+bugs)
+                else:
+                    Subst["__STABLE_WARNING__"] = ""
+                mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
+                utils.send_mail (mail_message)
+        if action:
+            self.Logger.log(["closing bugs"]+bugs)
         summary += "\n"
+
         return summary
 
     ###########################################################################
@@ -363,14 +409,14 @@ distribution."""
         Subst["__SHORT_SUMMARY__"] = short_summary
 
         for dist in changes["distribution"].keys():
-            list = Cnf.Find("Suite::%s::Announce" % (dist))
-            if list == "" or lists_done.has_key(list):
+            announce_list = Cnf.Find("Suite::%s::Announce" % (dist))
+            if announce_list == "" or lists_done.has_key(announce_list):
                 continue
-            lists_done[list] = 1
-            summary += "Announcing to %s\n" % (list)
+            lists_done[announce_list] = 1
+            summary += "Announcing to %s\n" % (announce_list)
 
             if action:
-                Subst["__ANNOUNCE_LIST_ADDRESS__"] = list
+                Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
                 if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
                     Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
                 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
@@ -399,9 +445,9 @@ distribution."""
         # Move all the files into the accepted directory
         utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
         file_keys = files.keys()
-        for file in file_keys:
-            utils.move(file, Cnf["Dir::Queue::Accepted"])
-            self.accept_bytes += float(files[file]["size"])
+        for file_entry in file_keys:
+            utils.move(file_entry, Cnf["Dir::Queue::Accepted"])
+            self.accept_bytes += float(files[file_entry]["size"])
         self.accept_count += 1
 
         # Send accept mail, announce to lists, close bugs and check for
@@ -437,8 +483,8 @@ distribution."""
             temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                 dotprefix=1, perms=0644)
             debinfo = utils.open_file(temp_filename, 'w')
-            for file in file_keys:
-                f = files[file]
+            for file_entry in file_keys:
+                f = files[file_entry]
                 if f["type"] == "deb":
                     line = " ".join([f["package"], f["version"],
                                      f["architecture"], f["source package"],
@@ -473,9 +519,9 @@ distribution."""
             dest_dir = Cnf["Dir::QueueBuild"]
             if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                 dest_dir = os.path.join(dest_dir, suite)
-            for file in file_keys:
-                src = os.path.join(path, file)
-                dest = os.path.join(dest_dir, file)
+            for file_entry in file_keys:
+                src = os.path.join(path, file_entry)
+                dest = os.path.join(dest_dir, file_entry)
                 if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                     # Copy it since the original won't be readable by www-data
                     utils.copy(src, dest)
@@ -529,19 +575,16 @@ distribution."""
         summary = ""
         file_keys = files.keys()
         file_keys.sort()
-        for file in file_keys:
-            if not files[file].has_key("new") and files[file]["type"] == "deb":
-                section = files[file]["section"]
-                override_section = files[file]["override section"]
+        for file_entry in file_keys:
+            if not files[file_entry].has_key("new") and files[file_entry]["type"] == "deb":
+                section = files[file_entry]["section"]
+                override_section = files[file_entry]["override section"]
                 if section.lower() != override_section.lower() and section != "-":
-                    # Ignore this; it's a common mistake and not worth whining about
-                    if section.lower() == "non-us/main" and override_section.lower() == "non-us":
-                        continue
-                    summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section)
-                priority = files[file]["priority"]
-                override_priority = files[file]["override priority"]
+                    summary += "%s: package says section is %s, override says %s.\n" % (file_entry, section, override_section)
+                priority = files[file_entry]["priority"]
+                override_priority = files[file_entry]["override priority"]
                 if priority != override_priority and priority != "-":
-                    summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority)
+                    summary += "%s: package says priority is %s, override says %s.\n" % (file_entry, priority, override_priority)
 
         if summary == "":
             return
@@ -560,36 +603,36 @@ distribution."""
 
         Cnf = self.Cnf
 
-        for file in files:
+        for file_entry in files:
             # Skip any files which don't exist or which we don't have permission to copy.
-            if os.access(file,os.R_OK) == 0:
+            if os.access(file_entry,os.R_OK) == 0:
                 continue
-            dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file)
+            dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file_entry)
             try:
                 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
             except OSError, e:
                 # File exists?  Let's try and move it to the morgue
                 if errno.errorcode[e.errno] == 'EEXIST':
-                    morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file)
+                    morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file_entry)
                     try:
                         morgue_file = utils.find_next_free(morgue_file)
-                    except utils.tried_too_hard_exc:
+                    except NoFreeFilenameError:
                         # Something's either gone badly Pete Tong, or
                         # someone is trying to exploit us.
-                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file))
+                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                         return
                     utils.move(dest_file, morgue_file, perms=0660)
                     try:
                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                     except OSError, e:
                         # Likewise
-                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file))
+                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                         return
                 else:
                     raise
             # If we got here, we own the destination file, so we can
             # safely overwrite it.
-            utils.move(file, dest_file, 1, perms=0660)
+            utils.move(file_entry, dest_file, 1, perms=0660)
             os.close(dest_fd)
 
     ###########################################################################
@@ -644,9 +687,9 @@ distribution."""
         if not manual:
             Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
             Subst["__MANUAL_REJECT_MESSAGE__"] = ""
-            Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
+            Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
             os.write(reason_fd, reject_message)
-            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/dak.rejected")
+            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
         else:
             # Build up the rejection email
             user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])
@@ -654,7 +697,7 @@ distribution."""
             Subst["__REJECTOR_ADDRESS__"] = user_email_address
             Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
             Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
-            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/dak.rejected")
+            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
             # Write the rejection email out as the <foo>.reason file
             os.write(reason_fd, reject_mail_message)
 
@@ -676,26 +719,26 @@ distribution."""
     # (2) Bin-only NMU                     => 1.0-3+b1 , 1.0-3.1+b1
 
     def source_exists (self, package, source_version, suites = ["any"]):
-       okay = 1
-       for suite in suites:
-           if suite == "any":
-               que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
-                   (package)
-           else:
-               # source must exist in suite X, or in some other suite that's
-               # mapped to X, recursively... silent-maps are counted too,
-               # unreleased-maps aren't.
-               maps = self.Cnf.ValueList("SuiteMappings")[:]
-               maps.reverse()
-               maps = [ m.split() for m in maps ]
-               maps = [ (x[1], x[2]) for x in maps
-                               if x[0] == "map" or x[0] == "silent-map" ]
-               s = [suite]
-               for x in maps:
-                       if x[1] in s and x[0] not in s:
-                               s.append(x[0])
-
-               que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
+        okay = 1
+        for suite in suites:
+            if suite == "any":
+                que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
+                    (package)
+            else:
+                # source must exist in suite X, or in some other suite that's
+                # mapped to X, recursively... silent-maps are counted too,
+                # unreleased-maps aren't.
+                maps = self.Cnf.ValueList("SuiteMappings")[:]
+                maps.reverse()
+                maps = [ m.split() for m in maps ]
+                maps = [ (x[1], x[2]) for x in maps
+                                if x[0] == "map" or x[0] == "silent-map" ]
+                s = [suite]
+                for x in maps:
+                    if x[1] in s and x[0] not in s:
+                        s.append(x[0])
+
+                que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
             q = self.projectB.query(que)
 
             # Reduce the query results to a list of version numbers
@@ -712,18 +755,18 @@ distribution."""
 
             # No source found...
             okay = 0
-           break
-       return okay
+            break
+        return okay
 
     ################################################################################
-    
+
     def in_override_p (self, package, component, suite, binary_type, file):
         files = self.pkg.files
 
         if binary_type == "": # must be source
-            type = "dsc"
+            file_type = "dsc"
         else:
-            type = binary_type
+            file_type = binary_type
 
         # Override suite name; used for example with proposed-updates
         if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
@@ -734,17 +777,13 @@ distribution."""
         if suite_id == -1:
             return None
         component_id = database.get_component_id(component)
-        type_id = database.get_override_type_id(type)
-
-        # FIXME: nasty non-US speficic hack
-        if component.lower().startswith("non-us/"):
-            component = component[7:]
+        type_id = database.get_override_type_id(file_type)
 
         q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                            % (package, suite_id, component_id, type_id))
         result = q.getresult()
         # If checking for a source package fall back on the binary override type
-        if type == "dsc" and not result:
+        if file_type == "dsc" and not result:
             deb_type_id = database.get_override_type_id("deb")
             udeb_type_id = database.get_override_type_id("udeb")
             q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
@@ -804,12 +843,12 @@ distribution."""
                     ch = self.pkg.changes
                     cansave = 0
                     if ch.get('distribution-version', {}).has_key(suite):
-                        # we really use the other suite, ignoring the conflicting one ...
+                    # we really use the other suite, ignoring the conflicting one ...
                         addsuite = ch["distribution-version"][suite]
-                    
+
                         add_version = self.get_anyversion(query_result, addsuite)
                         target_version = self.get_anyversion(query_result, target_suite)
-                    
+
                         if not add_version:
                             # not add_version can only happen if we map to a suite
                             # that doesn't enhance the suite we're propup'ing from.
@@ -840,7 +879,7 @@ distribution."""
                             self.pkg.changes.setdefault("propdistribution", {})
                             self.pkg.changes["propdistribution"][addsuite] = 1
                             cansave = 1
-                
+
                     if not cansave:
                         self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
 
@@ -894,7 +933,7 @@ SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
     # the .orig.tar.gz is a duplicate of the one in the archive]; if
     # you're iterating over 'files' and call this function as part of
     # the loop, be sure to add a check to the top of the loop to
-    # ensure you haven't just tried to derefernece the deleted entry.
+    # ensure you haven't just tried to dereference the deleted entry.
     # **WARNING**
 
     def check_dsc_against_db(self, file):
@@ -907,6 +946,8 @@ SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
         # Try and find all files mentioned in the .dsc.  This has
         # to work harder to cope with the multiple possible
         # locations of an .orig.tar.gz.
+        # The ordering on the select is needed to pick the newest orig
+        # when it exists in multiple places.
         for dsc_file in dsc_files.keys():
             found = None
             if files.has_key(dsc_file):
@@ -914,7 +955,7 @@ SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
                 actual_size = int(files[dsc_file]["size"])
                 found = "%s in incoming" % (dsc_file)
                 # Check the file does not already exist in the archive
-                q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
+                q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
                 ql = q.getresult()
                 # Strip out anything that isn't '%s' or '/%s$'
                 for i in ql:
@@ -993,11 +1034,13 @@ SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
 
                     in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
                     # See process_it() in 'dak process-unchecked' for explanation of this
-                    if os.path.exists(in_unchecked):
+                    # in_unchecked check dropped by ajt 2007-08-28, how did that
+                    # ever make sense?
+                    if os.path.exists(in_unchecked) and False:
                         return (self.reject_message, in_unchecked)
                     else:
-                        for dir in [ "Accepted", "New", "Byhand" ]:
-                            in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file)
+                        for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
+                            in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)],dsc_file)
                             if os.path.exists(in_otherdir):
                                 in_otherdir_fh = utils.open_file(in_otherdir)
                                 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)