git.decadent.org.uk Git - dak.git/blobdiff - daklib/queue.py
add check_poolfile routine and fix session usage
[dak.git] / daklib / queue.py
old mode 100644 (file)
new mode 100755 (executable)
index ea8f676..babaf66
@@ -1,7 +1,14 @@
 #!/usr/bin/env python
+# vim:set et sw=4:
 
-# Queue utility functions for dak
-# Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006  James Troup <james@nocrew.org>
+"""
+Queue utility functions for dak
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2001 - 2006 James Troup <james@nocrew.org>
+@copyright: 2009  Joerg Jaspert <joerg@debian.org>
+@license: GNU General Public License version 2 or later
+"""
 
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 
 ###############################################################################
 
-import cPickle, errno, os, pg, re, stat, sys, time
-import apt_inst, apt_pkg
-import utils, database
-
+import cPickle
+import errno
+import os
+import pg
+import stat
+import sys
+import time
+import apt_inst
+import apt_pkg
+import utils
 from types import *
 
+from dak_exceptions import *
+from changes import *
+from regexes import re_default_answer, re_fdnic, re_bin_only_nmu, re_strip_srcver, re_valid_pkg_name, re_isanum, re_no_epoch, re_no_revision
+from config import Config
+from dbconn import *
+from summarystats import SummaryStats
+from utils import parse_changes
+from textutils import fix_maintainer
+
 ###############################################################################
 
-re_isanum = re.compile (r"^\d+$")
-re_default_answer = re.compile(r"\[(.*)\]")
-re_fdnic = re.compile(r"\n\n")
-re_bin_only_nmu = re.compile(r"\+b\d+$")
+def get_type(f, session=None):
+    """
+    Get the file type of C{f}
 
-###############################################################################
+    @type f: dict
+    @param f: file entry from Changes object
 
-# Convenience wrapper to carry around all the package information in
+    @rtype: string
+    @return: filetype
 
-class Pkg:
-    def __init__(self, **kwds):
-        self.__dict__.update(kwds)
+    """
+    if session is None:
+        session = DBConn().session()
 
-    def update(self, **kwds):
-        self.__dict__.update(kwds)
+    # Determine the type
+    if f.has_key("dbtype"):
+        file_type = f["dbtype"]
+    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
+        file_type = "dsc"
+    else:
+        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))
 
-###############################################################################
+    # Validate the override type
+    type_id = get_override_type(file_type, session)
+    if type_id is None:
+        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))
 
-class nmu_p:
-    # Read in the group maintainer override file
-    def __init__ (self, Cnf):
-        self.group_maint = {}
-        self.Cnf = Cnf
-        if Cnf.get("Dinstall::GroupOverrideFilename"):
-            filename = Cnf["Dir::Override"] + Cnf["Dinstall::GroupOverrideFilename"]
-            file = utils.open_file(filename)
-            for line in file.readlines():
-                line = utils.re_comments.sub('', line).lower().strip()
-                if line != "":
-                    self.group_maint[line] = 1
-            file.close()
-
-    def is_an_nmu (self, pkg):
-        Cnf = self.Cnf
-        changes = pkg.changes
-        dsc = pkg.dsc
-
-        i = utils.fix_maintainer (dsc.get("maintainer",
-                                          Cnf["Dinstall::MyEmailAddress"]).lower())
-        (dsc_rfc822, dsc_rfc2047, dsc_name, dsc_email) = i
-        # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
-        if dsc_name == changes["maintainername"].lower() and \
-           (changes["changedby822"] == "" or changes["changedbyname"].lower() == dsc_name):
-            return 0
-
-        if dsc.has_key("uploaders"):
-            uploaders = dsc["uploaders"].lower().split(",")
-            uploadernames = {}
-            for i in uploaders:
-                (rfc822, rfc2047, name, email) = utils.fix_maintainer (i.strip())
-                uploadernames[name] = ""
-            if uploadernames.has_key(changes["changedbyname"].lower()):
-                return 0
-
-        # Some group maintained packages (e.g. Debian QA) are never NMU's
-        if self.group_maint.has_key(changes["maintaineremail"].lower()):
-            return 0
-
-        return 1
+    return file_type
 
-###############################################################################
+################################################################################
 
-class Upload:
+# Determine what parts in a .changes are NEW
 
-    def __init__(self, Cnf):
-        self.Cnf = Cnf
-        # Read in the group-maint override file
-        self.nmu = nmu_p(Cnf)
-        self.accept_count = 0
-        self.accept_bytes = 0L
-        self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
-                       legacy_source_untouchable = {})
+def determine_new(changes, files, warn=1):
+    """
+    Determine what parts in a C{changes} file are NEW.
 
-        # Initialize the substitution template mapping global
-        Subst = self.Subst = {}
-        Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
-        Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
-        Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
-        Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
+    @type changes: Upload.Pkg.changes dict
+    @param changes: Changes dictionary
 
-        self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
-        database.init(Cnf, self.projectB)
+    @type files: Upload.Pkg.files dict
+    @param files: Files dictionary
 
-    ###########################################################################
+    @type warn: bool
+    @param warn: Warn if overrides are added for (old)stable
 
-    def init_vars (self):
-        for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
-            exec "self.pkg.%s.clear();" % (i)
-        self.pkg.orig_tar_id = None
-        self.pkg.orig_tar_location = ""
-        self.pkg.orig_tar_gz = None
+    @rtype: dict
+    @return: dictionary of NEW components.
 
-    ###########################################################################
+    """
+    new = {}
+
+    session = DBConn().session()
+
+    # Build up a list of potentially new things
+    for name, f in files.items():
+        # Skip byhand elements
+        if f["type"] == "byhand":
+            continue
+        pkg = f["package"]
+        priority = f["priority"]
+        section = f["section"]
+        file_type = get_type(f, session)
+        component = f["component"]
+
+        if file_type == "dsc":
+            priority = "source"
+
+        if not new.has_key(pkg):
+            new[pkg] = {}
+            new[pkg]["priority"] = priority
+            new[pkg]["section"] = section
+            new[pkg]["type"] = file_type
+            new[pkg]["component"] = component
+            new[pkg]["files"] = []
+        else:
+            old_type = new[pkg]["type"]
+            if old_type != file_type:
+                # source gets trumped by deb or udeb
+                if old_type == "dsc":
+                    new[pkg]["priority"] = priority
+                    new[pkg]["section"] = section
+                    new[pkg]["type"] = file_type
+                    new[pkg]["component"] = component
+
+        new[pkg]["files"].append(name)
+
+        if f.has_key("othercomponents"):
+            new[pkg]["othercomponents"] = f["othercomponents"]
+
+    for suite in changes["suite"].keys():
+        for pkg in new.keys():
+            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
+            if len(ql) > 0:
+                for file_entry in new[pkg]["files"]:
+                    if files[file_entry].has_key("new"):
+                        del files[file_entry]["new"]
+                del new[pkg]
+
+    if warn:
+        for s in ['stable', 'oldstable']:
+            if changes["suite"].has_key(s):
+                print "WARNING: overrides will be added for %s!" % s
+        for pkg in new.keys():
+            if new[pkg].has_key("othercomponents"):
+                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
+
+    return new
+
+################################################################################
+
+def check_valid(new):
+    """
+    Check if section and priority for NEW packages exist in database.
+    Additionally does sanity checks:
+      - debian-installer packages have to be udeb (or source)
+      - non debian-installer packages can not be udeb
+      - source priority can only be assigned to dsc file types
+
+    @type new: dict
+    @param new: Dict of new packages with their section, priority and type.
+
+    """
+    for pkg in new.keys():
+        section_name = new[pkg]["section"]
+        priority_name = new[pkg]["priority"]
+        file_type = new[pkg]["type"]
+
+        section = get_section(section_name)
+        if section is None:
+            new[pkg]["section id"] = -1
+        else:
+            new[pkg]["section id"] = section.section_id
+
+        priority = get_priority(priority_name)
+        if priority is None:
+            new[pkg]["priority id"] = -1
+        else:
+            new[pkg]["priority id"] = priority.priority_id
+
+        # Sanity checks
+        di = section_name.find("debian-installer") != -1
 
-    def update_vars (self):
-        dump_filename = self.pkg.changes_file[:-8]+".dak"
-        dump_file = utils.open_file(dump_filename)
-        p = cPickle.Unpickler(dump_file)
-        for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
-            exec "self.pkg.%s.update(p.load());" % (i)
-        for i in [ "orig_tar_id", "orig_tar_location" ]:
-            exec "self.pkg.%s = p.load();" % (i)
-        dump_file.close()
+        # If d-i, we must be udeb and vice-versa
+        if     (di and file_type not in ("udeb", "dsc")) or \
+           (not di and file_type == "udeb"):
+            new[pkg]["section id"] = -1
+
+        # If dsc we need to be source and vice-versa
+        if (priority_name == "source" and file_type != "dsc") or \
+           (priority_name != "source" and file_type == "dsc"):
+            new[pkg]["priority id"] = -1
+
+###############################################################################
+
+class Upload(object):
+    """
+    Everything that has to do with an upload processed.
+
+    """
+    def __init__(self):
+        self.pkg = Changes()
+        self.reset()
 
     ###########################################################################
 
-    # This could just dump the dictionaries as is, but I'd like to
-    # avoid this so there's some idea of what process-accepted &
-    # process-new use from process-unchecked
+    def reset (self):
+        """ Reset a number of internal variables."""
 
-    def dump_vars(self, dest_dir):
-        for i in [ "changes", "dsc", "files", "dsc_files",
-                   "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
-            exec "%s = self.pkg.%s;" % (i,i)
-        dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
-        dump_file = utils.open_file(dump_filename, 'w')
-        try:
-            os.chmod(dump_filename, 0660)
-        except OSError, e:
-            if errno.errorcode[e.errno] == 'EPERM':
-                perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
-                if perms & stat.S_IROTH:
-                    utils.fubar("%s is world readable and chmod failed." % (dump_filename))
-            else:
-                raise
-
-        p = cPickle.Pickler(dump_file, 1)
-        for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
-            exec "%s = {}" % i
-        ## files
-        for file in files.keys():
-            d_files[file] = {}
-            for i in [ "package", "version", "architecture", "type", "size",
-                       "md5sum", "component", "location id", "source package",
-                       "source version", "maintainer", "dbtype", "files id",
-                       "new", "section", "priority", "othercomponents",
-                       "pool name", "original component" ]:
-                if files[file].has_key(i):
-                    d_files[file][i] = files[file][i]
-        ## changes
-        # Mandatory changes fields
-        for i in [ "distribution", "source", "architecture", "version",
-                   "maintainer", "urgency", "fingerprint", "changedby822",
-                   "changedby2047", "changedbyname", "maintainer822",
-                   "maintainer2047", "maintainername", "maintaineremail",
-                   "closes", "changes" ]:
-            d_changes[i] = changes[i]
-        # Optional changes fields
-        for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version" ]:
-            if changes.has_key(i):
-                d_changes[i] = changes[i]
-        ## dsc
-        for i in [ "source", "version", "maintainer", "fingerprint",
-                   "uploaders", "bts changelog" ]:
-            if dsc.has_key(i):
-                d_dsc[i] = dsc[i]
-        ## dsc_files
-        for file in dsc_files.keys():
-            d_dsc_files[file] = {}
-            # Mandatory dsc_files fields
-            for i in [ "size", "md5sum" ]:
-                d_dsc_files[file][i] = dsc_files[file][i]
-            # Optional dsc_files fields
-            for i in [ "files id" ]:
-                if dsc_files[file].has_key(i):
-                    d_dsc_files[file][i] = dsc_files[file][i]
-
-        for i in [ d_changes, d_dsc, d_files, d_dsc_files,
-                   legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
-            p.dump(i)
-        dump_file.close()
+        # Initialize the substitution template map
+        cnf = Config()
+        self.Subst = {}
+        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
+        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
+        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
+        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
+
+        self.rejects = []
+        self.warnings = []
+        self.notes = []
+
+        self.pkg.reset()
+
+    def package_info(self):
+        msg = ''
+
+        if len(self.rejects) > 0:
+            msg += "Reject Reasons:\n"
+            msg += "\n".join(self.rejects)
+
+        if len(self.warnings) > 0:
+            msg += "Warnings:\n"
+            msg += "\n".join(self.warnings)
+
+        if len(self.notes) > 0:
+            msg += "Notes:\n"
+            msg += "\n".join(self.notes)
+
+        return msg
 
     ###########################################################################
+    def update_subst(self):
+        """ Set up the per-package template substitution mappings """
 
-    # Set up the per-package template substitution mappings
+        cnf = Config()
 
-    def update_subst (self, reject_message = ""):
-        Subst = self.Subst
-        changes = self.pkg.changes
         # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
-        if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
-            changes["architecture"] = { "Unknown" : "" }
+        if not self.pkg.changes.has_key("architecture") or not \
+           isinstance(self.pkg.changes["architecture"], DictType):
+            self.pkg.changes["architecture"] = { "Unknown" : "" }
+
         # and maintainer2047 may not exist.
-        if not changes.has_key("maintainer2047"):
-            changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]
+        if not self.pkg.changes.has_key("maintainer2047"):
+            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
 
-        Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
-        Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
-        Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")
+        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
+        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
+        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
 
         # For source uploads the Changed-By field wins; otherwise Maintainer wins.
-        if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
-            Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
-            Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
-                                                     changes["maintainer2047"])
-            Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
+        if self.pkg.changes["architecture"].has_key("source") and \
+           self.pkg.changes["changedby822"] != "" and \
+           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
+
+            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
+            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
+            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
         else:
-            Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
-            Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
-            Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
-        if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
-            Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
+            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
+            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
+            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
+
+        if "sponsoremail" in self.pkg.changes:
+            self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
+
+        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
+            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
 
         # Apply any global override of the Maintainer field
-        if self.Cnf.get("Dinstall::OverrideMaintainer"):
-            Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
-            Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]
+        if cnf.get("Dinstall::OverrideMaintainer"):
+            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
+            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
 
-        Subst["__REJECT_MESSAGE__"] = reject_message
-        Subst["__SOURCE__"] = changes.get("source", "Unknown")
-        Subst["__VERSION__"] = changes.get("version", "Unknown")
+        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
+        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
+        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
+
+    ###########################################################################
+    def load_changes(self, filename):
+        """
+        @rtype boolean
+        @rvalue: whether the changes file was valid or not.  We may want to
+                 reject even if this is True (see what gets put in self.rejects).
+                 This is simply to prevent us even trying things later which will
+                 fail because we couldn't properly parse the file.
+        """
+        self.pkg.changes_file = filename
+
+        # Parse the .changes field into a dictionary
+        try:
+            self.pkg.changes.update(parse_changes(filename))
+        except CantOpenError:
+            self.rejects.append("%s: can't read file." % (filename))
+            return False
+        except ParseChangesError, line:
+            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
+            return False
+        except ChangesUnicodeError:
+            self.rejects.append("%s: changes file not proper utf-8" % (filename))
+            return False
+
+        # Parse the Files field from the .changes into another dictionary
+        try:
+            self.pkg.files.update(build_file_list(self.pkg.changes))
+        except ParseChangesError, line:
+            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
+            return False
+        except UnknownFormatError, format:
+            self.rejects.append("%s: unknown format '%s'." % (filename, format))
+            return False
+
+        # Check for mandatory fields
+        for i in ("distribution", "source", "binary", "architecture",
+                  "version", "maintainer", "files", "changes", "description"):
+            if not self.pkg.changes.has_key(i):
+                # Avoid undefined errors later
+                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
+                return False
+
+        # Strip a source version in brackets from the source field
+        if re_strip_srcver.search(self.pkg.changes["source"]):
+            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
+
+        # Ensure the source field is a valid package name.
+        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
+            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
+
+        # Split multi-value fields into a lower-level dictionary
+        for i in ("architecture", "distribution", "binary", "closes"):
+            o = self.pkg.changes.get(i, "")
+            if o != "":
+                del self.pkg.changes[i]
+
+            self.pkg.changes[i] = {}
+
+            for j in o.split():
+                self.pkg.changes[i][j] = 1
+
+        # Fix the Maintainer: field to be RFC822/2047 compatible
+        try:
+            (self.pkg.changes["maintainer822"],
+             self.pkg.changes["maintainer2047"],
+             self.pkg.changes["maintainername"],
+             self.pkg.changes["maintaineremail"]) = \
+                   fix_maintainer (self.pkg.changes["maintainer"])
+        except ParseMaintError, msg:
+            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
+                   % (filename, self.pkg.changes["maintainer"], msg))
+
+        # ...likewise for the Changed-By: field if it exists.
+        try:
+            (self.pkg.changes["changedby822"],
+             self.pkg.changes["changedby2047"],
+             self.pkg.changes["changedbyname"],
+             self.pkg.changes["changedbyemail"]) = \
+                   fix_maintainer (self.pkg.changes.get("changed-by", ""))
+        except ParseMaintError, msg:
+            self.pkg.changes["changedby822"] = ""
+            self.pkg.changes["changedby2047"] = ""
+            self.pkg.changes["changedbyname"] = ""
+            self.pkg.changes["changedbyemail"] = ""
+
+            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
+                   % (filename, self.pkg.changes.get("changed-by", ""), msg))
+
+        # Ensure all the values in Closes: are numbers
+        if self.pkg.changes.has_key("closes"):
+            for i in self.pkg.changes["closes"].keys():
+                if re_isanum.match (i) == None:
+                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
+
+        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
+        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
+        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
+
+        # Check there isn't already a changes file of the same name in one
+        # of the queue directories.
+        cnf = Config()
+        base_filename = os.path.basename(filename)
+        for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
+            if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], base_filename)):
+                self.rejects.append("%s: a file with this name already exists in the %s directory." % (base_filename, d))
+
+        # Check the .changes is non-empty
+        if not self.pkg.files:
+            self.rejects.append("%s: nothing to do (Files field is empty)." % (base_filename))
+            return False
+
+        # Changes was syntactically valid even if we'll reject
+        return True
+
+    ###########################################################################
+
+    def check_distributions(self):
+        "Check and map the Distribution field"
+
+        Cnf = Config()
+
+        # Handle suite mappings
+        for m in Cnf.ValueList("SuiteMappings"):
+            args = m.split()
+            mtype = args[0]
+            if mtype == "map" or mtype == "silent-map":
+                (source, dest) = args[1:3]
+                if self.pkg.changes["distribution"].has_key(source):
+                    del self.pkg.changes["distribution"][source]
+                    self.pkg.changes["distribution"][dest] = 1
+                    if mtype != "silent-map":
+                        self.notes.append("Mapping %s to %s." % (source, dest))
+                if self.pkg.changes.has_key("distribution-version"):
+                    if self.pkg.changes["distribution-version"].has_key(source):
+                        self.pkg.changes["distribution-version"][source]=dest
+            elif mtype == "map-unreleased":
+                (source, dest) = args[1:3]
+                if self.pkg.changes["distribution"].has_key(source):
+                    for arch in self.pkg.changes["architecture"].keys():
+                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
+                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
+                            del self.pkg.changes["distribution"][source]
+                            self.pkg.changes["distribution"][dest] = 1
+                            break
+            elif mtype == "ignore":
+                suite = args[1]
+                if self.pkg.changes["distribution"].has_key(suite):
+                    del self.pkg.changes["distribution"][suite]
+                    self.warnings.append("Ignoring %s as a target suite." % (suite))
+            elif mtype == "reject":
+                suite = args[1]
+                if self.pkg.changes["distribution"].has_key(suite):
+                    self.rejects.append("Uploads to %s are not accepted." % (suite))
+            elif mtype == "propup-version":
+                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
+                #
+                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
+                if self.pkg.changes["distribution"].has_key(args[1]):
+                    self.pkg.changes.setdefault("distribution-version", {})
+                    for suite in args[2:]:
+                        self.pkg.changes["distribution-version"][suite] = suite
+
+        # Ensure there is (still) a target distribution
+        if len(self.pkg.changes["distribution"].keys()) < 1:
+            self.rejects.append("No valid distribution remaining.")
+
+        # Ensure target distributions exist
+        for suite in self.pkg.changes["distribution"].keys():
+            if not Cnf.has_key("Suite::%s" % (suite)):
+                self.rejects.append("Unknown distribution `%s'." % (suite))
 
     ###########################################################################
 
     def build_summaries(self):
-        changes = self.pkg.changes
-        files = self.pkg.files
-
-        byhand = summary = new = ""
-
-        # changes["distribution"] may not exist in corner cases
-        # (e.g. unreadable changes files)
-        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
-            changes["distribution"] = {}
-
-        override_summary ="";
-        file_keys = files.keys()
-        file_keys.sort()
-        for file in file_keys:
-            if files[file].has_key("byhand"):
-                byhand = 1
-                summary += file + " byhand\n"
-            elif files[file].has_key("new"):
-                new = 1
-                summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
-                if files[file].has_key("othercomponents"):
-                    summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
-                if files[file]["type"] == "deb":
-                    deb_fh = utils.open_file(file)
-                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
-                    deb_fh.close()
-            else:
-                files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
-                destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
-                summary += file + "\n  to " + destination + "\n"
-                if files[file]["type"] in ["deb", "udeb", "dsc"]:
-                    override_summary += "%s - %s %s\n" % (file, files[file]["priority"], files[file]["section"])
+        """ Build a summary of changes the upload introduces. """
+
+        (byhand, new, summary, override_summary) = self.pkg.file_summary()
 
         short_summary = summary
 
         # This is for direport's benefit...
-        f = re_fdnic.sub("\n .\n", changes.get("changes",""))
+        f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
 
         if byhand or new:
             summary += "Changes: " + f
@@ -291,320 +489,323 @@ class Upload:
 
     ###########################################################################
 
-    def close_bugs (self, summary, action):
-        changes = self.pkg.changes
-        Subst = self.Subst
-        Cnf = self.Cnf
+    def close_bugs(self, summary, action):
+        """
+        Send mail to close bugs as instructed by the closes field in the changes file.
+        Also add a line to summary if any work was done.
+
+        @type summary: string
+        @param summary: summary text, as given by L{build_summaries}
+
+        @type action: bool
+        @param action: If set to false, no real action will be done.
+
+        @rtype: string
+        @return: summary. If action was taken, extended by the list of closed bugs.
+
+        """
 
-        bugs = changes["closes"].keys()
+        template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
+
+        bugs = self.pkg.changes["closes"].keys()
 
         if not bugs:
             return summary
 
         bugs.sort()
-        if not self.nmu.is_an_nmu(self.pkg):
-            if changes["distribution"].has_key("experimental"):
-               # tag bugs as fixed-in-experimental for uploads to experimental
-               summary += "Setting bugs to severity fixed: "
-               control_message = ""
-               for bug in bugs:
-                   summary += "%s " % (bug)
-                   control_message += "tag %s + fixed-in-experimental\n" % (bug)
-               if action and control_message != "":
-                   Subst["__CONTROL_MESSAGE__"] = control_message
-                   mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-experimental-fixed")
-                   utils.send_mail (mail_message)
-               if action:
-                   self.Logger.log(["setting bugs to fixed"]+bugs)
-
-
-           else:
-               summary += "Closing bugs: "
-               for bug in bugs:
-                   summary += "%s " % (bug)
-                   if action:
-                       Subst["__BUG_NUMBER__"] = bug
-                       if changes["distribution"].has_key("stable"):
-                           Subst["__STABLE_WARNING__"] = """
+        summary += "Closing bugs: "
+        for bug in bugs:
+            summary += "%s " % (bug)
+            if action:
+                self.Subst["__BUG_NUMBER__"] = bug
+                if self.pkg.changes["distribution"].has_key("stable"):
+                    self.Subst["__STABLE_WARNING__"] = """
 Note that this package is not part of the released stable Debian
 distribution.  It may have dependencies on other unreleased software,
 or other instabilities.  Please take care if you wish to install it.
 The update will eventually make its way into the next released Debian
 distribution."""
-                       else:
-                           Subst["__STABLE_WARNING__"] = ""
-                           mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
-                           utils.send_mail (mail_message)
-                if action:
-                    self.Logger.log(["closing bugs"]+bugs)
-
-       else:                     # NMU
-            summary += "Setting bugs to severity fixed: "
-            control_message = ""
-            for bug in bugs:
-                summary += "%s " % (bug)
-                control_message += "tag %s + fixed\n" % (bug)
-            if action and control_message != "":
-                Subst["__CONTROL_MESSAGE__"] = control_message
-                mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-nmu-fixed")
-                utils.send_mail (mail_message)
-            if action:
-                self.Logger.log(["setting bugs to fixed"]+bugs)
+                else:
+                    self.Subst["__STABLE_WARNING__"] = ""
+                    mail_message = utils.TemplateSubst(self.Subst, template)
+                    utils.send_mail(mail_message)
+
+                # Clear up after ourselves
+                del self.Subst["__BUG_NUMBER__"]
+                del self.Subst["__STABLE_WARNING__"]
+
+        if action:
+            self.Logger.log(["closing bugs"] + bugs)
+
         summary += "\n"
+
         return summary
 
     ###########################################################################
 
-    def announce (self, short_summary, action):
-        Subst = self.Subst
-        Cnf = self.Cnf
-        changes = self.pkg.changes
+    def announce(self, short_summary, action):
+        """
+        Send an announce mail about a new upload.
+
+        @type short_summary: string
+        @param short_summary: Short summary text to include in the mail
+
+        @type action: bool
+        @param action: If set to false, no real action will be done.
+
+        @rtype: string
+        @return: Text string describing the action taken.
+
+        """
+
+        cnf = Config()
+        announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
 
         # Only do announcements for source uploads with a recent dpkg-dev installed
-        if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
+        if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
+           self.pkg.changes["architecture"].has_key("source"):
             return ""
 
         lists_done = {}
         summary = ""
-        Subst["__SHORT_SUMMARY__"] = short_summary
 
-        for dist in changes["distribution"].keys():
-            list = Cnf.Find("Suite::%s::Announce" % (dist))
-            if list == "" or lists_done.has_key(list):
+        self.Subst["__SHORT_SUMMARY__"] = short_summary
+
+        for dist in self.pkg.changes["distribution"].keys():
+            announce_list = Cnf.Find("Suite::%s::Announce" % (dist))
+            if announce_list == "" or lists_done.has_key(announce_list):
                 continue
-            lists_done[list] = 1
-            summary += "Announcing to %s\n" % (list)
+
+            lists_done[announce_list] = 1
+            summary += "Announcing to %s\n" % (announce_list)
 
             if action:
-                Subst["__ANNOUNCE_LIST_ADDRESS__"] = list
-                if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
-                    Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
-                mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
-                utils.send_mail (mail_message)
+                self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
+                if cnf.get("Dinstall::TrackingServer") and \
+                   self.pkg.changes["architecture"].has_key("source"):
+                    trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
+                    self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
+
+                mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
+                utils.send_mail(mail_message)
+
+                del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
 
-        if Cnf.FindB("Dinstall::CloseBugs"):
+        if cnf.FindB("Dinstall::CloseBugs"):
             summary = self.close_bugs(summary, action)
 
+        del self.Subst["__SHORT_SUMMARY__"]
+
         return summary
 
     ###########################################################################
 
-    def accept (self, summary, short_summary):
-        Cnf = self.Cnf
-        Subst = self.Subst
-        files = self.pkg.files
-        changes = self.pkg.changes
-        changes_file = self.pkg.changes_file
-        dsc = self.pkg.dsc
+    def accept (self, summary, short_summary, targetdir=None):
+        """
+        Accept an upload.
+
+        This moves all files referenced from the .changes into the I{accepted}
+        queue, sends the accepted mail, announces to lists, closes bugs and
+        also checks for override disparities. If enabled it will write out
+        the version history for the BTS Version Tracking and will finally call
+        L{queue_build}.
+
+        @type summary: string
+        @param summary: Summary text
+
+        @type short_summary: string
+        @param short_summary: Short summary
+
+        """
+
+        cnf = Config()
+        stats = SummaryStats()
+
+        accepttemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted')
+
+        if targetdir is None:
+            targetdir = cnf["Dir::Queue::Accepted"]
 
         print "Accepting."
-        self.Logger.log(["Accepting changes",changes_file])
+        self.Logger.log(["Accepting changes", self.pkg.changes_file])
 
-        self.dump_vars(Cnf["Dir::Queue::Accepted"])
+        self.write_dot_dak(targetdir)
 
         # Move all the files into the accepted directory
-        utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
-        file_keys = files.keys()
-        for file in file_keys:
-            utils.move(file, Cnf["Dir::Queue::Accepted"])
-            self.accept_bytes += float(files[file]["size"])
-        self.accept_count += 1
+        utils.move(self.pkg.changes_file, targetdir)
+
+        for name, entry in sorted(self.pkg.files.items()):
+            utils.move(name, targetdir)
+            stats.accept_bytes += float(entry["size"])
+
+        stats.accept_count += 1
 
         # Send accept mail, announce to lists, close bugs and check for
         # override disparities
-        if not Cnf["Dinstall::Options::No-Mail"]:
-            Subst["__SUITE__"] = ""
-            Subst["__SUMMARY__"] = summary
-            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
+        if not cnf["Dinstall::Options::No-Mail"]:
+            self.Subst["__SUITE__"] = ""
+            self.Subst["__SUMMARY__"] = summary
+            mail_message = utils.TemplateSubst(self.Subst, accepttemplate)
             utils.send_mail(mail_message)
             self.announce(short_summary, 1)
 
-
         ## Helper stuff for DebBugs Version Tracking
-        if Cnf.Find("Dir::Queue::BTSVersionTrack"):
+        if cnf.Find("Dir::Queue::BTSVersionTrack"):
             # ??? once queue/* is cleared on *.d.o and/or reprocessed
             # the conditionalization on dsc["bts changelog"] should be
             # dropped.
 
             # Write out the version history from the changelog
-            if changes["architecture"].has_key("source") and \
-               dsc.has_key("bts changelog"):
+            if self.pkg.changes["architecture"].has_key("source") and \
+               self.pkg.dsc.has_key("bts changelog"):
 
-                temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
-                                                    dotprefix=1, perms=0644)
-                version_history = utils.open_file(temp_filename, 'w')
-                version_history.write(dsc["bts changelog"])
+                (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
+                version_history = os.fdopen(fd, 'w')
+                version_history.write(self.pkg.dsc["bts changelog"])
                 version_history.close()
-                filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
-                                      changes_file[:-8]+".versions")
+                filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
+                                      self.pkg.changes_file[:-8]+".versions")
                 os.rename(temp_filename, filename)
+                os.chmod(filename, 0644)
 
             # Write out the binary -> source mapping.
-            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
-                                                dotprefix=1, perms=0644)
-            debinfo = utils.open_file(temp_filename, 'w')
-            for file in file_keys:
-                f = files[file]
-                if f["type"] == "deb":
-                    line = " ".join([f["package"], f["version"],
-                                     f["architecture"], f["source package"],
-                                     f["source version"]])
+            (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
+            debinfo = os.fdopen(fd, 'w')
+            for name, entry in sorted(self.pkg.files.items()):
+                if entry["type"] == "deb":
+                    line = " ".join([entry["package"], entry["version"],
+                                     entry["architecture"], entry["source package"],
+                                     entry["source version"]])
                     debinfo.write(line+"\n")
             debinfo.close()
-            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
-                                  changes_file[:-8]+".debinfo")
+            filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
+                                  self.pkg.changes_file[:-8]+".debinfo")
             os.rename(temp_filename, filename)
+            os.chmod(filename, 0644)
+
+        # It is Cnf["Dir::Queue::Accepted"] here, not targetdir!
+        # <Ganneff> we do call queue_build too
+        # <mhy> well yes, we'd have had to if we were inserting into accepted
+        # <Ganneff> now. thats database only.
+        # <mhy> urgh, that's going to get messy
+        # <Ganneff> so i make the p-n call to it *also* using accepted/
+        # <mhy> but then the packages will be in the queue_build table without the files being there
+        # <Ganneff> as the buildd queue is only regenerated whenever unchecked runs
+        # <mhy> ah, good point
+        # <Ganneff> so it will work out, as unchecked move it over
+        # <mhy> that's all completely sick
+        # <Ganneff> yes
+
+        # This routine returns None on success or an error on failure
+        res = get_queue('accepted').autobuild_upload(self.pkg, cnf["Dir::Queue::Accepted"])
+        if res:
+            utils.fubar(res)
 
-        self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
 
-    ###########################################################################
-
-    def queue_build (self, queue, path):
-        Cnf = self.Cnf
-        Subst = self.Subst
-        files = self.pkg.files
-        changes = self.pkg.changes
-        changes_file = self.pkg.changes_file
-        dsc = self.pkg.dsc
-        file_keys = files.keys()
-
-        ## Special support to enable clean auto-building of queued packages
-        queue_id = database.get_or_set_queue_id(queue)
-
-        self.projectB.query("BEGIN WORK")
-        for suite in changes["distribution"].keys():
-            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
-                continue
-            suite_id = database.get_suite_id(suite)
-            dest_dir = Cnf["Dir::QueueBuild"]
-            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
-                dest_dir = os.path.join(dest_dir, suite)
-            for file in file_keys:
-                src = os.path.join(path, file)
-                dest = os.path.join(dest_dir, file)
-                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
-                    # Copy it since the original won't be readable by www-data
-                    utils.copy(src, dest)
-                else:
-                    # Create a symlink to it
-                    os.symlink(src, dest)
-                # Add it to the list of packages for later processing by apt-ftparchive
-                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
-            # If the .orig.tar.gz is in the pool, create a symlink to
-            # it (if one doesn't already exist)
-            if self.pkg.orig_tar_id:
-                # Determine the .orig.tar.gz file name
-                for dsc_file in self.pkg.dsc_files.keys():
-                    if dsc_file.endswith(".orig.tar.gz"):
-                        filename = dsc_file
-                dest = os.path.join(dest_dir, filename)
-                # If it doesn't exist, create a symlink
-                if not os.path.exists(dest):
-                    # Find the .orig.tar.gz in the pool
-                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
-                    ql = q.getresult()
-                    if not ql:
-                        utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
-                    src = os.path.join(ql[0][0], ql[0][1])
-                    os.symlink(src, dest)
-                    # Add it to the list of packages for later processing by apt-ftparchive
-                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
-                # if it does, update things to ensure it's not removed prematurely
-                else:
-                    self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))
-
-        self.projectB.query("COMMIT WORK")
+    def check_override (self):
+        """
+        Checks override entries for validity. Mails "Override disparity" warnings,
+        if that feature is enabled.
 
-    ###########################################################################
+        Abandons the check if
+          - override disparity checks are disabled
+          - mail sending is disabled
+        """
 
-    def check_override (self):
-        Subst = self.Subst
-        changes = self.pkg.changes
-        files = self.pkg.files
-        Cnf = self.Cnf
+        cnf = Config()
 
         # Abandon the check if:
-        #  a) it's a non-sourceful upload
-        #  b) override disparity checks have been disabled
-        #  c) we're not sending mail
-        if not changes["architecture"].has_key("source") or \
-           not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
-           Cnf["Dinstall::Options::No-Mail"]:
+        #  a) override disparity checks have been disabled
+        #  b) we're not sending mail
+        if not cnf.FindB("Dinstall::OverrideDisparityCheck") or \
+           cnf["Dinstall::Options::No-Mail"]:
             return
 
-        summary = ""
-        file_keys = files.keys()
-        file_keys.sort()
-        for file in file_keys:
-            if not files[file].has_key("new") and files[file]["type"] == "deb":
-                section = files[file]["section"]
-                override_section = files[file]["override section"]
-                if section.lower() != override_section.lower() and section != "-":
-                    # Ignore this; it's a common mistake and not worth whining about
-                    if section.lower() == "non-us/main" and override_section.lower() == "non-us":
-                        continue
-                    summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section)
-                priority = files[file]["priority"]
-                override_priority = files[file]["override priority"]
-                if priority != override_priority and priority != "-":
-                    summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority)
+        summary = self.pkg.check_override()
 
         if summary == "":
             return
 
-        Subst["__SUMMARY__"] = summary
-        mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
+        overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
+
+        self.Subst["__SUMMARY__"] = summary
+        mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
         utils.send_mail(mail_message)
+        del self.Subst["__SUMMARY__"]
 
     ###########################################################################
+    def force_reject(self, reject_files):
+        """
+        Forcefully move files from the current directory to the
+        reject directory.  If any file already exists in the reject
+        directory it will be moved to the morgue to make way for
+        the new file.
 
-    def force_reject (self, files):
-        """Forcefully move files from the current directory to the
-           reject directory.  If any file already exists in the reject
-           directory it will be moved to the morgue to make way for
-           the new file."""
+        @type reject_files: list
+        @param reject_files: list of filenames to move to the reject directory
 
-        Cnf = self.Cnf
+        """
 
-        for file in files:
+        cnf = Config()
+
+        for file_entry in reject_files:
             # Skip any files which don't exist or which we don't have permission to copy.
-            if os.access(file,os.R_OK) == 0:
+            if os.access(file_entry, os.R_OK) == 0:
                 continue
-            dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file)
+
+            dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
+
             try:
-                dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
+                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
             except OSError, e:
                 # File exists?  Let's try and move it to the morgue
-                if errno.errorcode[e.errno] == 'EEXIST':
-                    morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file)
+                if e.errno == errno.EEXIST:
+                    morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
                     try:
                         morgue_file = utils.find_next_free(morgue_file)
-                    except utils.tried_too_hard_exc:
+                    except NoFreeFilenameError:
                         # Something's either gone badly Pete Tong, or
                         # someone is trying to exploit us.
-                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file))
+                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                         return
                     utils.move(dest_file, morgue_file, perms=0660)
                     try:
                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                     except OSError, e:
                         # Likewise
-                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file))
+                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                         return
                 else:
                     raise
             # If we got here, we own the destination file, so we can
             # safely overwrite it.
-            utils.move(file, dest_file, 1, perms=0660)
+            utils.move(file_entry, dest_file, 1, perms=0660)
             os.close(dest_fd)
 
     ###########################################################################
+    def do_reject (self, manual=0, reject_message="", note=""):
+        """
+        Reject an upload. If called without a reject message or C{manual} is
+        true, spawn an editor so the user can write one.
+
+        @type manual: bool
+        @param manual: manual or automated rejection
 
-    def do_reject (self, manual = 0, reject_message = ""):
+        @type reject_message: string
+        @param reject_message: A reject message
+
+        @return: 0
+
+        """
         # If we weren't given a manual rejection message, spawn an
         # editor so the user can add one in...
         if manual and not reject_message:
-            temp_filename = utils.temp_filename()
+            (fd, temp_filename) = utils.temp_filename()
+            temp_file = os.fdopen(fd, 'w')
+            if len(note) > 0:
+                for line in note:
+                    temp_file.write(line)
+            temp_file.close()
             editor = os.environ.get("EDITOR","vi")
             answer = 'E'
             while answer == 'E':
@@ -630,15 +831,13 @@ distribution."""
 
         print "Rejecting.\n"
 
-        Cnf = self.Cnf
-        Subst = self.Subst
-        pkg = self.pkg
+        cnf = Config()
 
-        reason_filename = pkg.changes_file[:-8] + ".reason"
-        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename
+        reason_filename = self.pkg.changes_file[:-8] + ".reason"
+        reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
 
         # Move all the files into the reject directory
-        reject_files = pkg.files.keys() + [pkg.changes_file]
+        reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
         self.force_reject(reject_files)
 
         # If we fail here someone is probably trying to exploit the race
@@ -647,175 +846,156 @@ distribution."""
             os.unlink(reason_filename)
         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
 
+        rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
+
         if not manual:
-            Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
-            Subst["__MANUAL_REJECT_MESSAGE__"] = ""
-            Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
+            self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
+            self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
+            self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
             os.write(reason_fd, reject_message)
-            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
+            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
         else:
             # Build up the rejection email
-            user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])
-
-            Subst["__REJECTOR_ADDRESS__"] = user_email_address
-            Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
-            Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
-            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
+            user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
+            self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
+            self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
+            self.Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
+            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
             # Write the rejection email out as the <foo>.reason file
             os.write(reason_fd, reject_mail_message)
 
+        del self.Subst["__REJECTOR_ADDRESS__"]
+        del self.Subst["__MANUAL_REJECT_MESSAGE__"]
+        del self.Subst["__CC__"]
+
         os.close(reason_fd)
 
         # Send the rejection mail if appropriate
-        if not Cnf["Dinstall::Options::No-Mail"]:
+        if not cnf["Dinstall::Options::No-Mail"]:
             utils.send_mail(reject_mail_message)
 
         self.Logger.log(["rejected", pkg.changes_file])
+
         return 0
 
     ################################################################################
+    def in_override_p(self, package, component, suite, binary_type, file, session=None):
+        """
+        Check if a package already has override entries in the DB
 
-    # Ensure that source exists somewhere in the archive for the binary
-    # upload being processed.
-    #
-    # (1) exact match                      => 1.0-3
-    # (2) Bin-only NMU                     => 1.0-3+b1 , 1.0-3.1+b1
-
-    def source_exists (self, package, source_version, suites = ["any"]):
-       okay = 1
-       for suite in suites:
-           if suite == "any":
-               que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
-                   (package)
-           else:
-               # source must exist in suite X, or in some other suite that's
-               # mapped to X, recursively... silent-maps are counted too,
-               # unreleased-maps aren't.
-               maps = self.Cnf.ValueList("SuiteMappings")[:]
-               maps.reverse()
-               maps = [ m.split() for m in maps ]
-               maps = [ (x[1], x[2]) for x in maps
-                               if x[0] == "map" or x[0] == "silent-map" ]
-               s = [suite]
-               for x in maps:
-                       if x[1] in s and x[0] not in s:
-                               s.append(x[0])
-
-               que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
-            q = self.projectB.query(que)
-
-            # Reduce the query results to a list of version numbers
-            ql = [ i[0] for i in q.getresult() ]
-
-            # Try (1)
-            if source_version in ql:
-                continue
+        @type package: string
+        @param package: package name
 
-            # Try (2)
-            orig_source_version = re_bin_only_nmu.sub('', source_version)
-            if orig_source_version in ql:
-                continue
+        @type component: string
+        @param component: component name
 
-            # No source found...
-            okay = 0
-           break
-       return okay
+        @type suite: string
+        @param suite: suite name
 
-    ################################################################################
-    
-    def in_override_p (self, package, component, suite, binary_type, file):
-        files = self.pkg.files
+        @type binary_type: string
+        @param binary_type: type of the package
+
+        @type file: string
+        @param file: filename we check
+
+        @return: the database result. But no one cares anyway.
+
+        """
+
+        cnf = Config()
+
+        if session is None:
+            session = DBConn().session()
 
         if binary_type == "": # must be source
-            type = "dsc"
+            file_type = "dsc"
         else:
-            type = binary_type
+            file_type = binary_type
 
         # Override suite name; used for example with proposed-updates
-        if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
-            suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
-
-        # Avoid <undef> on unknown distributions
-        suite_id = database.get_suite_id(suite)
-        if suite_id == -1:
-            return None
-        component_id = database.get_component_id(component)
-        type_id = database.get_override_type_id(type)
-
-        # FIXME: nasty non-US speficic hack
-        if component.lower().startswith("non-us/"):
-            component = component[7:]
-
-        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
-                           % (package, suite_id, component_id, type_id))
-        result = q.getresult()
+        if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
+            suite = cnf["Suite::%s::OverrideSuite" % (suite)]
+
+        result = get_override(package, suite, component, file_type, session)
+
         # If checking for a source package fall back on the binary override type
-        if type == "dsc" and not result:
-            deb_type_id = database.get_override_type_id("deb")
-            udeb_type_id = database.get_override_type_id("udeb")
-            q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
-                               % (package, suite_id, component_id, deb_type_id, udeb_type_id))
-            result = q.getresult()
+        if file_type == "dsc" and len(result) < 1:
+            result = get_override(package, suite, component, ['deb', 'udeb'], session)
 
         # Remember the section and priority so we can check them later if appropriate
-        if result:
-            files[file]["override section"] = result[0][0]
-            files[file]["override priority"] = result[0][1]
+        if len(result) > 0:
+            result = result[0]
+            self.pkg.files[file]["override section"] = result.section.section
+            self.pkg.files[file]["override priority"] = result.priority.priority
+            return result
 
-        return result
+        return None
 
     ################################################################################
+    def get_anyversion(self, sv_list, suite):
+        """
+        @type sv_list: list
+        @param sv_list: list of (suite, version) tuples to check
 
-    def reject (self, str, prefix="Rejected: "):
-        if str:
-            # Unlike other rejects we add new lines first to avoid trailing
-            # new lines when this message is passed back up to a caller.
-            if self.reject_message:
-                self.reject_message += "\n"
-            self.reject_message += prefix + str
-
-    ################################################################################
+        @type suite: string
+        @param suite: suite name
 
-    def get_anyversion(self, query_result, suite):
-        anyversion=None
+        Description: TODO
+        """
+        anyversion = None
         anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
-        for (v, s) in query_result:
+        for (s, v) in sv_list:
             if s in [ x.lower() for x in anysuite ]:
                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
-                    anyversion=v
+                    anyversion = v
+
         return anyversion
 
     ################################################################################
 
-    def cross_suite_version_check(self, query_result, file, new_version):
-        """Ensure versions are newer than existing packages in target
+    def cross_suite_version_check(self, sv_list, file, new_version, sourceful=False):
+        """
+        @type sv_list: list
+        @param sv_list: list of (suite, version) tuples to check
+
+        @type file: string
+        @param file: XXX
+
+        @type new_version: string
+        @param new_version: XXX
+
+        Ensure versions are newer than existing packages in target
         suites and that cross-suite version checking rules as
-        set out in the conf file are satisfied."""
+        set out in the conf file are satisfied.
+        """
+
+        cnf = Config()
 
         # Check versions for each target suite
         for target_suite in self.pkg.changes["distribution"].keys():
-            must_be_newer_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
-            must_be_older_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
+            must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
+            must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
+
             # Enforce "must be newer than target suite" even if conffile omits it
             if target_suite not in must_be_newer_than:
                 must_be_newer_than.append(target_suite)
-            for entry in query_result:
-                existent_version = entry[0]
-                suite = entry[1]
-                if suite in must_be_newer_than and \
-                   apt_pkg.VersionCompare(new_version, existent_version) < 1:
-                    self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
-                if suite in must_be_older_than and \
-                   apt_pkg.VersionCompare(new_version, existent_version) > -1:
-                    ch = self.pkg.changes
+
+            for (suite, existent_version) in sv_list:
+                vercmp = apt_pkg.VersionCompare(new_version, existent_version)
+
+                if suite in must_be_newer_than and sourceful and vercmp < 1:
+                    self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
+
+                if suite in must_be_older_than and vercmp > -1:
                     cansave = 0
-                    if ch.get('distribution-version', {}).has_key(suite):
+
+                    if self.pkg.changes.get('distribution-version', {}).has_key(suite):
                         # we really use the other suite, ignoring the conflicting one ...
-                        addsuite = ch["distribution-version"][suite]
-                    
-                        add_version = self.get_anyversion(query_result, addsuite)
-                        target_version = self.get_anyversion(query_result, target_suite)
-                    
+                        addsuite = self.pkg.changes["distribution-version"][suite]
+
+                        add_version = self.get_anyversion(sv_list, addsuite)
+                        target_version = self.get_anyversion(sv_list, target_suite)
+
                         if not add_version:
                             # not add_version can only happen if we map to a suite
                             # that doesn't enhance the suite we're propup'ing from.
@@ -826,7 +1006,7 @@ distribution."""
                             # than complaining. either way, this isn't a REJECT issue
                             #
                             # And - we really should complain to the dorks who configured dak
-                            self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
+                            self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
                             self.pkg.changes.setdefault("propdistribution", {})
                             self.pkg.changes["propdistribution"][addsuite] = 1
                             cansave = 1
@@ -834,97 +1014,97 @@ distribution."""
                             # not targets_version is true when the package is NEW
                             # we could just stick with the "...old version..." REJECT
                             # for this, I think.
-                            self.reject("Won't propogate NEW packages.")
+                            self.rejects.append("Won't propogate NEW packages.")
                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                             # propogation would be redundant. no need to reject though.
-                            self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
+                            self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
                             cansave = 1
                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
                             # propogate!!
-                            self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
+                            self.warnings.append("Propogating upload to %s" % (addsuite))
                             self.pkg.changes.setdefault("propdistribution", {})
                             self.pkg.changes["propdistribution"][addsuite] = 1
                             cansave = 1
-                
+
                     if not cansave:
-                        self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
+                        self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
 
     ################################################################################
 
-    def check_binary_against_db(self, file):
-        self.reject_message = ""
-        files = self.pkg.files
+    def check_binary_against_db(self, file, session=None):
+        """
+
+        """
+
+        if session is None:
+            session = DBConn().session()
 
         # Ensure version is sane
-        q = self.projectB.query("""
-SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
-                                     architecture a
- WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
-   AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
-                                % (files[file]["package"],
-                                   files[file]["architecture"]))
-        self.cross_suite_version_check(q.getresult(), file, files[file]["version"])
+        q = session.query(BinAssociation)
+        q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[file]["package"])
+        q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[file]["architecture"], 'all']))
+
+        self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
+                                       file, self.pkg.files[file]["version"], sourceful=False)
 
         # Check for any existing copies of the file
-        q = self.projectB.query("""
-SELECT b.id FROM binaries b, architecture a
- WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
-   AND a.id = b.architecture"""
-                                % (files[file]["package"],
-                                   files[file]["version"],
-                                   files[file]["architecture"]))
-        if q.getresult():
-            self.reject("%s: can not overwrite existing copy already in the archive." % (file))
-
-        return self.reject_message
+        q = session.query(DBBinary).filter_by(package=self.pkg.files[file]["package"])
+        q = q.filter_by(version=self.pkg.files[file]["version"])
+        q = q.join(Architecture).filter_by(arch_string=self.pkg.files[file]["architecture"])
+
+        if q.count() > 0:
+            self.rejects.append("%s: can not overwrite existing copy already in the archive." % (file))
 
     ################################################################################
 
-    def check_source_against_db(self, file):
-        self.reject_message = ""
-        dsc = self.pkg.dsc
+    def check_source_against_db(self, file, session=None):
+        """
+        """
+        if session is None:
+            session = DBConn().session()
+
+        source = self.pkg.dsc.get("source")
+        version = self.pkg.dsc.get("version")
 
         # Ensure version is sane
-        q = self.projectB.query("""
-SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
- WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
-        self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))
+        q = session.query(SrcAssociation)
+        q = q.join(DBSource).filter(DBSource.source==source)
 
-        return self.reject_message
+        self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
+                                       file, version, sourceful=True)
 
     ################################################################################
+    def check_dsc_against_db(self, file):
+        """
 
-    # **WARNING**
-    # NB: this function can remove entries from the 'files' index [if
-    # the .orig.tar.gz is a duplicate of the one in the archive]; if
-    # you're iterating over 'files' and call this function as part of
-    # the loop, be sure to add a check to the top of the loop to
-    # ensure you haven't just tried to derefernece the deleted entry.
-    # **WARNING**
+        @warning: NB: this function can remove entries from the 'files' index [if
+         the .orig.tar.gz is a duplicate of the one in the archive]; if
+         you're iterating over 'files' and call this function as part of
+         the loop, be sure to add a check to the top of the loop to
+         ensure you haven't just tried to dereference the deleted entry.
 
-    def check_dsc_against_db(self, file):
-        self.reject_message = ""
-        files = self.pkg.files
-        dsc_files = self.pkg.dsc_files
-        legacy_source_untouchable = self.pkg.legacy_source_untouchable
+        """
         self.pkg.orig_tar_gz = None
 
         # Try and find all files mentioned in the .dsc.  This has
         # to work harder to cope with the multiple possible
         # locations of an .orig.tar.gz.
-        for dsc_file in dsc_files.keys():
+        # The ordering on the select is needed to pick the newest orig
+        # when it exists in multiple places.
+        for dsc_name, dsc_entry in self.pkg.dsc_files.items():
             found = None
-            if files.has_key(dsc_file):
-                actual_md5 = files[dsc_file]["md5sum"]
-                actual_size = int(files[dsc_file]["size"])
-                found = "%s in incoming" % (dsc_file)
+            if self.pkg.files.has_key(dsc_name):
+                actual_md5 = self.pkg.files[dsc_name]["md5sum"]
+                actual_size = int(self.pkg.files[dsc_name]["size"])
+                found = "%s in incoming" % (dsc_name)
+
                 # Check the file does not already exist in the archive
-                q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
-                ql = q.getresult()
+                ql = get_poolfile_like_name(dsc_name)
+
                 # Strip out anything that isn't '%s' or '/%s$'
                 for i in ql:
-                    if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
+                    if not i.filename.endswith(dsc_name):
                         ql.remove(i)
 
                 # "[dak] has not broken them.  [dak] has fixed a
@@ -936,100 +1116,88 @@ SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
                 # the same name and version.)"
                 #                        -- ajk@ on d-devel@l.d.o
 
-                if ql:
+                if len(ql) > 0:
                     # Ignore exact matches for .orig.tar.gz
                     match = 0
-                    if dsc_file.endswith(".orig.tar.gz"):
+                    if dsc_name.endswith(".orig.tar.gz"):
                         for i in ql:
-                            if files.has_key(dsc_file) and \
-                               int(files[dsc_file]["size"]) == int(i[0]) and \
-                               files[dsc_file]["md5sum"] == i[1]:
-                                self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
-                                del files[dsc_file]
-                                self.pkg.orig_tar_gz = i[2] + i[3]
+                            if self.pkg.files.has_key(dsc_name) and \
+                               int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
+                               self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
+                                self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
+                                # TODO: Don't delete the entry, just mark it as not needed
+                                # This would fix the stupidity of changing something we often iterate over
+                                # whilst we're doing it
+                                del self.pkg.files[dsc_name]
+                                self.pkg.orig_tar_gz = os.path.join(i.location.path, i.filename)
                                 match = 1
 
                     if not match:
-                        self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
-            elif dsc_file.endswith(".orig.tar.gz"):
+                        self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
+
+            elif dsc_name.endswith(".orig.tar.gz"):
                 # Check in the pool
-                q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
-                ql = q.getresult()
+                ql = get_poolfile_like_name(dsc_name)
+
                 # Strip out anything that isn't '%s' or '/%s$'
+                # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
                 for i in ql:
-                    if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
+                    if not i.filename.endswith(dsc_name):
                         ql.remove(i)
 
-                if ql:
+                if len(ql) > 0:
                     # Unfortunately, we may get more than one match here if,
                     # for example, the package was in potato but had an -sa
                     # upload in woody.  So we need to choose the right one.
 
-                    x = ql[0]; # default to something sane in case we don't match any or have only one
+                    # default to something sane in case we don't match any or have only one
+                    x = ql[0]
 
                     if len(ql) > 1:
                         for i in ql:
-                            old_file = i[0] + i[1]
+                            old_file = os.path.join(i.location.path, i.filename)
                             old_file_fh = utils.open_file(old_file)
                             actual_md5 = apt_pkg.md5sum(old_file_fh)
                             old_file_fh.close()
                             actual_size = os.stat(old_file)[stat.ST_SIZE]
-                            if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
+                            if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
                                 x = i
-                            else:
-                                legacy_source_untouchable[i[3]] = ""
 
-                    old_file = x[0] + x[1]
+                    old_file = os.path.join(x.location.path, x.filename)
                     old_file_fh = utils.open_file(old_file)
                     actual_md5 = apt_pkg.md5sum(old_file_fh)
                     old_file_fh.close()
                     actual_size = os.stat(old_file)[stat.ST_SIZE]
                     found = old_file
-                    suite_type = x[2]
-                    dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
+                    suite_type = x.location.archive_type
+                    # need this for updating dsc_files in install()
+                    dsc_entry["files id"] = x.file_id
                     # See install() in process-accepted...
-                    self.pkg.orig_tar_id = x[3]
+                    self.pkg.orig_tar_id = x.file_id
                     self.pkg.orig_tar_gz = old_file
-                    if suite_type == "legacy" or suite_type == "legacy-mixed":
-                        self.pkg.orig_tar_location = "legacy"
-                    else:
-                        self.pkg.orig_tar_location = x[4]
+                    self.pkg.orig_tar_location = x.location.location_id
                 else:
+                    # TODO: Record the queues and info in the DB so we don't hardcode all this crap
                     # Not there? Check the queue directories...
-
-                    in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
-                    # See process_it() in 'dak process-unchecked' for explanation of this
-                    if os.path.exists(in_unchecked):
-                        return (self.reject_message, in_unchecked)
-                    else:
-                        for dir in [ "Accepted", "New", "Byhand" ]:
-                            in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file)
-                            if os.path.exists(in_otherdir):
-                                in_otherdir_fh = utils.open_file(in_otherdir)
-                                actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
-                                in_otherdir_fh.close()
-                                actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
-                                found = in_otherdir
-                                self.pkg.orig_tar_gz = in_otherdir
+                    for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
+                        in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)], dsc_name)
+                        if os.path.exists(in_otherdir):
+                            in_otherdir_fh = utils.open_file(in_otherdir)
+                            actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
+                            in_otherdir_fh.close()
+                            actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
+                            found = in_otherdir
+                            self.pkg.orig_tar_gz = in_otherdir
 
                     if not found:
-                        self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
+                        self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_name))
                         self.pkg.orig_tar_gz = -1
                         continue
             else:
-                self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
+                self.rejects.append("%s refers to %s, but I can't find it in the queue." % (file, dsc_name))
                 continue
-            if actual_md5 != dsc_files[dsc_file]["md5sum"]:
-                self.reject("md5sum for %s doesn't match %s." % (found, file))
-            if actual_size != int(dsc_files[dsc_file]["size"]):
-                self.reject("size for %s doesn't match %s." % (found, file))
-
-        return (self.reject_message, None)
-
-    def do_query(self, q):
-        sys.stderr.write("query: \"%s\" ... " % (q))
-        before = time.time()
-        r = self.projectB.query(q)
-        time_diff = time.time()-before
-        sys.stderr.write("took %.3f seconds.\n" % (time_diff))
-        return r
+            if actual_md5 != dsc_entry["md5sum"]:
+                self.rejects.append("md5sum for %s doesn't match %s." % (found, file))
+            if actual_size != int(dsc_entry["size"]):
+                self.rejects.append("size for %s doesn't match %s." % (found, file))
+