Fix various errors caused by moving stuff around
diff --git a/daklib/queue.py b/daklib/queue.py
index 77f402502ca0b6952ea400ec7d1a9903bc4c1992..bfacbfaf197b5b89d115150fa3eb0d6396e24f50 100755
@@ -26,48 +26,55 @@ Queue utility functions for dak
 
 ###############################################################################
 
-import cPickle
 import errno
 import os
-import pg
 import stat
 import sys
 import time
 import apt_inst
 import apt_pkg
 import utils
+import commands
+import shutil
+import textwrap
 from types import *
+from sqlalchemy.sql.expression import desc
+from sqlalchemy.orm.exc import NoResultFound
+
+import yaml
 
 from dak_exceptions import *
 from changes import *
 from regexes import *
 from config import Config
 from holding import Holding
+from urgencylog import UrgencyLog
 from dbconn import *
 from summarystats import SummaryStats
-from utils import parse_changes
+from utils import parse_changes, check_dsc_files
 from textutils import fix_maintainer
+from binary import Binary
 
 ###############################################################################
 
-def get_type(f, session=None):
+def get_type(f, session):
     """
     Get the file type of C{f}
 
     @type f: dict
     @param f: file entry from Changes object
 
+    @type session: SQLA Session
+    @param session: SQL Alchemy session object
+
     @rtype: string
     @return: filetype
 
     """
-    if session is None:
-        session = DBConn().session()
-
     # Determine the type
     if f.has_key("dbtype"):
-        file_type = file["dbtype"]
-    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
+        file_type = f["dbtype"]
+    elif re_source_ext.match(f["type"]):
         file_type = "dsc"
     else:
         utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))
@@ -112,7 +119,7 @@ def determine_new(changes, files, warn=1):
         pkg = f["package"]
         priority = f["priority"]
         section = f["section"]
-        file_type = get_type(f)
+        file_type = get_type(f, session)
         component = f["component"]
 
         if file_type == "dsc":
@@ -157,6 +164,8 @@ def determine_new(changes, files, warn=1):
             if new[pkg].has_key("othercomponents"):
                 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
 
+    session.close()
+
     return new
 
 ################################################################################
@@ -205,28 +214,14 @@ def check_valid(new):
 
 ###############################################################################
 
-def lookup_uid_from_fingerprint(fpr, session):
-    uid = None
-    uid_name = ""
-    # This is a stupid default, but see the comments below
-    is_dm = False
-
-    user = get_uid_from_fingerprint(changes["fingerprint"], session)
-
-    if user is not None:
-        uid = user.uid
-        if user.name is None:
-            uid_name = ''
-        else:
-            uid_name = user.name
-
-        # Check the relevant fingerprint (which we have to have)
-        for f in uid.fingerprint:
-            if f.fingerprint == changes['fingerprint']:
-                is_dm = f.keyring.debian_maintainer
-                break
-
-    return (uid, uid_name, is_dm)
+def check_status(files):
+    new = byhand = 0
+    for f in files.keys():
+        if files[f]["type"] == "byhand":
+            byhand = 1
+        elif files[f].has_key("new"):
+            new = 1
+    return (new, byhand)
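
A quick sketch of how a caller might use check_status(); the files dict below is hypothetical:

    # Hypothetical files dict as built from a .changes file.
    files = {
        "foo_1.0-1_amd64.deb": {"type": "deb", "new": 1},
        "bar_1.0.tar.gz": {"type": "byhand"},
    }
    (new, byhand) = check_status(files)
    if new:
        print "upload contains NEW files"
    if byhand:
        print "upload contains BYHAND files"
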
 
 ###############################################################################
 
@@ -255,6 +250,7 @@ class Upload(object):
 
     """
     def __init__(self):
+        self.logger = None
         self.pkg = Changes()
         self.reset()
 
@@ -278,19 +274,20 @@ class Upload(object):
         self.pkg.reset()
 
     def package_info(self):
-        msg = ''
-
-        if len(self.rejects) > 0:
-            msg += "Reject Reasons:\n"
-            msg += "\n".join(self.rejects)
+        """
+        Format various messages from this Upload to send to the maintainer.
+        """
 
-        if len(self.warnings) > 0:
-            msg += "Warnings:\n"
-            msg += "\n".join(self.warnings)
+        msgs = (
+            ('Reject Reasons', self.rejects),
+            ('Warnings', self.warnings),
+            ('Notes', self.notes),
+        )
 
-        if len(self.notes) > 0:
-            msg += "Notes:\n"
-            msg += "\n".join(self.notes)
+        msg = ''
+        for title, messages in msgs:
+            if messages:
+                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
 
         return msg
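
To illustrate the refactoring: each populated list becomes one titled block in the output. A sketch, assuming reset() initialises the message lists as the surrounding code suggests; the messages are invented:

    # Hypothetical example of the resulting format.
    u = Upload()
    u.rejects.append("md5sum check failed for foo.deb")
    u.notes.append("mapped unstable to sid")
    print u.package_info()
    # prints (after leading blank lines):
    #   Reject Reasons:
    #   md5sum check failed for foo.deb
    #
    #   Notes:
    #   mapped unstable to sid
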
 
@@ -302,7 +299,7 @@ class Upload(object):
 
         # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
         if not self.pkg.changes.has_key("architecture") or not \
-           isinstance(changes["architecture"], DictType):
+           isinstance(self.pkg.changes["architecture"], dict):
             self.pkg.changes["architecture"] = { "Unknown" : "" }
 
         # and maintainer2047 may not exist.
@@ -319,7 +316,7 @@ class Upload(object):
            (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
 
             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
-            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], changes["maintainer2047"])
+            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
             self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
         else:
             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
@@ -344,12 +341,13 @@ class Upload(object):
     ###########################################################################
     def load_changes(self, filename):
         """
-        @rtype boolean
+        @rtype: boolean
         @rvalue: whether the changes file was valid or not.  We may want to
                  reject even if this is True (see what gets put in self.rejects).
                  This is simply to prevent us even trying things later which will
                  fail because we couldn't properly parse the file.
         """
+        Cnf = Config()
         self.pkg.changes_file = filename
 
         # Parse the .changes field into a dictionary
@@ -367,7 +365,7 @@ class Upload(object):
 
         # Parse the Files field from the .changes into another dictionary
         try:
-            self.pkg.files.update(build_file_list(self.pkg.changes))
+            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
         except ParseChangesError, line:
             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
             return False
@@ -411,7 +409,7 @@ class Upload(object):
                    fix_maintainer (self.pkg.changes["maintainer"])
         except ParseMaintError, msg:
             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
-                   % (filename, changes["maintainer"], msg))
+                   % (filename, self.pkg.changes["maintainer"], msg))
 
         # ...likewise for the Changed-By: field if it exists.
         try:
@@ -442,9 +440,8 @@ class Upload(object):
         # Check there isn't already a changes file of the same name in one
         # of the queue directories.
         base_filename = os.path.basename(filename)
-        for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
-            if os.path.exists(os.path.join(Cnf["Dir::Queue::%s" % (d) ], base_filename)):
-                self.rejects.append("%s: a file with this name already exists in the %s directory." % (base_filename, d))
+        if get_knownchange(base_filename):
+            self.rejects.append("%s: a file with this name already exists." % (base_filename))
 
         # Check the .changes is non-empty
         if not self.pkg.files:
@@ -479,7 +476,7 @@ class Upload(object):
                 (source, dest) = args[1:3]
                 if self.pkg.changes["distribution"].has_key(source):
                     for arch in self.pkg.changes["architecture"].keys():
-                        if arch not in [ arch_string for a in get_suite_architectures(source) ]:
+                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                             self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                             del self.pkg.changes["distribution"][source]
                             self.pkg.changes["distribution"][dest] = 1
@@ -604,9 +601,9 @@ class Upload(object):
         entry["maintainer"] = control.Find("Maintainer", "")
 
         if f.endswith(".udeb"):
-            files[f]["dbtype"] = "udeb"
+            self.pkg.files[f]["dbtype"] = "udeb"
         elif f.endswith(".deb"):
-            files[f]["dbtype"] = "deb"
+            self.pkg.files[f]["dbtype"] = "deb"
         else:
             self.rejects.append("%s is neither a .deb or a .udeb." % (f))
 
@@ -622,7 +619,7 @@ class Upload(object):
             source_version = m.group(2)
 
         if not source_version:
-            source_version = files[f]["version"]
+            source_version = self.pkg.files[f]["version"]
 
         entry["source package"] = source
         entry["source version"] = source_version
@@ -680,10 +677,12 @@ class Upload(object):
         # Check the version and for file overwrites
         self.check_binary_against_db(f, session)
 
-        b = Binary(f).scan_package()
-        if len(b.rejects) > 0:
-            for j in b.rejects:
-                self.rejects.append(j)
+        # Temporarily disable contents generation until we change the table storage layout
+        #b = Binary(f)
+        #b.scan_package()
+        #if len(b.rejects) > 0:
+        #    for j in b.rejects:
+        #        self.rejects.append(j)
 
     def source_file_checks(self, f, session):
         entry = self.pkg.files[f]
@@ -701,7 +700,7 @@ class Upload(object):
             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
 
         # Ensure the source version matches the version in the .changes file
-        if entry["type"] == "orig.tar.gz":
+        if re_is_orig_source.match(f):
             changes_version = self.pkg.changes["chopversion2"]
         else:
             changes_version = self.pkg.changes["chopversion"]
@@ -726,11 +725,22 @@ class Upload(object):
     def per_suite_file_checks(self, f, suite, session):
         cnf = Config()
         entry = self.pkg.files[f]
+        archive = utils.where_am_i()
 
         # Skip byhand
         if entry.has_key("byhand"):
             return
 
+        # Check we have fields we need to do these checks
+        oktogo = True
+        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
+            if not entry.has_key(m):
+                self.rejects.append("file '%s' does not have field %s set" % (f, m))
+                oktogo = False
+
+        if not oktogo:
+            return
+
         # Handle component mappings
         for m in cnf.ValueList("ComponentMappings"):
             (source, dest) = m.split()
@@ -745,9 +755,8 @@ class Upload(object):
             return
 
         # Validate the component
-        component = entry["component"]
-        if not get_component(component, session):
-            self.rejects.append("file '%s' has unknown component '%s'." % (f, component))
+        if not get_component(entry["component"], session):
+            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
             return
 
         # See if the package is NEW
@@ -760,9 +769,9 @@ class Upload(object):
 
         # Determine the location
         location = cnf["Dir::Pool"]
-        l = get_location(location, component, archive, session)
+        l = get_location(location, entry["component"], archive, session)
         if l is None:
-            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (component, archive))
+            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (entry["component"], archive))
             entry["location id"] = -1
         else:
             entry["location id"] = l.location_id
@@ -785,22 +794,16 @@ class Upload(object):
 
         # Check for packages that have moved from one component to another
         entry['suite'] = suite
-        res = get_binary_components(files[f]['package'], suite, entry["architecture"], session)
+        res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
         if res.rowcount > 0:
             entry["othercomponents"] = res.fetchone()[0]
 
     def check_files(self, action=True):
-        archive = utils.where_am_i()
         file_keys = self.pkg.files.keys()
         holding = Holding()
         cnf = Config()
 
-        # XXX: As far as I can tell, this can no longer happen - see
-        #      comments by AJ in old revisions - mhy
-        # if reprocess is 2 we've already done this and we're checking
-        # things again for the new .orig.tar.gz.
-        # [Yes, I'm fully aware of how disgusting this is]
-        if action and self.reprocess < 2:
+        if action:
             cwd = os.getcwd()
             os.chdir(self.pkg.directory)
             for f in file_keys:
@@ -811,11 +814,10 @@ class Upload(object):
 
             os.chdir(cwd)
 
-        # Check there isn't already a .changes or .dak file of the same name in
-        # the proposed-updates "CopyChanges" or "CopyDotDak" storage directories.
+        # Check there isn't already a .changes file of the same name in
+        # the proposed-updates "CopyChanges" storage directories.
         # [NB: this check must be done post-suite mapping]
         base_filename = os.path.basename(self.pkg.changes_file)
-        dot_dak_filename = base_filename[:-8] + ".dak"
 
         for suite in self.pkg.changes["distribution"].keys():
             copychanges = "Suite::%s::CopyChanges" % (suite)
@@ -824,17 +826,10 @@ class Upload(object):
                 self.rejects.append("%s: a file with this name already exists in %s" \
                            % (base_filename, cnf[copychanges]))
 
-            copy_dot_dak = "Suite::%s::CopyDotDak" % (suite)
-            if cnf.has_key(copy_dot_dak) and \
-                   os.path.exists(os.path.join(cnf[copy_dot_dak], dot_dak_filename)):
-                self.rejects.append("%s: a file with this name already exists in %s" \
-                           % (dot_dak_filename, Cnf[copy_dot_dak]))
-
-        self.reprocess = 0
         has_binaries = False
         has_source = False
 
-        s = DBConn().session()
+        session = DBConn().session()
 
         for f, entry in self.pkg.files.items():
             # Ensure the file does not already exist in one of the accepted directories
@@ -888,6 +883,8 @@ class Upload(object):
             for suite in self.pkg.changes["distribution"].keys():
                 self.per_suite_file_checks(f, suite, session)
 
+        session.close()
+
         # If the .changes file says it has source, it must have source.
         if self.pkg.changes["architecture"].has_key("source"):
             if not has_source:
@@ -897,7 +894,7 @@ class Upload(object):
                 self.rejects.append("source only uploads are not supported.")
 
     ###########################################################################
-    def check_dsc(self, action=True):
+    def check_dsc(self, action=True, session=None):
         """Returns bool indicating whether or not the source changes are valid"""
         # Ensure there is source to check
         if not self.pkg.changes["architecture"].has_key("source"):
@@ -934,7 +931,7 @@ class Upload(object):
 
         # Build up the file list of files mentioned by the .dsc
         try:
-            self.pkg.dsc_files.update(utils.build_file_list(dsc, is_a_dsc=1))
+            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
         except NoFilesFieldError:
             self.rejects.append("%s: no Files: field." % (dsc_filename))
             return False
@@ -954,13 +951,14 @@ class Upload(object):
         # Validate the source and version fields
         if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
             self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
-        if not re_valid_version.match(dsc["version"]):
+        if not re_valid_version.match(self.pkg.dsc["version"]):
             self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
 
-        # Bumping the version number of the .dsc breaks extraction by stable's
-        # dpkg-source.  So let's not do that...
-        if self.pkg.dsc["format"] != "1.0":
-            self.rejects.append("%s: incompatible 'Format' version produced by a broken version of dpkg-dev 1.9.1{3,4}." % (dsc_filename))
+        # Only a limited list of source formats are allowed in each suite
+        for dist in self.pkg.changes["distribution"].keys():
+            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
+            if self.pkg.dsc["format"] not in allowed:
+                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
 
         # Validate the Maintainer field
         try:
@@ -974,11 +972,6 @@ class Upload(object):
         for field_name in [ "build-depends", "build-depends-indep" ]:
             field = self.pkg.dsc.get(field_name)
             if field:
-                # Check for broken dpkg-dev lossage...
-                if field.startswith("ARRAY"):
-                    self.rejects.append("%s: invalid %s field produced by a broken version of dpkg-dev (1.10.11)" % \
-                                        (dsc_filename, field_name.title()))
-
                 # Have apt try to parse them...
                 try:
                     apt_pkg.ParseSrcDepends(field)
@@ -992,24 +985,14 @@ class Upload(object):
         if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
             self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
 
-        # Ensure there is a .tar.gz in the .dsc file
-        has_tar = False
-        for f in dsc_files.keys():
-            m = re_issource.match(f)
-            if not m:
-                self.rejects.append("%s: %s in Files field not recognised as source." % (dsc_filename, f))
-                continue
-            ftype = m.group(3)
-            if ftype == "orig.tar.gz" or ftype == "tar.gz":
-                has_tar = True
-
-        if not has_tar:
-            self.rejects.append("%s: no .tar.gz or .orig.tar.gz in 'Files' field." % (dsc_filename))
+        # Ensure the Files field contain only what's expected
+        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
 
         # Ensure source is newer than existing source in target suites
+        session = DBConn().session()
         self.check_source_against_db(dsc_filename, session)
-
-        self.check_dsc_against_db(dsc_filename)
+        self.check_dsc_against_db(dsc_filename, session)
+        session.close()
 
         return True
 
@@ -1023,8 +1006,8 @@ class Upload(object):
 
         # Find the .dsc (again)
         dsc_filename = None
-        for f in self.files.keys():
-            if files[f]["type"] == "dsc":
+        for f in self.pkg.files.keys():
+            if self.pkg.files[f]["type"] == "dsc":
                 dsc_filename = f
 
         # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
@@ -1032,7 +1015,7 @@ class Upload(object):
             return
 
         # Create a symlink mirror of the source files in our temporary directory
-        for f in self.files.keys():
+        for f in self.pkg.files.keys():
             m = re_issource.match(f)
             if m:
                 src = os.path.join(source_dir, f)
@@ -1040,30 +1023,33 @@ class Upload(object):
                 if not os.path.exists(src):
                     return
                 ftype = m.group(3)
-                if ftype == "orig.tar.gz" and self.pkg.orig_tar_gz:
+                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
+                   self.pkg.orig_files[f].has_key("path"):
                     continue
                 dest = os.path.join(os.getcwd(), f)
                 os.symlink(src, dest)
 
-        # If the orig.tar.gz is not a part of the upload, create a symlink to the
-        # existing copy.
-        if self.pkg.orig_tar_gz:
-            dest = os.path.join(os.getcwd(), os.path.basename(self.pkg.orig_tar_gz))
-            os.symlink(self.pkg.orig_tar_gz, dest)
+        # If the orig files are not a part of the upload, create symlinks to the
+        # existing copies.
+        for orig_file in self.pkg.orig_files.keys():
+            if not self.pkg.orig_files[orig_file].has_key("path"):
+                continue
+            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
+            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
 
         # Extract the source
         cmd = "dpkg-source -sn -x %s" % (dsc_filename)
         (result, output) = commands.getstatusoutput(cmd)
         if (result != 0):
             self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
-            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "), "")
+            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
             return
 
         if not cnf.Find("Dir::Queue::BTSVersionTrack"):
             return
 
         # Get the upstream version
-        upstr_version = re_no_epoch.sub('', dsc["version"])
+        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
         if re_strip_revision.search(upstr_version):
             upstr_version = re_strip_revision.sub('', upstr_version)
 
@@ -1087,15 +1073,11 @@ class Upload(object):
             self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
 
     def check_source(self):
-        # XXX: I'm fairly sure reprocess == 2 can never happen
-        #      AJT disabled the is_incoming check years ago - mhy
-        #      We should probably scrap or rethink the whole reprocess thing
         # Bail out if:
         #    a) there's no source
-        # or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files'
-        # or c) the orig.tar.gz is MIA
-        if not self.pkg.changes["architecture"].has_key("source") or self.reprocess == 2 \
-           or self.pkg.orig_tar_gz == -1:
+        # or b) the orig files are MIA
+        if not self.pkg.changes["architecture"].has_key("source") \
+           or len(self.pkg.orig_files) == 0:
             return
 
         tmpdir = utils.temp_dirname()
@@ -1114,6 +1096,7 @@ class Upload(object):
             shutil.rmtree(tmpdir)
         except OSError, e:
             if e.errno != errno.EACCES:
+                print "foobar"
                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
 
             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
@@ -1124,7 +1107,8 @@ class Upload(object):
             if result != 0:
                 utils.fubar("'%s' failed with result %s." % (cmd, result))
             shutil.rmtree(tmpdir)
-        except:
+        except Exception, e:
             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
 
     ###########################################################################
@@ -1138,7 +1122,7 @@ class Upload(object):
 
         # We need to deal with the original changes blob, as the fields we need
         # might not be in the changes dict serialised into the .dak anymore.
-        orig_changes = parse_deb822(self.pkg.changes['filecontents'])
+        orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
 
         # Copy the checksums over to the current changes dict.  This will keep
         # the existing modifications to it intact.
@@ -1164,7 +1148,7 @@ class Upload(object):
                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
                     self.rejects.append(j)
 
-    def check_hashes():
+    def check_hashes(self):
         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
             self.rejects.append(m)
 
@@ -1177,8 +1161,201 @@ class Upload(object):
         for m in utils.check_size(".dsc", self.pkg.dsc_files):
             self.rejects.append(m)
 
-        for m in utils.ensure_hashes(self.pkg.changes, dsc, files, dsc_files):
-            self.rejects.append(m)
+        self.ensure_hashes()
+
+    ###########################################################################
+
+    def ensure_orig(self, target_dir='.', session=None):
+        """
+        Ensures that all orig files mentioned in the .dsc are present
+        in target_dir. If they do not exist, they are symlinked into place.
+
+        A list containing the symlinks that were created is returned (so
+        they can be removed later).
+        """
+
+        symlinked = []
+        cnf = Config()
+
+        for filename, entry in self.pkg.dsc_files.iteritems():
+            if not re_is_orig_source.match(filename):
+                # File is not an orig; ignore
+                continue
+
+            if os.path.exists(filename):
+                # File exists, no need to continue
+                continue
+
+            def symlink_if_valid(path):
+                f = utils.open_file(path)
+                md5sum = apt_pkg.md5sum(f)
+                f.close()
+
+                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
+                expected = (int(entry['size']), entry['md5sum'])
+
+                if fingerprint != expected:
+                    return False
+
+                dest = os.path.join(target_dir, filename)
+
+                os.symlink(path, dest)
+                symlinked.append(dest)
+
+                return True
+
+            session_ = session
+            if session is None:
+                session_ = DBConn().session()
+
+            found = False
+
+            # Look in the pool
+            for poolfile in get_poolfile_like_name('/%s' % filename, session_):
+                poolfile_path = os.path.join(
+                    poolfile.location.path, poolfile.filename
+                )
+
+                if symlink_if_valid(poolfile_path):
+                    found = True
+                    break
+
+            if session is None:
+                session_.close()
+
+            if found:
+                continue
+
+            # Look in some other queues for the file
+            queues = ('Accepted', 'New', 'Byhand', 'ProposedUpdates',
+                'OldProposedUpdates', 'Embargoed', 'Unembargoed')
+
+            for queue in queues:
+                if not cnf.get('Dir::Queue::%s' % queue):
+                    continue
+
+                queuefile_path = os.path.join(
+                    cnf['Dir::Queue::%s' % queue], filename
+                )
+
+                if not os.path.exists(queuefile_path):
+                    # Does not exist in this queue
+                    continue
+
+                if symlink_if_valid(queuefile_path):
+                    break
+
+        return symlinked
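
A sketch of the intended calling pattern, mirroring how check_lintian() below uses it: create the symlinks, do the work, then remove them. The upload object here is hypothetical:

    # Hypothetical usage of ensure_orig().
    symlinked = upload.ensure_orig(target_dir='.')
    try:
        pass  # ... run whatever needs the complete source, e.g. lintian ...
    finally:
        for link in symlinked:
            os.unlink(link)
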
+
+    ###########################################################################
+
+    def check_lintian(self):
+        cnf = Config()
+
+        # Don't reject binary uploads
+        if not self.pkg.changes['architecture'].has_key('source'):
+            return
+
+        # Only check some distributions
+        valid_dist = False
+        for dist in ('unstable', 'experimental'):
+            if dist in self.pkg.changes['distribution']:
+                valid_dist = True
+                break
+
+        if not valid_dist:
+            return
+
+        tagfile = cnf.get("Dinstall::LintianTags")
+        if tagfile is None:
+            # We don't have a tagfile, so just don't do anything.
+            return
+
+        # Parse the yaml file
+        sourcefile = file(tagfile, 'r')
+        sourcecontent = sourcefile.read()
+        sourcefile.close()
+        try:
+            lintiantags = yaml.load(sourcecontent)['lintian']
+        except yaml.YAMLError, msg:
+            utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
+            return
+
+        # Try and find all orig mentioned in the .dsc
+        symlinked = self.ensure_orig()
+
+        # Now set up the input file for lintian. lintian wants one tag per
+        # line, so we write the file accordingly. We put all types of tags
+        # into one file and sort through lintian's output later to see
+        # whether a detected tag is fatal or not. That way we only run
+        # lintian once over all tags, even though we may reject on some
+        # tags but not on others.
+        # Additionally, build up a set of the tags.
+        tags = set()
+        (fd, temp_filename) = utils.temp_filename()
+        temptagfile = os.fdopen(fd, 'w')
+        for tagtype in lintiantags:
+            for tag in lintiantags[tagtype]:
+                temptagfile.write("%s\n" % tag)
+                tags.add(tag)
+        temptagfile.close()
+
+        # Now run lintian on the .changes file, capturing the output so we
+        # can parse it afterwards.
+        command = "lintian --show-overrides --tags-from-file %s %s" % (temp_filename, self.pkg.changes_file)
+        (result, output) = commands.getstatusoutput(command)
+
+        # We are done with lintian, remove our tempfile and any symlinks we created
+        os.unlink(temp_filename)
+        for symlink in symlinked:
+            os.unlink(symlink)
+
+        if (result == 2):
+            utils.warn("lintian failed for %s [return code: %s]." % (self.pkg.changes_file, result))
+            utils.warn(utils.prefix_multi_line_string(output, " [possible output:] "))
+
+        if len(output) == 0:
+            return
+
+        def log(*txt):
+            if self.logger:
+                self.logger.log([self.pkg.changes_file, "check_lintian"] + list(txt))
+
+        # We have lintian output, so this package isn't clean. Parse the
+        # output and see whether any of the tags warrants a reject, e.g.:
+        # W: tzdata: binary-without-manpage usr/sbin/tzconfig
+        for line in output.split('\n'):
+            m = re_parse_lintian.match(line)
+            if m is None:
+                continue
+
+            etype = m.group(1)
+            epackage = m.group(2)
+            etag = m.group(3)
+            etext = m.group(4)
+
+            # Check whether we know this tag at all.
+            if etag not in tags:
+                continue
+
+            if etype == 'O':
+                # We know the tag and it is overridden. Check whether the
+                # override is allowed.
+                if etag in lintiantags['warning']:
+                    # The tag is overridden, and it is allowed to be
+                    # overridden. Don't add a reject message.
+                    pass
+                elif etag in lintiantags['error']:
+                    # The tag is overridden - but is not allowed to be.
+                    self.rejects.append("%s: Overridden tag %s found, but this tag may not be overridden." % (epackage, etag))
+                    log("ftpmaster does not allow this tag to be overridden", etag)
+            else:
+                # The tag is known and not overridden; reject directly.
+                self.rejects.append("%s: Found lintian output: '%s %s', automatically rejected package." % (epackage, etag, etext))
+                # Now tell them whether they *might* override it.
+                if etag in lintiantags['warning']:
+                    log("auto rejecting", "overridable", etag)
+                    self.rejects.append("%s: If you have a good reason, you may override this lintian tag." % (epackage))
+                else:
+                    log("auto rejecting", "not overridable", etag)
 
     ###########################################################################
     def check_urgency(self):
@@ -1199,11 +1376,13 @@ class Upload(object):
     #  travel can cause errors on extraction]
 
     def check_timestamps(self):
+        Cnf = Config()
+
         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
         tar = TarTime(future_cutoff, past_cutoff)
 
-        for filename, entry in self.pkg.files.keys():
+        for filename, entry in self.pkg.files.items():
             if entry["type"] == "deb":
                 tar.reset()
                 try:
@@ -1239,89 +1418,276 @@ class Upload(object):
                 except:
                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
 
+    def check_if_upload_is_sponsored(self, uid_email, uid_name):
+        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
+            sponsored = False
+        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
+            sponsored = False
+            if uid_name == "":
+                sponsored = True
+        else:
+            sponsored = True
+            if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
+                sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
+                if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
+                    self.pkg.changes["changedbyemail"] not in sponsor_addresses):
+                        self.pkg.changes["sponsoremail"] = uid_email
+
+        return sponsored
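
In short: an upload counts as sponsored when the signing key's uid matches neither Maintainer nor Changed-By (an empty uid name is also treated as sponsored). A sketch with invented addresses on a hypothetical upload object:

    # Hypothetical: the .changes names Jane, but the key belongs to John.
    upload.pkg.changes["maintaineremail"] = "jane@example.org"
    upload.pkg.changes["changedbyemail"] = "jane@example.org"
    upload.pkg.changes["maintainername"] = "Jane Doe"
    upload.pkg.changes["changedbyname"] = "Jane Doe"
    upload.pkg.changes["architecture"] = {}   # binary-only: skip the alias check
    print upload.check_if_upload_is_sponsored("john@example.org", "John Smith")
    # -> True
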
+
+
     ###########################################################################
+    # check_signed_by_key checks
+    ###########################################################################
+
     def check_signed_by_key(self):
         """Ensure the .changes is signed by an authorized uploader."""
         session = DBConn().session()
 
-        (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(self.pkg.changes["fingerprint"], session=session)
-
-        # match claimed name with actual name:
-        if uid is None:
-            # This is fundamentally broken but need us to refactor how we get
-            # the UIDs/Fingerprints in order for us to fix it properly
-            uid, uid_email = self.pkg.changes["fingerprint"], uid
-            may_nmu, may_sponsor = 1, 1
-            # XXX by default new dds don't have a fingerprint/uid in the db atm,
-            #     and can't get one in there if we don't allow nmu/sponsorship
-        elif is_dm is False:
-            # If is_dm is False, we allow full upload rights
-            uid_email = "%s@debian.org" % (uid)
-            may_nmu, may_sponsor = 1, 1
-        else:
-            # Assume limited upload rights unless we've discovered otherwise
-            uid_email = uid
-            may_nmu, may_sponsor = 0, 0
+        # First of all we check that the person has proper upload permissions
+        # and that this upload isn't blocked
+        fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
 
-        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
-            sponsored = 0
-        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
-            sponsored = 0
-            if uid_name == "": sponsored = 1
+        if fpr is None:
+            self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
+            return
+
+        # TODO: Check that import-keyring adds UIDs properly
+        if not fpr.uid:
+            self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
+            return
+
+        # Check that the fingerprint which uploaded has permission to do so
+        self.check_upload_permissions(fpr, session)
+
+        # Check that this package is not in a transition
+        self.check_transition(session)
+
+        session.close()
+
+
+    def check_upload_permissions(self, fpr, session):
+        # Check any one-off upload blocks
+        self.check_upload_blocks(fpr, session)
+
+        # DM is unfortunately a special case, so we check it first.
+        # (keys with no source access get more access than DMs in one
+        #  way; DMs can only upload for their packages whether source
+        #  or binary, whereas keys with no access might be able to
+        #  upload some binaries)
+        if fpr.source_acl.access_level == 'dm':
+            self.check_dm_upload(fpr, session)
         else:
-            sponsored = 1
-            if ("source" in self.pkg.changes["architecture"] and
-                uid_email and utils.is_email_alias(uid_email)):
-                sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
-                if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
-                    self.pkg.changes["changedbyemail"] not in sponsor_addresses):
-                    self.pkg.changes["sponsoremail"] = uid_email
-
-        if sponsored and not may_sponsor:
-            self.rejects.append("%s is not authorised to sponsor uploads" % (uid))
-
-        if not sponsored and not may_nmu:
-            should_reject = True
-            highest_sid, highest_version = None, None
-
-            # XXX: This reimplements in SQLA what existed before but it's fundamentally fucked
-            #      It ignores higher versions with the dm_upload_allowed flag set to false
-            #      I'm keeping the existing behaviour for now until I've gone back and
-            #      checked exactly what the GR says - mhy
-            for si in get_sources_from_name(source=self.pkg.changes['source'], dm_upload_allowed=True, session=session):
-                if highest_version is None or apt_pkg.VersionCompare(si.version, highest_version) == 1:
-                     highest_sid = si.source_id
-                     highest_version = si.version
-
-            if highest_sid is None:
-                self.rejects.append("Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version" % self.pkg.changes["source"])
+            # Check source-based permissions for other types
+            if self.pkg.changes["architecture"].has_key("source"):
+                if fpr.source_acl.access_level is None:
+                    rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
+                    rej += '\nPlease contact ftpmaster if you think this is incorrect'
+                    self.rejects.append(rej)
+                    return
+            else:
+                # If not a DM, we allow full upload rights
+                uid_email = "%s@debian.org" % (fpr.uid.uid)
+                self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
+
+
+        # Check binary upload permissions
+        # By this point we know that DMs can't have got here unless they
+        # are allowed to deal with the package concerned so just apply
+        # normal checks
+        if fpr.binary_acl.access_level == 'full':
+            return
+
+        # Otherwise we're in the map case
+        tmparches = self.pkg.changes["architecture"].copy()
+        tmparches.pop('source', None)
+
+        for bam in fpr.binary_acl_map:
+            tmparches.pop(bam.architecture.arch_string, None)
+
+        if len(tmparches.keys()) > 0:
+            if fpr.binary_reject:
+                rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
+                rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
+                self.rejects.append(rej)
             else:
-                for sup in session.query(SrcUploader).join(DBSource).filter_by(source_id=highest_sid):
-                    (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
-                    if email == uid_email or name == uid_name:
-                        should_reject = False
-                        break
-
-            if should_reject is True:
-                self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (uid, self.pkg.changes["source"]))
-
-            for b in self.pkg.changes["binary"].keys():
-                for suite in self.pkg.changes["distribution"].keys():
-                    q = session.query(DBSource)
-                    q = q.join(DBBinary).filter_by(package=b)
-                    q = q.join(BinAssociation).join(Suite).filter_by(suite)
-
-                    for s in q.all():
-                        if s.source != self.pkg.changes["source"]:
-                            self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite))
-
-            for f in self.pkg.files.keys():
-                if self.pkg.files[f].has_key("byhand"):
-                    self.rejects.append("%s may not upload BYHAND file %s" % (uid, f))
-                if self.pkg.files[f].has_key("new"):
-                    self.rejects.append("%s may not upload NEW file %s" % (uid, f))
+                # TODO: This is where we'll implement reject vs throw away binaries later
+                rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
+                rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
+                rej += "\nFingerprint: %s", (fpr.fingerprint)
+                self.rejects.append(rej)
+
+
+    def check_upload_blocks(self, fpr, session):
+        """Check whether any upload blocks apply to this source, source
+           version, uid / fpr combination"""
+
+        def block_rej_template(fb):
+            rej = 'Manual upload block in place for package %s' % fb.source
+            if fb.version is not None:
+                rej += ', version %s' % fb.version
+            return rej
+
+        for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
+            # version is None if the block applies to all versions
+            if fb.version is None or fb.version == self.pkg.changes['version']:
+                # Check both fpr and uid - either is enough to cause a reject
+                if fb.fpr is not None:
+                    if fb.fpr.fingerprint == fpr.fingerprint:
+                        self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
+                if fb.uid is not None:
+                    if fb.uid == fpr.uid:
+                        self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
+
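
check_upload_blocks() reads UploadBlock rows carrying a source, an optional version, an optional fpr or uid, and a reason. A rough sketch of creating one; the attribute names are taken from the usage above, the rest is assumed:

    # Hypothetical: block all versions of "foo" pending review.  Assumes
    # UploadBlock can be instantiated and added like other mapped classes.
    session = DBConn().session()
    block = UploadBlock()
    block.source = "foo"
    block.version = None   # None applies the block to every version
    block.fpr = None       # or a Fingerprint object, to block a specific key
    block.uid = None       # or a Uid object, to block a specific account
    block.reason = "pending security review"
    session.add(block)
    session.commit()
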
+
+    def check_dm_upload(self, fpr, session):
+        # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
+        ## none of the uploaded packages are NEW
+        rej = False
+        for f in self.pkg.files.keys():
+            if self.pkg.files[f].has_key("byhand"):
+                self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
+                rej = True
+            if self.pkg.files[f].has_key("new"):
+                self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
+                rej = True
+
+        if rej:
+            return
+
+        ## the most recent version of the package uploaded to unstable or
+        ## experimental includes the field "DM-Upload-Allowed: yes" in the source
+        ## section of its control file
+        q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
+        q = q.join(SrcAssociation)
+        q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
+        q = q.order_by(desc('source.version')).limit(1)
+
+        r = q.all()
+
+        if len(r) != 1:
+            rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
+            self.rejects.append(rej)
+            return
+
+        r = r[0]
+        if not r.dm_upload_allowed:
+            rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
+            self.rejects.append(rej)
+            return
+
+        ## the Maintainer: field of the uploaded .changes file corresponds with
+        ## the owner of the key used (ie, non-developer maintainers may not sponsor
+        ## uploads)
+        if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
+            self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
+
+        ## the most recent version of the package uploaded to unstable or
+        ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
+        ## non-developer maintainers cannot NMU or hijack packages)
+
+        # srcuploaders includes the maintainer
+        accept = False
+        for sup in r.srcuploaders:
+            (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
+            # Eww - I hope we never have two people with the same name in Debian
+            if email == fpr.uid.uid or name == fpr.uid.name:
+                accept = True
+                break
+
+        if not accept:
+            self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
+            return
+
+        ## none of the packages are being taken over from other source packages
+        for b in self.pkg.changes["binary"].keys():
+            for suite in self.pkg.changes["distribution"].keys():
+                q = session.query(DBSource)
+                q = q.join(DBBinary).filter_by(package=b)
+                q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
+
+                for s in q.all():
+                    if s.source != self.pkg.changes["source"]:
+                        self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
+
+
+
+    def check_transition(self, session):
+        cnf = Config()
+
+        sourcepkg = self.pkg.changes["source"]
+
+        # No sourceful upload -> no need to do anything else, direct return.
+        # We also only deal with unstable uploads, not experimental ones or
+        # those going to some proposed-updates queue.
+        if "source" not in self.pkg.changes["architecture"] or \
+           "unstable" not in self.pkg.changes["distribution"]:
+            return
+
+        # Also, only check if a transitions file is defined (and actually
+        # exists) for these checks.
+        transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
+        if transpath == "" or not os.path.exists(transpath):
+            return
+
+        # Parse the yaml file
+        sourcefile = file(transpath, 'r')
+        sourcecontent = sourcefile.read()
+        sourcefile.close()
+        try:
+            transitions = yaml.load(sourcecontent)
+        except yaml.YAMLError, msg:
+            # This shouldn't happen, as there is a wrapper to edit the file
+            # which checks it, but we prefer to be safe rather than end up
+            # rejecting everything.
+            utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
+            return
+
+        # Now look through all defined transitions
+        for trans in transitions:
+            t = transitions[trans]
+            source = t["source"]
+            expected = t["new"]
+
+            # Will be None if nothing is in testing.
+            current = get_source_in_suite(source, "testing", session)
+            if current is not None:
+                compare = apt_pkg.VersionCompare(current.version, expected)
+
+            if current is None or compare < 0:
+                # The transition is still open: the current version in testing
+                # is older than the version we are waiting for, or there is
+                # none in testing yet.
+
+                # Check if the source we look at is affected by this.
+                if sourcepkg in t['packages']:
+                    # The source is affected, so let's reject it.
+
+                    rejectmsg = "%s: part of the %s transition.\n\n" % (
+                        sourcepkg, trans)
 
+                    if current is not None:
+                        currentlymsg = "at version %s" % (current.version)
+                    else:
+                        currentlymsg = "not present in testing"
+
+                    rejectmsg += "Transition description: %s\n\n" % (t["reason"])
+
+                    rejectmsg += "\n".join(textwrap.wrap("""Your package
+is part of a testing transition designed to get %s migrated (it is
+currently %s, we need version %s).  This transition is managed by the
+Release Team, and %s is the Release-Team member responsible for it.
+Please mail debian-release@lists.debian.org or contact %s directly if you
+need further assistance.  You might want to upload to experimental until this
+transition is done."""
+                            % (source, currentlymsg, expected, t["rm"], t["rm"])))
+
+                    self.rejects.append(rejectmsg)
+                    return
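
For reference, a transitions file this code could parse might look as below; the names and versions are invented, but the keys match what the loop above reads:

    import yaml

    # Hypothetical Dinstall::Reject::ReleaseTransitions content.
    transitions = yaml.load(
        "apt_update:\n"
        "  source: apt\n"
        "  new: 0.7.21\n"
        "  rm: Some Releaser\n"
        "  reason: apt needs to migrate before its rdepends\n"
        "  packages:\n"
        "    - synaptic\n"
        "    - cupt\n"
    )
    for trans in transitions:
        t = transitions[trans]
        print trans, t["source"], t["new"], t["rm"], t["packages"]
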
+
+    ###########################################################################
+    # End check_signed_by_key checks
     ###########################################################################
+
     def build_summaries(self):
         """ Build a summary of changes the upload introduces. """
 
@@ -1371,6 +1737,7 @@ class Upload(object):
         for bug in bugs:
             summary += "%s " % (bug)
             if action:
+                self.update_subst()
                 self.Subst["__BUG_NUMBER__"] = bug
                 if self.pkg.changes["distribution"].has_key("stable"):
                     self.Subst["__STABLE_WARNING__"] = """
@@ -1381,15 +1748,15 @@ The update will eventually make its way into the next released Debian
 distribution."""
                 else:
                     self.Subst["__STABLE_WARNING__"] = ""
-                    mail_message = utils.TemplateSubst(self.Subst, template)
-                    utils.send_mail(mail_message)
+                mail_message = utils.TemplateSubst(self.Subst, template)
+                utils.send_mail(mail_message)
 
                 # Clear up after ourselves
                 del self.Subst["__BUG_NUMBER__"]
                 del self.Subst["__STABLE_WARNING__"]
 
-        if action:
-            self.Logger.log(["closing bugs"] + bugs)
+        if action and self.logger:
+            self.logger.log(["closing bugs"] + bugs)
 
         summary += "\n"
 
@@ -1426,7 +1793,7 @@ distribution."""
         self.Subst["__SHORT_SUMMARY__"] = short_summary
 
         for dist in self.pkg.changes["distribution"].keys():
-            announce_list = Cnf.Find("Suite::%s::Announce" % (dist))
+            announce_list = cnf.Find("Suite::%s::Announce" % (dist))
             if announce_list == "" or lists_done.has_key(announce_list):
                 continue
 
@@ -1434,6 +1801,7 @@ distribution."""
             summary += "Announcing to %s\n" % (announce_list)
 
             if action:
+                self.update_subst()
                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
                 if cnf.get("Dinstall::TrackingServer") and \
                    self.pkg.changes["architecture"].has_key("source"):
@@ -1454,12 +1822,12 @@ distribution."""
 
     ###########################################################################
 
-    def accept (self, summary, short_summary, targetdir=None):
+    def accept (self, summary, short_summary, session):
         """
         Accept an upload.
 
-        This moves all files referenced from the .changes into the I{accepted}
-        queue, sends the accepted mail, announces to lists, closes bugs and
+        This moves all files referenced from the .changes into the pool,
+        sends the accepted mail, announces to lists, closes bugs and
         also checks for override disparities. If enabled it will write out
         the version history for the BTS Version Tracking and will finally call
         L{queue_build}.
@@ -1469,37 +1837,93 @@ distribution."""
 
         @type short_summary: string
         @param short_summary: Short summary
-
         """
 
         cnf = Config()
         stats = SummaryStats()
 
-        accepttemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted')
+        print "Installing."
+        self.logger.log(["installing changes", self.pkg.changes_file])
+
+        # Add the .dsc file to the DB first
+        for newfile, entry in self.pkg.files.items():
+            if entry["type"] == "dsc":
+                dsc_component, dsc_location_id = add_dsc_to_db(self, newfile, session)
+
+        # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
+        for newfile, entry in self.pkg.files.items():
+            if entry["type"] == "deb":
+                add_deb_to_db(self, newfile, session)
 
-        if targetdir is None:
-            targetdir = cnf["Dir::Queue::Accepted"]
+        # If this is a sourceful diff only upload that is moving
+        # cross-component we need to copy the .orig files into the new
+        # component too for the same reasons as above.
+        if self.pkg.changes["architecture"].has_key("source"):
+            for orig_file in self.pkg.orig_files.keys():
+                if not self.pkg.orig_files[orig_file].has_key("id"):
+                    continue # Skip if it's not in the pool
+                orig_file_id = self.pkg.orig_files[orig_file]["id"]
+                if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
+                    continue # Skip if the location didn't change
+
+                # Do the move
+                oldf = get_poolfile_by_id(orig_file_id, session)
+                old_filename = os.path.join(oldf.location.path, oldf.filename)
+                old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
+                           'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
+
+                new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
+
+                # TODO: Care about size/md5sum collisions etc
+                (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
+
+                if newf is None:
+                    utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
+                    newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
+
+                    # TODO: Check that there's only 1 here
+                    source = get_sources_from_name(self.pkg.changes["source"], self.pkg.changes["version"])[0]
+                    dscf = get_dscfiles(source_id=source.source_id, poolfile_id=orig_file_id, session=session)[0]
+                    dscf.poolfile_id = newf.file_id
+                    session.add(dscf)
+                    session.flush()
+
+        # Install the files into the pool
+        for newfile, entry in self.pkg.files.items():
+            destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
+            utils.move(newfile, destination)
+            self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
+            stats.accept_bytes += float(entry["size"])
 
-        print "Accepting."
-        self.Logger.log(["Accepting changes", self.pkg.changes_file])
+        # Copy the .changes file across for suites which need it.
+        copy_changes = {}
+        for suite_name in self.pkg.changes["distribution"].keys():
+            if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
+                copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
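+        # (the dict keys merely de-duplicate destinations shared by several
+        # suites; the values are unused)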
 
-        self.write_dot_dak(targetdir)
+        for dest in copy_changes.keys():
+            utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
 
-        # Move all the files into the accepted directory
-        utils.move(self.pkg.changes_file, targetdir)
+        # We're done - commit the database changes
+        session.commit()
+        # Our SQL session will automatically start a new transaction after
+        # the last commit
 
-        for name, entry in sorted(self.pkg.files.items()):
-            utils.move(name, targetdir)
-            stats.accept_bytes += float(entry["size"])
+        # Move the .changes into the 'done' directory
+        utils.move(self.pkg.changes_file,
+                   os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
 
-        stats.accept_count += 1
+        if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
+            UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
 
         # Send accept mail, announce to lists, close bugs and check for
         # override disparities
         if not cnf["Dinstall::Options::No-Mail"]:
+            self.update_subst()
             self.Subst["__SUITE__"] = ""
             self.Subst["__SUMMARY__"] = summary
-            mail_message = utils.TemplateSubst(self.Subst, accepttemplate)
+            mail_message = utils.TemplateSubst(self.Subst,
+                                               os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
             utils.send_mail(mail_message)
             self.announce(short_summary, 1)
 
@@ -1537,24 +1961,16 @@ distribution."""
             os.rename(temp_filename, filename)
             os.chmod(filename, 0644)
 
-        # Its is Cnf["Dir::Queue::Accepted"] here, not targetdir!
-        # <Ganneff> we do call queue_build too
-        # <mhy> well yes, we'd have had to if we were inserting into accepted
-        # <Ganneff> now. thats database only.
-        # <mhy> urgh, that's going to get messy
-        # <Ganneff> so i make the p-n call to it *also* using accepted/
-        # <mhy> but then the packages will be in the queue_build table without the files being there
-        # <Ganneff> as the buildd queue is only regenerated whenever unchecked runs
-        # <mhy> ah, good point
-        # <Ganneff> so it will work out, as unchecked move it over
-        # <mhy> that's all completely sick
-        # <Ganneff> yes
-
-        # This routine returns None on success or an error on failure
-        res = get_queue('accepted').autobuild_upload(self.pkg, cnf["Dir::Queue::Accepted"])
-        if res:
-            utils.fubar(res)
+        # auto-build queue
+#        res = get_or_set_queue('buildd', session).autobuild_upload(self.pkg, session)
+#        if res:
+#            utils.fubar(res)
+#            now_date = datetime.now()
+
+        session.commit()
 
+        # Finally...
+        stats.accept_count += 1
 
     def check_override(self):
         """
@@ -1582,6 +1998,7 @@ distribution."""
 
         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
 
+        self.update_subst()
         self.Subst["__SUMMARY__"] = summary
         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
         utils.send_mail(mail_message)
@@ -1589,18 +2006,24 @@ distribution."""
 
     ###########################################################################
 
-    def remove(self, dir=None):
+    def remove(self, from_dir=None):
         """
         Used (for instance) in p-u to remove the package from unchecked
+
+        Also removes the package from the holding area.
         """
-        if dir is None:
-            os.chdir(self.pkg.directory)
-        else:
-            os.chdir(dir)
+        if from_dir is None:
+            from_dir = self.pkg.directory
+        h = Holding()
 
         for f in self.pkg.files.keys():
-            os.unlink(f)
-        os.unlink(self.pkg.changes_file)
+            os.unlink(os.path.join(from_dir, f))
+            if os.path.exists(os.path.join(h.holding_dir, f)):
+                os.unlink(os.path.join(h.holding_dir, f))
+        os.unlink(os.path.join(from_dir, self.pkg.changes_file))
+        if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
+            os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
 
     ###########################################################################
 
@@ -1608,9 +2031,11 @@ distribution."""
         """
         Move files to dest with certain perms/changesperms
         """
-        utils.move(self.pkg.changes_file, dest, perms=changesperms)
+        h = Holding()
+        utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
+                   dest, perms=changesperms)
         for f in self.pkg.files.keys():
-            utils.move(f, dest, perms=perms)
+            utils.move(os.path.join(h.holding_dir, f), dest, perms=perms)
 
     ###########################################################################
 
@@ -1728,10 +2153,11 @@ distribution."""
 
         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
 
+        self.update_subst()
         if not manual:
             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
-            self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
+            self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
             os.write(reason_fd, reject_message)
             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
         else:
@@ -1739,7 +2165,7 @@ distribution."""
             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
-            self.Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
+            self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
             # Write the rejection email out as the <foo>.reason file
             os.write(reason_fd, reject_mail_message)
@@ -1754,12 +2180,13 @@ distribution."""
         if not cnf["Dinstall::Options::No-Mail"]:
             utils.send_mail(reject_mail_message)
 
-        self.Logger.log(["rejected", pkg.changes_file])
+        if self.logger:
+            self.logger.log(["rejected", self.pkg.changes_file])
 
         return 0
 
     ################################################################################
-    def in_override_p(self, package, component, suite, binary_type, file, session=None):
+    def in_override_p(self, package, component, suite, binary_type, filename, session):
         """
         Check if a package already has override entries in the DB
 
@@ -1775,8 +2202,8 @@ distribution."""
         @type binary_type: string
         @param binary_type: type of the package
 
-        @type file: string
-        @param file: filename we check
+        @type filename: string
+        @param filename: filename we check
 
         @return: the database result. But no one cares anyway.
 
@@ -1784,9 +2211,6 @@ distribution."""
 
         cnf = Config()
 
-        if session is None:
-            session = DBConn().session()
-
         if binary_type == "": # must be source
             file_type = "dsc"
         else:
@@ -1805,8 +2229,8 @@ distribution."""
         # Remember the section and priority so we can check them later if appropriate
         if len(result) > 0:
             result = result[0]
-            self.pkg.files[file]["override section"] = result.section.section
-            self.pkg.files[file]["override priority"] = result.priority.priority
+            self.pkg.files[filename]["override section"] = result.section.section
+            self.pkg.files[filename]["override priority"] = result.priority.priority
             return result
 
         return None
@@ -1822,8 +2246,9 @@ distribution."""
 
         Description: TODO
         """
+        Cnf = Config()
         anyversion = None
-        anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
+        anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
         for (s, v) in sv_list:
             if s in [ x.lower() for x in anysuite ]:
                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
@@ -1833,13 +2258,13 @@ distribution."""
 
     ################################################################################
 
-    def cross_suite_version_check(self, sv_list, file, new_version, sourceful=False):
+    def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
         """
         @type sv_list: list
         @param sv_list: list of (suite, version) tuples to check
 
-        @type file: string
-        @param file: XXX
+        @type filename: string
+        @param filename: filename we check
 
         @type new_version: string
         @param new_version: version of the package we are checking
@@ -1864,7 +2289,7 @@ distribution."""
                 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
 
                 if suite in must_be_newer_than and sourceful and vercmp < 1:
-                    self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
+                    self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
 
                 if suite in must_be_older_than and vercmp > -1:
                     cansave = 0
@@ -1897,7 +2322,7 @@ distribution."""
                             self.rejects.append("Won't propogate NEW packages.")
                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                             # propagation would be redundant. no need to reject though.
-                            self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
+                            self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
                             cansave = 1
                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
@@ -1908,37 +2333,31 @@ distribution."""
                             cansave = 1
 
                     if not cansave:
-                        self.reject.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
+                        self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
 
     ################################################################################
-    def check_binary_against_db(self, file, session=None):
-        if session is None:
-            session = DBConn().session()
-
+    def check_binary_against_db(self, filename, session):
         # Ensure version is sane
         q = session.query(BinAssociation)
-        q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[file]["package"])
-        q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[file]["architecture"], 'all']))
+        q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
+        q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
 
         self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
-                                       file, files[file]["version"], sourceful=False)
+                                       filename, self.pkg.files[filename]["version"], sourceful=False)
 
         # Check for any existing copies of the file
-        q = session.query(DBBinary).filter_by(files[file]["package"])
-        q = q.filter_by(version=files[file]["version"])
-        q = q.join(Architecture).filter_by(arch_string=files[file]["architecture"])
+        q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
+        q = q.filter_by(version=self.pkg.files[filename]["version"])
+        q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
 
         if q.count() > 0:
-            self.rejects.append("%s: can not overwrite existing copy already in the archive." % (file))
+            self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
 
     ################################################################################
 
-    def check_source_against_db(self, file, session=None):
+    def check_source_against_db(self, filename, session):
         """
         """
-        if session is None:
-            session = DBConn().session()
-
         source = self.pkg.dsc.get("source")
         version = self.pkg.dsc.get("version")
 
@@ -1947,24 +2366,23 @@ distribution."""
         q = q.join(DBSource).filter(DBSource.source==source)
 
         self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
-                                       file, version, sourceful=True)
+                                       filename, version, sourceful=True)
 
     ################################################################################
-    def check_dsc_against_db(self, file, session=None):
+    def check_dsc_against_db(self, filename, session):
         """
 
         @warning: NB: this function can remove entries from the 'files' index [if
-         the .orig.tar.gz is a duplicate of the one in the archive]; if
+         the orig tarball is a duplicate of the one in the archive]; if
          you're iterating over 'files' and call this function as part of
          the loop, be sure to add a check to the top of the loop to
          ensure you haven't just tried to dereference the deleted entry.
 
         """
 
-        if session is None:
-            session = DBConn().session()
-
-        self.pkg.orig_tar_gz = None
+        Cnf = Config()
+        self.pkg.orig_files = {} # XXX: do we need to clear it?
+        orig_files = self.pkg.orig_files
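+        # orig_files maps each orig tarball name to a dict carrying (some of)
+        # the keys "path", "id" and "location", filled in as the file is
+        # located in incoming, the pool or a queue directory below.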
 
         # Try and find all files mentioned in the .dsc.  This has
         # to work harder to cope with the multiple possible
@@ -1979,7 +2397,7 @@ distribution."""
                 found = "%s in incoming" % (dsc_name)
 
                 # Check the file does not already exist in the archive
-                ql = get_poolfile_like_name(dsc_name)
+                ql = get_poolfile_like_name(dsc_name, session)
 
                 # Strip out anything that isn't '%s' or '/%s$'
                 for i in ql:
@@ -1998,7 +2416,7 @@ distribution."""
                 if len(ql) > 0:
                     # Ignore exact matches for .orig.tar.gz
                     match = 0
-                    if dsc_name.endswith(".orig.tar.gz"):
+                    if re_is_orig_source.match(dsc_name):
                         for i in ql:
                             if self.pkg.files.has_key(dsc_name) and \
                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
@@ -2007,14 +2425,16 @@ distribution."""
                                 # TODO: Don't delete the entry, just mark it as not needed
                                 # This would fix the stupidity of changing something we often iterate over
                                 # whilst we're doing it
-                                del files[dsc_name]
-                                self.pkg.orig_tar_gz = os.path.join(i.location.path, i.filename)
+                                del self.pkg.files[dsc_name]
+                                if not orig_files.has_key(dsc_name):
+                                    orig_files[dsc_name] = {}
+                                orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
                                 match = 1
 
                     if not match:
                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
 
-            elif dsc_name.endswith(".orig.tar.gz"):
+            elif re_is_orig_source.match(dsc_name):
                 # Check in the pool
                 ql = get_poolfile_like_name(dsc_name, session)
 
@@ -2048,51 +2468,97 @@ distribution."""
                     old_file_fh.close()
                     actual_size = os.stat(old_file)[stat.ST_SIZE]
                     found = old_file
-                    suite_type = f.location.archive_type
+                    suite_type = x.location.archive_type
                     # need this for updating dsc_files in install()
-                    dsc_entry["files id"] = f.file_id
+                    dsc_entry["files id"] = x.file_id
                     # See install() in process-accepted...
-                    self.pkg.orig_tar_id = f.file_id
-                    self.pkg.orig_tar_gz = old_file
-                    self.pkg.orig_tar_location = f.location.location_id
+                    if not orig_files.has_key(dsc_name):
+                        orig_files[dsc_name] = {}
+                    orig_files[dsc_name]["id"] = x.file_id
+                    orig_files[dsc_name]["path"] = old_file
+                    orig_files[dsc_name]["location"] = x.location.location_id
                 else:
                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
                     # Not there? Check the queue directories...
                     for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
-                        in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)], dsc_name)
+                        if not Cnf.has_key("Dir::Queue::%s" % (directory)):
+                            continue
+                        in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
                         if os.path.exists(in_otherdir):
                             in_otherdir_fh = utils.open_file(in_otherdir)
                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                             in_otherdir_fh.close()
                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                             found = in_otherdir
-                            self.pkg.orig_tar_gz = in_otherdir
+                            if not orig_files.has_key(dsc_name):
+                                orig_files[dsc_name] = {}
+                            orig_files[dsc_name]["path"] = in_otherdir
 
                     if not found:
-                        self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_name))
-                        self.pkg.orig_tar_gz = -1
+                        self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
                         continue
             else:
-                self.rejects.append("%s refers to %s, but I can't find it in the queue." % (file, dsc_name))
+                self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
                 continue
             if actual_md5 != dsc_entry["md5sum"]:
-                self.rejects.append("md5sum for %s doesn't match %s." % (found, file))
+                self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
             if actual_size != int(dsc_entry["size"]):
-                self.rejects.append("size for %s doesn't match %s." % (found, file))
+                self.rejects.append("size for %s doesn't match %s." % (found, filename))
+
+    ################################################################################
+    # This is used by process-new and process-holding to recheck a changes file
+    # at the time we're running.  It mainly wraps various other internal functions
+    # and is similar to accepted_checks - these should probably be tidied up
+    # and combined
+    def recheck(self, session):
+        cnf = Config()
+        for f in self.pkg.files.keys():
+            # The .orig.tar.gz can disappear out from under us if it's a
+            # duplicate of one in the archive.
+            if not self.pkg.files.has_key(f):
+                continue
+
+            entry = self.pkg.files[f]
+
+            # Check that the source still exists
+            if entry["type"] == "deb":
+                source_version = entry["source version"]
+                source_package = entry["source package"]
+                if not self.pkg.changes["architecture"].has_key("source") \
+                   and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
+                    source_epochless_version = re_no_epoch.sub('', source_version)
+                    dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
+                    found = False
+                    for q in ["Accepted", "Embargoed", "Unembargoed", "Newstage"]:
+                        if cnf.has_key("Dir::Queue::%s" % (q)):
+                            if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (q)], dsc_filename)):
+                                found = True
+                    if not found:
+                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
+
+            # Version and file overwrite checks
+            if entry["type"] == "deb":
+                self.check_binary_against_db(f, session)
+            elif entry["type"] == "dsc":
+                self.check_source_against_db(f, session)
+                self.check_dsc_against_db(f, session)
 
     ################################################################################
-    def accepted_checks(self, overwrite_checks=True, session=None):
+    def accepted_checks(self, overwrite_checks, session):
         # Recheck anything that relies on the database, since that's not
         # frozen between accept and our run time when called from p-a.
 
         # overwrite_checks is set to False when installing to stable/oldstable
 
-        if session is None:
-            session = DBConn().session()
-
         propogate={}
         nopropogate={}
 
+        # Find the .dsc (again)
+        dsc_filename = None
+        for f in self.pkg.files.keys():
+            if self.pkg.files[f]["type"] == "dsc":
+                dsc_filename = f
+
         for checkfile in self.pkg.files.keys():
             # The .orig.tar.gz can disappear out from under us if it's a
             # duplicate of one in the archive.
@@ -2119,7 +2585,7 @@ distribution."""
 
             # propagate in case the package is in the override tables:
             for suite in self.pkg.changes.get("propdistribution", {}).keys():
-                if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile):
+                if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                     propogate[suite] = 1
                 else:
                     nopropogate[suite] = 1
@@ -2132,7 +2598,7 @@ distribution."""
         for checkfile in self.pkg.files.keys():
+            entry = self.pkg.files[checkfile]
             # Check the package is still in the override tables
             for suite in self.pkg.changes["distribution"].keys():
-                if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile):
+                if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
 
     ################################################################################
@@ -2143,10 +2609,11 @@ distribution."""
     def do_unaccept(self):
         cnf = Config()
 
+        self.update_subst()
         self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
         self.Subst["__REJECT_MESSAGE__"] = self.package_info()
         self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
-        self.Subst["__BCC__"] = "X-DAK: dak process-accepted\nX-Katie: $Revision: 1.18 $"
+        self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
         if cnf.has_key("Dinstall::Bcc"):
             self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])