Merge commit 'ftpmaster/master'
diff --git a/daklib/queue.py b/daklib/queue.py
index a1dcdf3ce0efb6edfe2b74830e6d6decd89d7248..890df374c4acef6580d09cfbf4b2ed01d14cee01 100755
--- a/daklib/queue.py
+++ b/daklib/queue.py
@@ -26,7 +26,6 @@ Queue utility functions for dak
 
 ###############################################################################
 
-import cPickle
 import errno
 import os
 import pg
@@ -36,8 +35,14 @@ import time
 import apt_inst
 import apt_pkg
 import utils
+import commands
+import shutil
+import textwrap
+import tempfile
 from types import *
 
+import yaml
+
 from dak_exceptions import *
 from changes import *
 from regexes import *
@@ -45,29 +50,30 @@ from config import Config
 from holding import Holding
 from dbconn import *
 from summarystats import SummaryStats
-from utils import parse_changes
+from utils import parse_changes, check_dsc_files
 from textutils import fix_maintainer
+from binary import Binary
 
 ###############################################################################
 
-def get_type(f, session=None):
+def get_type(f, session):
     """
     Get the file type of C{f}
 
     @type f: dict
     @param f: file entry from Changes object
 
+    @type session: SQLA Session
+    @param session: SQL Alchemy session object
+
     @rtype: string
     @return: filetype
 
     """
-    if session is None:
-        session = DBConn().session()
-
     # Determine the type
     if f.has_key("dbtype"):
-        file_type = file["dbtype"]
-    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
+        file_type = f["dbtype"]
+    elif re_source_ext.match(f["type"]):
         file_type = "dsc"
     else:
         utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))
@@ -112,7 +118,7 @@ def determine_new(changes, files, warn=1):
         pkg = f["package"]
         priority = f["priority"]
         section = f["section"]
-        file_type = get_type(f)
+        file_type = get_type(f, session)
         component = f["component"]
 
         if file_type == "dsc":
@@ -157,6 +163,8 @@ def determine_new(changes, files, warn=1):
             if new[pkg].has_key("othercomponents"):
                 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
 
+    session.close()
+
     return new
 
 ################################################################################
@@ -211,7 +219,7 @@ def lookup_uid_from_fingerprint(fpr, session):
     # This is a stupid default, but see the comments below
     is_dm = False
 
-    user = get_uid_from_fingerprint(changes["fingerprint"], session)
+    user = get_uid_from_fingerprint(fpr, session)
 
     if user is not None:
         uid = user.uid
@@ -221,8 +229,8 @@ def lookup_uid_from_fingerprint(fpr, session):
             uid_name = user.name
 
         # Check the relevant fingerprint (which we have to have)
-        for f in uid.fingerprint:
-            if f.fingerprint == changes['fingerprint']:
+        for f in user.fingerprint:
+            if f.fingerprint == fpr:
                 is_dm = f.keyring.debian_maintainer
                 break
 
@@ -230,6 +238,17 @@ def lookup_uid_from_fingerprint(fpr, session):
 
 ###############################################################################
 
+def check_status(files):
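+    # Sketch of the intended use (illustrative, not from the original code):
+    #   (new, byhand) = check_status(upload.pkg.files)
+    # A non-zero flag means the upload still needs NEW or byhand handling.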
+    new = byhand = 0
+    for f in files.keys():
+        if files[f]["type"] == "byhand":
+            byhand = 1
+        elif files[f].has_key("new"):
+            new = 1
+    return (new, byhand)
+
+###############################################################################
+
 # Used by Upload.check_timestamps
 class TarTime(object):
     def __init__(self, future_cutoff, past_cutoff):
@@ -255,6 +274,7 @@ class Upload(object):
 
     """
     def __init__(self):
+        self.logger = None
         self.pkg = Changes()
         self.reset()
 
@@ -302,7 +322,7 @@ class Upload(object):
 
         # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
         if not self.pkg.changes.has_key("architecture") or not \
-           isinstance(changes["architecture"], DictType):
+           isinstance(self.pkg.changes["architecture"], DictType):
             self.pkg.changes["architecture"] = { "Unknown" : "" }
 
         # and maintainer2047 may not exist.
@@ -319,7 +339,7 @@ class Upload(object):
            (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
 
             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
-            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], changes["maintainer2047"])
+            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
             self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
         else:
             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
@@ -344,12 +364,13 @@ class Upload(object):
     ###########################################################################
     def load_changes(self, filename):
         """
-        @rtype boolean
+        @rtype: boolean
         @return: whether the changes file was valid or not.  We may want to
                  reject even if this is True (see what gets put in self.rejects).
                  This is simply to prevent us even trying things later which will
                  fail because we couldn't properly parse the file.
         """
+        Cnf = Config()
         self.pkg.changes_file = filename
 
         # Parse the .changes field into a dictionary
@@ -367,7 +388,7 @@ class Upload(object):
 
         # Parse the Files field from the .changes into another dictionary
         try:
-            self.pkg.files.update(build_file_list(self.pkg.changes))
+            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
         except ParseChangesError, line:
             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
             return False
@@ -479,7 +500,7 @@ class Upload(object):
                 (source, dest) = args[1:3]
                 if self.pkg.changes["distribution"].has_key(source):
                     for arch in self.pkg.changes["architecture"].keys():
-                        if arch not in [ arch_string for a in get_suite_architectures(source) ]:
+                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                             self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                             del self.pkg.changes["distribution"][source]
                             self.pkg.changes["distribution"][dest] = 1
@@ -604,9 +625,9 @@ class Upload(object):
         entry["maintainer"] = control.Find("Maintainer", "")
 
         if f.endswith(".udeb"):
-            files[f]["dbtype"] = "udeb"
+            self.pkg.files[f]["dbtype"] = "udeb"
         elif f.endswith(".deb"):
-            files[f]["dbtype"] = "deb"
+            self.pkg.files[f]["dbtype"] = "deb"
         else:
             self.rejects.append("%s is neither a .deb or a .udeb." % (f))
 
@@ -622,7 +643,7 @@ class Upload(object):
             source_version = m.group(2)
 
         if not source_version:
-            source_version = files[f]["version"]
+            source_version = self.pkg.files[f]["version"]
 
         entry["source package"] = source
         entry["source version"] = source_version
@@ -680,10 +701,12 @@ class Upload(object):
         # Check the version and for file overwrites
         self.check_binary_against_db(f, session)
 
-        b = Binary(f).scan_package()
-        if len(b.rejects) > 0:
-            for j in b.rejects:
-                self.rejects.append(j)
+        # Temporarily disable contents generation until we change the table storage layout
+        #b = Binary(f)
+        #b.scan_package()
+        #if len(b.rejects) > 0:
+        #    for j in b.rejects:
+        #        self.rejects.append(j)
 
     def source_file_checks(self, f, session):
         entry = self.pkg.files[f]
@@ -701,7 +724,7 @@ class Upload(object):
             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
 
         # Ensure the source version matches the version in the .changes file
-        if entry["type"] == "orig.tar.gz":
+        if re_is_orig_source.match(f):
             changes_version = self.pkg.changes["chopversion2"]
         else:
             changes_version = self.pkg.changes["chopversion"]
@@ -726,11 +749,22 @@ class Upload(object):
     def per_suite_file_checks(self, f, suite, session):
         cnf = Config()
         entry = self.pkg.files[f]
+        archive = utils.where_am_i()
 
         # Skip byhand
         if entry.has_key("byhand"):
             return
 
+        # Check we have fields we need to do these checks
+        oktogo = True
+        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
+            if not entry.has_key(m):
+                self.rejects.append("file '%s' does not have field %s set" % (f, m))
+                oktogo = False
+
+        if not oktogo:
+            return
+
         # Handle component mappings
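+        # Each mapping entry is assumed to be a whitespace-separated
+        # "<source> <dest>" pair of component names, e.g. (illustrative)
+        # "non-US/main main".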
         for m in cnf.ValueList("ComponentMappings"):
             (source, dest) = m.split()
@@ -745,8 +779,7 @@ class Upload(object):
             return
 
         # Validate the component
-        component = entry["component"]
-        if not get_component(component, session):
+        if not get_component(entry["component"], session):
             self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
             return
 
@@ -760,7 +793,7 @@ class Upload(object):
 
         # Determine the location
         location = cnf["Dir::Pool"]
-        l = get_location(location, component, archive, session)
+        l = get_location(location, entry["component"], archive, session)
         if l is None:
             self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (entry["component"], archive))
             entry["location id"] = -1
@@ -785,7 +818,7 @@ class Upload(object):
 
         # Check for packages that have moved from one component to another
         entry['suite'] = suite
-        res = get_binary_components(files[f]['package'], suite, entry["architecture"], session)
+        res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
         if res.rowcount > 0:
             entry["othercomponents"] = res.fetchone()[0]
 
@@ -834,7 +867,7 @@ class Upload(object):
         has_binaries = False
         has_source = False
 
-        s = DBConn().session()
+        session = DBConn().session()
 
         for f, entry in self.pkg.files.items():
             # Ensure the file does not already exist in one of the accepted directories
@@ -888,6 +921,8 @@ class Upload(object):
             for suite in self.pkg.changes["distribution"].keys():
                 self.per_suite_file_checks(f, suite, session)
 
+        session.close()
+
         # If the .changes file says it has source, it must have source.
         if self.pkg.changes["architecture"].has_key("source"):
             if not has_source:
@@ -897,7 +932,7 @@ class Upload(object):
                 self.rejects.append("source only uploads are not supported.")
 
     ###########################################################################
-    def check_dsc(self, action=True):
+    def check_dsc(self, action=True, session=None):
         """Returns bool indicating whether or not the source changes are valid"""
         # Ensure there is source to check
         if not self.pkg.changes["architecture"].has_key("source"):
@@ -934,7 +969,7 @@ class Upload(object):
 
         # Build up the file list of files mentioned by the .dsc
         try:
-            self.pkg.dsc_files.update(utils.build_file_list(dsc, is_a_dsc=1))
+            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
         except NoFilesFieldError:
             self.rejects.append("%s: no Files: field." % (dsc_filename))
             return False
@@ -954,13 +989,14 @@ class Upload(object):
         # Validate the source and version fields
         if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
             self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
-        if not re_valid_version.match(dsc["version"]):
+        if not re_valid_version.match(self.pkg.dsc["version"]):
             self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
 
-        # Bumping the version number of the .dsc breaks extraction by stable's
-        # dpkg-source.  So let's not do that...
-        if self.pkg.dsc["format"] != "1.0":
-            self.rejects.append("%s: incompatible 'Format' version produced by a broken version of dpkg-dev 1.9.1{3,4}." % (dsc_filename))
+        # Only a limited set of source formats is allowed in each suite
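+        # (typical format names are "1.0" or "3.0 (quilt)" -- illustrative
+        # values; the authoritative list comes from the database)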
+        for dist in self.pkg.changes["distribution"].keys():
+            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
+            if self.pkg.dsc["format"] not in allowed:
+                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
 
         # Validate the Maintainer field
         try:
@@ -974,11 +1010,6 @@ class Upload(object):
         for field_name in [ "build-depends", "build-depends-indep" ]:
             field = self.pkg.dsc.get(field_name)
             if field:
-                # Check for broken dpkg-dev lossage...
-                if field.startswith("ARRAY"):
-                    self.rejects.append("%s: invalid %s field produced by a broken version of dpkg-dev (1.10.11)" % \
-                                        (dsc_filename, field_name.title()))
-
                 # Have apt try to parse them...
                 try:
                     apt_pkg.ParseSrcDepends(field)
@@ -992,24 +1023,14 @@ class Upload(object):
         if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
             self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
 
-        # Ensure there is a .tar.gz in the .dsc file
-        has_tar = False
-        for f in dsc_files.keys():
-            m = re_issource.match(f)
-            if not m:
-                self.rejects.append("%s: %s in Files field not recognised as source." % (dsc_filename, f))
-                continue
-            ftype = m.group(3)
-            if ftype == "orig.tar.gz" or ftype == "tar.gz":
-                has_tar = True
-
-        if not has_tar:
-            self.rejects.append("%s: no .tar.gz or .orig.tar.gz in 'Files' field." % (dsc_filename))
+        # Ensure the Files field contain only what's expected
+        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
 
         # Ensure source is newer than existing source in target suites
+        session = DBConn().session()
         self.check_source_against_db(dsc_filename, session)
-
-        self.check_dsc_against_db(dsc_filename)
+        self.check_dsc_against_db(dsc_filename, session)
+        session.close()
 
         return True
 
@@ -1023,8 +1044,8 @@ class Upload(object):
 
         # Find the .dsc (again)
         dsc_filename = None
-        for f in self.files.keys():
-            if files[f]["type"] == "dsc":
+        for f in self.pkg.files.keys():
+            if self.pkg.files[f]["type"] == "dsc":
                 dsc_filename = f
 
         # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
@@ -1032,7 +1053,7 @@ class Upload(object):
             return
 
         # Create a symlink mirror of the source files in our temporary directory
-        for f in self.files.keys():
+        for f in self.pkg.files.keys():
             m = re_issource.match(f)
             if m:
                 src = os.path.join(source_dir, f)
@@ -1040,30 +1061,33 @@ class Upload(object):
                 if not os.path.exists(src):
                     return
                 ftype = m.group(3)
-                if ftype == "orig.tar.gz" and self.pkg.orig_tar_gz:
+                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
+                   self.pkg.orig_files[f].has_key("path"):
                     continue
                 dest = os.path.join(os.getcwd(), f)
                 os.symlink(src, dest)
 
-        # If the orig.tar.gz is not a part of the upload, create a symlink to the
-        # existing copy.
-        if self.pkg.orig_tar_gz:
-            dest = os.path.join(os.getcwd(), os.path.basename(self.pkg.orig_tar_gz))
-            os.symlink(self.pkg.orig_tar_gz, dest)
+        # If the orig files are not a part of the upload, create symlinks to the
+        # existing copies.
+        for orig_file in self.pkg.orig_files.keys():
+            if not self.pkg.orig_files[orig_file].has_key("path"):
+                continue
+            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
+            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
 
         # Extract the source
         cmd = "dpkg-source -sn -x %s" % (dsc_filename)
         (result, output) = commands.getstatusoutput(cmd)
         if (result != 0):
             self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
-            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "), "")
+            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
             return
 
         if not cnf.Find("Dir::Queue::BTSVersionTrack"):
             return
 
         # Get the upstream version
-        upstr_version = re_no_epoch.sub('', dsc["version"])
+        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
         if re_strip_revision.search(upstr_version):
             upstr_version = re_strip_revision.sub('', upstr_version)
 
@@ -1092,10 +1116,11 @@ class Upload(object):
         #      We should probably scrap or rethink the whole reprocess thing
         # Bail out if:
         #    a) there's no source
-        # or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files'
-        # or c) the orig.tar.gz is MIA
+        # or b) reprocess is 2 - we will do this check next time when orig
+        #       tarball is in 'files'
+        # or c) the orig files are MIA
         if not self.pkg.changes["architecture"].has_key("source") or self.reprocess == 2 \
-           or self.pkg.orig_tar_gz == -1:
+           or len(self.pkg.orig_files) == 0:
             return
 
         tmpdir = utils.temp_dirname()
@@ -1114,6 +1139,7 @@ class Upload(object):
             shutil.rmtree(tmpdir)
         except OSError, e:
             if e.errno != errno.EACCES:
                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
 
             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
@@ -1124,7 +1150,8 @@ class Upload(object):
             if result != 0:
                 utils.fubar("'%s' failed with result %s." % (cmd, result))
             shutil.rmtree(tmpdir)
-        except:
+        except Exception, e:
+            utils.warn("%s: error removing tmp dir for source tree: %s" % (self.pkg.dsc["source"], e))
             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
 
     ###########################################################################
@@ -1138,7 +1165,7 @@ class Upload(object):
 
         # We need to deal with the original changes blob, as the fields we need
         # might not be in the changes dict serialised into the .dak anymore.
-        orig_changes = parse_deb822(self.pkg.changes['filecontents'])
+        orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
 
         # Copy the checksums over to the current changes dict.  This will keep
         # the existing modifications to it intact.
@@ -1164,7 +1191,7 @@ class Upload(object):
                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
                     self.rejects.append(j)
 
-    def check_hashes():
+    def check_hashes(self):
         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
             self.rejects.append(m)
 
@@ -1177,8 +1204,95 @@ class Upload(object):
         for m in utils.check_size(".dsc", self.pkg.dsc_files):
             self.rejects.append(m)
 
-        for m in utils.ensure_hashes(self.pkg.changes, dsc, files, dsc_files):
-            self.rejects.append(m)
+        self.ensure_hashes()
+
+    ###########################################################################
+    def check_lintian(self):
+        # Only check some distributions
+        valid_dist = False
+        for dist in ('unstable', 'experimental'):
+            if dist in self.pkg.changes['distribution']:
+                valid_dist = True
+                break
+
+        if not valid_dist:
+            return
+
+        cnf = Config()
+        tagfile = cnf.get("Dinstall::LintianTags")
+        if tagfile is None:
+            # We don't have a tagfile, so just don't do anything.
+            return
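+        # For illustration only: the tag file is assumed to be YAML with a
+        # toplevel "lintian" key mapping tag types to lists of tags, e.g.
+        #
+        #   lintian:
+        #     warning:
+        #       - a-tag-an-override-is-acceptable-for
+        #     error:
+        #       - a-tag-we-always-reject-on
+        #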
+        # Parse the yaml file
+        sourcefile = file(tagfile, 'r')
+        sourcecontent = sourcefile.read()
+        sourcefile.close()
+        try:
+            lintiantags = yaml.load(sourcecontent)['lintian']
+        except yaml.YAMLError, msg:
+            utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
+            return
+
+        # Now set up the input file for lintian.  lintian wants "one tag per line",
+        # so write the tags out that way.  We put all types of tags in one file and
+        # then sort through lintian's output later to see whether a detected tag is
+        # fatal or not.  That way we only run lintian once over all the tags, even
+        # though we might reject on some of them but not on others.
+        # Additionally build up a set of tags
+        tags = set()
+        (fd, temp_filename) = utils.temp_filename()
+        temptagfile = os.fdopen(fd, 'w')
+        for tagtype in lintiantags:
+            for tag in lintiantags[tagtype]:
+                temptagfile.write("%s\n" % tag)
+                tags.add(tag)
+        temptagfile.close()
+
+        # So now we should look at running lintian on the .changes file, capturing
+        # its output so that we can then parse it.
+        command = "lintian --show-overrides --tags-from-file %s %s" % (temp_filename, self.pkg.changes_file)
+        (result, output) = commands.getstatusoutput(command)
+        # We are done with lintian, remove our tempfile
+        os.unlink(temp_filename)
+        if (result == 2):
+            utils.warn("lintian failed for %s [return code: %s]." % (self.pkg.changes_file, result))
+            utils.warn(utils.prefix_multi_line_string(output, " [possible output:] "))
+
+        if len(output) == 0:
+            return
+
+        # We have lintian output, so this package isn't clean.  Let's parse the
+        # output and see whether we have a candidate for rejection, e.g.
+        # W: tzdata: binary-without-manpage usr/sbin/tzconfig
+        for line in output.split('\n'):
+            m = re_parse_lintian.match(line)
+            if m is None:
+                continue
+
+            etype = m.group(1)
+            epackage = m.group(2)
+            etag = m.group(3)
+            etext = m.group(4)
+
+            # So let's check whether we know the tag at all.
+            if etag not in tags:
+                continue
+
+            if etype == 'O':
+                # We know it and it is overridden.  Check that the override is allowed.
+                if etag in lintiantags['warning']:
+                    # The tag is overridden, and it is allowed to be overridden.
+                    # Don't add a reject message.
+                    pass
+                elif etag in lintiantags['error']:
+                    # The tag is overridden - but is not allowed to be
+                    self.rejects.append("%s: Overridden tag %s found, but this tag may not be overridden." % (epackage, etag))
+            else:
+                # Tag is known, it is not overridden, direct reject.
+                self.rejects.append("%s: Found lintian output: '%s %s', automatically rejected package." % (epackage, etag, etext))
+                # Now tell if they *might* override it.
+                if etag in lintiantags['warning']:
+                    self.rejects.append("%s: If you have a good reason, you may override this lintian tag." % (epackage))
 
     ###########################################################################
     def check_urgency(self):
@@ -1199,11 +1313,13 @@ class Upload(object):
     #  travel can cause errors on extraction]
 
     def check_timestamps(self):
+        Cnf = Config()
+
         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
         tar = TarTime(future_cutoff, past_cutoff)
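+        # In short: tar members with mtimes later than future_cutoff or
+        # earlier than past_cutoff get flagged in the loop below.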
 
-        for filename, entry in self.pkg.files.keys():
+        for filename, entry in self.pkg.files.items():
             if entry["type"] == "deb":
                 tar.reset()
                 try:
@@ -1239,11 +1355,85 @@ class Upload(object):
                 except:
                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
 
+    ###########################################################################
+    def check_transition(self, session):
+        cnf = Config()
+
+        sourcepkg = self.pkg.changes["source"]
+
+        # No sourceful upload -> no need to do anything else, direct return
+        # We also only check uploads to unstable, not experimental or those
+        # going to some proposed-updates queue
+        if "source" not in self.pkg.changes["architecture"] or \
+           "unstable" not in self.pkg.changes["distribution"]:
+            return
+
+        # Also, only check if there is a transitions file defined (and it
+        # actually exists).
+        transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
+        if transpath == "" or not os.path.exists(transpath):
+            return
+
+        # Parse the yaml file
+        sourcefile = file(transpath, 'r')
+        sourcecontent = sourcefile.read()
+        sourcefile.close()
+        try:
+            transitions = yaml.load(sourcecontent)
+        except yaml.YAMLError, msg:
+            # This shouldn't happen, there is a wrapper to edit the file which
+            # checks it, but we'd rather be safe than end up rejecting
+            # everything.
+            utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
+            return
+
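+        # For illustration only -- a transition entry is assumed to look
+        # something like:
+        #
+        #   libfoo-transition:
+        #     source: libfoo
+        #     new: 1.2-1
+        #     rm: Some Releaseteam Member
+        #     reason: "libfoo rebuilds for the new ABI"
+        #     packages:
+        #       - some-affected-source
+        #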
+        # Now look through all defined transitions
+        for trans in transitions:
+            t = transitions[trans]
+            source = t["source"]
+            expected = t["new"]
+
+            # Will be None if nothing is in testing.
+            current = get_source_in_suite(source, "testing", session)
+            if current is not None:
+                compare = apt_pkg.VersionCompare(current.version, expected)
+
+            if current is None or compare < 0:
+                # This is still valid, the current version in testing is older than
+                # the new version we wait for, or there is none in testing yet
+
+                # Check if the source we look at is affected by this.
+                if sourcepkg in t['packages']:
+                    # The source is affected, let's reject it.
+
+                    rejectmsg = "%s: part of the %s transition.\n\n" % (
+                        sourcepkg, trans)
+
+                    if current is not None:
+                        currentlymsg = "at version %s" % (current.version)
+                    else:
+                        currentlymsg = "not present in testing"
+
+                    rejectmsg += "Transition description: %s\n\n" % (t["reason"])
+
+                    rejectmsg += "\n".join(textwrap.wrap("""Your package
+is part of a testing transition designed to get %s migrated (it is
+currently %s, we need version %s).  This transition is managed by the
+Release Team, and %s is the Release-Team member responsible for it.
+Please mail debian-release@lists.debian.org or contact %s directly if you
+need further assistance.  You might want to upload to experimental until this
+transition is done."""
+                            % (source, currentlymsg, expected, t["rm"], t["rm"])))
+
+                    self.rejects.append(rejectmsg)
+                    return
+
     ###########################################################################
     def check_signed_by_key(self):
         """Ensure the .changes is signed by an authorized uploader."""
         session = DBConn().session()
 
+        self.check_transition(session)
+
         (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(self.pkg.changes["fingerprint"], session=session)
 
         # match claimed name with actual name:
@@ -1309,7 +1499,7 @@ class Upload(object):
                 for suite in self.pkg.changes["distribution"].keys():
                     q = session.query(DBSource)
                     q = q.join(DBBinary).filter_by(package=b)
-                    q = q.join(BinAssociation).join(Suite).filter_by(suite)
+                    q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
 
                     for s in q.all():
                         if s.source != self.pkg.changes["source"]:
@@ -1321,6 +1511,8 @@ class Upload(object):
                 if self.pkg.files[f].has_key("new"):
                     self.rejects.append("%s may not upload NEW file %s" % (uid, f))
 
+        session.close()
+
     ###########################################################################
     def build_summaries(self):
         """ Build a summary of changes the upload introduces. """
@@ -1371,6 +1563,7 @@ class Upload(object):
         for bug in bugs:
             summary += "%s " % (bug)
             if action:
+                self.update_subst()
                 self.Subst["__BUG_NUMBER__"] = bug
                 if self.pkg.changes["distribution"].has_key("stable"):
                     self.Subst["__STABLE_WARNING__"] = """
@@ -1381,15 +1574,15 @@ The update will eventually make its way into the next released Debian
 distribution."""
                 else:
                     self.Subst["__STABLE_WARNING__"] = ""
-                    mail_message = utils.TemplateSubst(self.Subst, template)
-                    utils.send_mail(mail_message)
+                mail_message = utils.TemplateSubst(self.Subst, template)
+                utils.send_mail(mail_message)
 
                 # Clear up after ourselves
                 del self.Subst["__BUG_NUMBER__"]
                 del self.Subst["__STABLE_WARNING__"]
 
-        if action:
-            self.Logger.log(["closing bugs"] + bugs)
+        if action and self.logger:
+            self.logger.log(["closing bugs"] + bugs)
 
         summary += "\n"
 
@@ -1426,7 +1619,7 @@ distribution."""
         self.Subst["__SHORT_SUMMARY__"] = short_summary
 
         for dist in self.pkg.changes["distribution"].keys():
-            announce_list = Cnf.Find("Suite::%s::Announce" % (dist))
+            announce_list = cnf.Find("Suite::%s::Announce" % (dist))
             if announce_list == "" or lists_done.has_key(announce_list):
                 continue
 
@@ -1434,6 +1627,7 @@ distribution."""
             summary += "Announcing to %s\n" % (announce_list)
 
             if action:
+                self.update_subst()
                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
                 if cnf.get("Dinstall::TrackingServer") and \
                    self.pkg.changes["architecture"].has_key("source"):
@@ -1481,9 +1675,10 @@ distribution."""
             targetdir = cnf["Dir::Queue::Accepted"]
 
         print "Accepting."
-        self.Logger.log(["Accepting changes", self.pkg.changes_file])
+        if self.logger:
+            self.logger.log(["Accepting changes", self.pkg.changes_file])
 
-        self.write_dot_dak(targetdir)
+        self.pkg.write_dot_dak(targetdir)
 
         # Move all the files into the accepted directory
         utils.move(self.pkg.changes_file, targetdir)
@@ -1497,6 +1692,7 @@ distribution."""
         # Send accept mail, announce to lists, close bugs and check for
         # override disparities
         if not cnf["Dinstall::Options::No-Mail"]:
+            self.update_subst()
             self.Subst["__SUITE__"] = ""
             self.Subst["__SUMMARY__"] = summary
             mail_message = utils.TemplateSubst(self.Subst, accepttemplate)
@@ -1551,7 +1747,7 @@ distribution."""
         # <Ganneff> yes
 
         # This routine returns None on success or an error on failure
-        res = get_queue('accepted').autobuild_upload(self.pkg, cnf["Dir::Queue::Accepted"])
+        res = get_or_set_queue('accepted').autobuild_upload(self.pkg, cnf["Dir::Queue::Accepted"])
         if res:
             utils.fubar(res)
 
@@ -1582,6 +1778,7 @@ distribution."""
 
         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
 
+        self.update_subst()
         self.Subst["__SUMMARY__"] = summary
         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
         utils.send_mail(mail_message)
@@ -1728,6 +1925,7 @@ distribution."""
 
         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
 
+        self.update_subst()
         if not manual:
             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
@@ -1739,7 +1937,7 @@ distribution."""
             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
-            self.Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
+            self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
             # Write the rejection email out as the <foo>.reason file
             os.write(reason_fd, reject_mail_message)
@@ -1754,12 +1952,13 @@ distribution."""
         if not cnf["Dinstall::Options::No-Mail"]:
             utils.send_mail(reject_mail_message)
 
-        self.Logger.log(["rejected", pkg.changes_file])
+        if self.logger:
+            self.logger.log(["rejected", self.pkg.changes_file])
 
         return 0
 
     ################################################################################
-    def in_override_p(self, package, component, suite, binary_type, file, session=None):
+    def in_override_p(self, package, component, suite, binary_type, file, session):
         """
         Check if a package already has override entries in the DB
 
@@ -1784,9 +1983,6 @@ distribution."""
 
         cnf = Config()
 
-        if session is None:
-            session = DBConn().session()
-
         if binary_type == "": # must be source
             file_type = "dsc"
         else:
@@ -1822,8 +2018,9 @@ distribution."""
 
         Description: TODO
         """
+        Cnf = Config()
         anyversion = None
-        anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
+        anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
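+        # sv_list is assumed to be a list of (suite_name, version) pairs, as
+        # built by the check_*_against_db helpers (a sketch of the shape, not
+        # authoritative documentation).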
         for (s, v) in sv_list:
             if s in [ x.lower() for x in anysuite ]:
                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
@@ -1911,34 +2108,28 @@ distribution."""
                         self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
 
     ################################################################################
-    def check_binary_against_db(self, file, session=None):
-        if session is None:
-            session = DBConn().session()
-
+    def check_binary_against_db(self, file, session):
         # Ensure version is sane
         q = session.query(BinAssociation)
         q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[file]["package"])
         q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[file]["architecture"], 'all']))
 
         self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
-                                       file, files[file]["version"], sourceful=False)
+                                       file, self.pkg.files[file]["version"], sourceful=False)
 
         # Check for any existing copies of the file
-        q = session.query(DBBinary).filter_by(files[file]["package"])
-        q = q.filter_by(version=files[file]["version"])
-        q = q.join(Architecture).filter_by(arch_string=files[file]["architecture"])
+        q = session.query(DBBinary).filter_by(package=self.pkg.files[file]["package"])
+        q = q.filter_by(version=self.pkg.files[file]["version"])
+        q = q.join(Architecture).filter_by(arch_string=self.pkg.files[file]["architecture"])
 
         if q.count() > 0:
             self.rejects.append("%s: can not overwrite existing copy already in the archive." % (file))
 
     ################################################################################
 
-    def check_source_against_db(self, file, session=None):
+    def check_source_against_db(self, file, session):
         """
         """
-        if session is None:
-            session = DBConn().session()
-
         source = self.pkg.dsc.get("source")
         version = self.pkg.dsc.get("version")
 
@@ -1950,21 +2141,20 @@ distribution."""
                                        file, version, sourceful=True)
 
     ################################################################################
-    def check_dsc_against_db(self, file, session=None):
+    def check_dsc_against_db(self, file, session):
         """
 
         @warning: NB: this function can remove entries from the 'files' index [if
-         the .orig.tar.gz is a duplicate of the one in the archive]; if
+         the orig tarball is a duplicate of the one in the archive]; if
          you're iterating over 'files' and call this function as part of
          the loop, be sure to add a check to the top of the loop to
          ensure you haven't just tried to dereference the deleted entry.
 
         """
 
-        if session is None:
-            session = DBConn().session()
-
-        self.pkg.orig_tar_gz = None
+        Cnf = Config()
+        self.pkg.orig_files = {} # XXX: do we need to clear it?
+        orig_files = self.pkg.orig_files
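+        # Sketch of the layout this function fills in: orig_files maps a
+        # tarball name to a dict which may gain "path", "id" and "location"
+        # keys as the file is located in the pool or queues below.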
 
         # Try and find all files mentioned in the .dsc.  This has
         # to work harder to cope with the multiple possible
@@ -1979,7 +2169,7 @@ distribution."""
                 found = "%s in incoming" % (dsc_name)
 
                 # Check the file does not already exist in the archive
-                ql = get_poolfile_like_name(dsc_name)
+                ql = get_poolfile_like_name(dsc_name, session)
 
                 # Strip out anything that isn't '%s' or '/%s$'
                 for i in ql:
@@ -1998,7 +2188,7 @@ distribution."""
                 if len(ql) > 0:
                     # Ignore exact matches for .orig.tar.gz
                     match = 0
-                    if dsc_name.endswith(".orig.tar.gz"):
+                    if re_is_orig_source.match(dsc_name):
                         for i in ql:
                             if self.pkg.files.has_key(dsc_name) and \
                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
@@ -2007,14 +2197,16 @@ distribution."""
                                 # TODO: Don't delete the entry, just mark it as not needed
                                 # This would fix the stupidity of changing something we often iterate over
                                 # whilst we're doing it
-                                del files[dsc_name]
-                                self.pkg.orig_tar_gz = os.path.join(i.location.path, i.filename)
+                                del self.pkg.files[dsc_name]
+                                if not orig_files.has_key(dsc_name):
+                                    orig_files[dsc_name] = {}
+                                orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
                                 match = 1
 
                     if not match:
                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
 
-            elif dsc_name.endswith(".orig.tar.gz"):
+            elif re_is_orig_source.match(dsc_name):
                 # Check in the pool
                 ql = get_poolfile_like_name(dsc_name, session)
 
@@ -2048,29 +2240,34 @@ distribution."""
                     old_file_fh.close()
                     actual_size = os.stat(old_file)[stat.ST_SIZE]
                     found = old_file
-                    suite_type = f.location.archive_type
+                    suite_type = x.location.archive_type
                     # need this for updating dsc_files in install()
-                    dsc_entry["files id"] = f.file_id
+                    dsc_entry["files id"] = x.file_id
                     # See install() in process-accepted...
-                    self.pkg.orig_tar_id = f.file_id
-                    self.pkg.orig_tar_gz = old_file
-                    self.pkg.orig_tar_location = f.location.location_id
+                    if not orig_files.has_key(dsc_name):
+                        orig_files[dsc_name] = {}
+                    orig_files[dsc_name]["id"] = x.file_id
+                    orig_files[dsc_name]["path"] = old_file
+                    orig_files[dsc_name]["location"] = x.location.location_id
                 else:
                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
                     # Not there? Check the queue directories...
                     for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
-                        in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)], dsc_name)
+                        if not Cnf.has_key("Dir::Queue::%s" % (directory)):
+                            continue
+                        in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
                         if os.path.exists(in_otherdir):
                             in_otherdir_fh = utils.open_file(in_otherdir)
                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                             in_otherdir_fh.close()
                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                             found = in_otherdir
-                            self.pkg.orig_tar_gz = in_otherdir
+                            if not orig_files.has_key(dsc_name):
+                                orig_files[dsc_name] = {}
+                            orig_files[dsc_name]["path"] = in_otherdir
 
                     if not found:
                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_name))
-                        self.pkg.orig_tar_gz = -1
                         continue
             else:
                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (file, dsc_name))
@@ -2080,6 +2277,141 @@ distribution."""
             if actual_size != int(dsc_entry["size"]):
                 self.rejects.append("size for %s doesn't match %s." % (found, file))
 
+    ################################################################################
+    # This is used by process-new and process-holding to recheck a changes file
+    # at the time we're running.  It mainly wraps various other internal functions
+    # and is similar to accepted_checks - these should probably be tidied up
+    # and combined
+    def recheck(self, session):
+        cnf = Config()
+        for f in self.pkg.files.keys():
+            # The .orig.tar.gz can disappear out from under us if it's a
+            # duplicate of one in the archive.
+            if not self.pkg.files.has_key(f):
+                continue
+
+            entry = self.pkg.files[f]
+
+            # Check that the source still exists
+            if entry["type"] == "deb":
+                source_version = entry["source version"]
+                source_package = entry["source package"]
+                if not self.pkg.changes["architecture"].has_key("source") \
+                   and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
+                    source_epochless_version = re_no_epoch.sub('', source_version)
+                    dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
+                    found = False
+                    for q in ["Accepted", "Embargoed", "Unembargoed", "Newstage"]:
+                        if cnf.has_key("Dir::Queue::%s" % (q)):
+                            if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
+                                found = True
+                    if not found:
+                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
+
+            # Version and file overwrite checks
+            if entry["type"] == "deb":
+                self.check_binary_against_db(f, session)
+            elif entry["type"] == "dsc":
+                self.check_source_against_db(f, session)
+                self.check_dsc_against_db(f, session)
+
+    ################################################################################
+    def accepted_checks(self, overwrite_checks, session):
+        # Recheck anything that relies on the database, since that's not
+        # frozen between accept and our run time when called from p-a.
+
+        # overwrite_checks is set to False when installing to stable/oldstable
+
+        propagate = {}
+        nopropagate = {}
+
+        # Find the .dsc (again)
+        dsc_filename = None
+        for f in self.pkg.files.keys():
+            if self.pkg.files[f]["type"] == "dsc":
+                dsc_filename = f
+
+        for checkfile in self.pkg.files.keys():
+            # The .orig.tar.gz can disappear out from under us if it's a
+            # duplicate of one in the archive.
+            if not self.pkg.files.has_key(checkfile):
+                continue
+
+            entry = self.pkg.files[checkfile]
+
+            # Check that the source still exists
+            if entry["type"] == "deb":
+                source_version = entry["source version"]
+                source_package = entry["source package"]
+                if not self.pkg.changes["architecture"].has_key("source") \
+                   and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
+                    self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
+
+            # Version and file overwrite checks
+            if overwrite_checks:
+                if entry["type"] == "deb":
+                    self.check_binary_against_db(checkfile, session)
+                elif entry["type"] == "dsc":
+                    self.check_source_against_db(checkfile, session)
+                    self.check_dsc_against_db(dsc_filename, session)
+
+            # propagate in the case it is in the override tables:
+            for suite in self.pkg.changes.get("propdistribution", {}).keys():
+                if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
+                    propagate[suite] = 1
+                else:
+                    nopropagate[suite] = 1
+
+        for suite in propagate.keys():
+            if suite in nopropagate:
+                continue
+            self.pkg.changes["distribution"][suite] = 1
+
+        for checkfile in self.pkg.files.keys():
+            entry = self.pkg.files[checkfile]
+
+            # Check the package is still in the override tables
+            for suite in self.pkg.changes["distribution"].keys():
+                if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
+                    self.rejects.append("%s is NEW for %s." % (checkfile, suite))
+
+    ################################################################################
+    # This is not really a reject, but an unaccept, but since a) the code for
+    # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
+    # extremely rare, for now we'll go with whining at our admin folks...
+
+    def do_unaccept(self):
+        cnf = Config()
+
+        self.update_subst()
+        self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
+        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
+        self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
+        self.Subst["__BCC__"] = "X-DAK: dak process-accepted\nX-Katie: $Revision: 1.18 $"
+        if cnf.has_key("Dinstall::Bcc"):
+            self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
+
+        template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
+
+        reject_mail_message = utils.TemplateSubst(self.Subst, template)
+
+        # Write the rejection email out as the <foo>.reason file
+        reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
+        reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
+
+        # If we fail here someone is probably trying to exploit the race
+        # so let's just raise an exception ...
+        if os.path.exists(reject_filename):
+            os.unlink(reject_filename)
+
+        fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
+        os.write(fd, reject_mail_message)
+        os.close(fd)
+
+        utils.send_mail(reject_mail_message)
+
+        del self.Subst["__REJECTOR_ADDRESS__"]
+        del self.Subst["__REJECT_MESSAGE__"]
+        del self.Subst["__CC__"]
+
     ################################################################################
     # If any file of an upload has a recent mtime then chances are good
     # the file is still being uploaded.