Add support for multiple orig tarballs
diff --git a/daklib/queue.py b/daklib/queue.py
index 5d791c5526fb70437e52b5f7f89d013ebaa62ec0..03bc7e05d17f4ecf05b1ad8fb3448bd41240ff05 100755
--- a/daklib/queue.py
+++ b/daklib/queue.py
@@ -38,8 +38,11 @@ import apt_pkg
 import utils
 import commands
 import shutil
+import textwrap
 from types import *
 
+import yaml
+
 from dak_exceptions import *
 from changes import *
 from regexes import *
@@ -47,30 +50,30 @@ from config import Config
 from holding import Holding
 from dbconn import *
 from summarystats import SummaryStats
-from utils import parse_changes
+from utils import parse_changes, check_dsc_files
 from textutils import fix_maintainer
 from binary import Binary
 
 ###############################################################################
 
-def get_type(f, session=None):
+def get_type(f, session):
     """
     Get the file type of C{f}
 
     @type f: dict
     @param f: file entry from Changes object
 
+    @type session: SQLA Session
+    @param session: SQL Alchemy session object
+
     @rtype: string
     @return: filetype
 
     """
-    if session is None:
-        session = DBConn().session()
-
     # Determine the type
     if f.has_key("dbtype"):
         file_type = f["dbtype"]
-    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
+    elif re_source_ext.match(f["type"]):
         file_type = "dsc"
     else:
         utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))
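
re_source_ext and re_is_orig_source replace the hard-coded extension lists; both live in daklib/regexes.py, which this diff does not touch. A rough sketch of patterns that would satisfy the way they are used here (matched against the "type" field above and against full filenames elsewhere in this diff); the real definitions may differ:

    import re

    # Sketch only; the canonical patterns are in daklib/regexes.py.
    # Source-related extensions, matched against entries like "orig.tar.gz", "dsc".
    re_source_ext = re.compile(r"(orig(-[a-zA-Z0-9-]+)?\.tar\.(gz|bz2)|diff\.(gz|bz2)|tar\.(gz|bz2)|dsc)$")
    # Upstream tarballs only, including component tarballs of 3.0 (quilt) sources,
    # matched against full filenames such as "foo_1.0.orig-docs.tar.gz".
    re_is_orig_source = re.compile(r".+\.orig(-[a-zA-Z0-9-]+)?\.tar\.(gz|bz2)$")

    assert re_source_ext.match("orig.tar.bz2")
    assert re_source_ext.match("dsc")
    assert re_is_orig_source.match("foo_1.0.orig-docs.tar.gz")
    assert not re_is_orig_source.match("foo_1.0.diff.gz")
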
@@ -115,7 +118,7 @@ def determine_new(changes, files, warn=1):
         pkg = f["package"]
         priority = f["priority"]
         section = f["section"]
-        file_type = get_type(f)
+        file_type = get_type(f, session)
         component = f["component"]
 
         if file_type == "dsc":
@@ -160,6 +163,8 @@ def determine_new(changes, files, warn=1):
             if new[pkg].has_key("othercomponents"):
                 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
 
+    session.close()
+
     return new
 
 ################################################################################
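
A recurring theme in this change: the helpers no longer open their own database session when passed None. Instead the caller creates one SQLAlchemy session, threads it through every helper, and closes it afterwards, as determine_new() now does above. A minimal sketch of that calling convention (run_checks is illustrative, not a function in this file):

    from daklib.dbconn import DBConn

    def run_checks(upload, dsc_filename):
        # The caller owns the session ...
        session = DBConn().session()
        try:
            # ... threads it through each database-touching helper ...
            upload.check_source_against_db(dsc_filename, session)
            upload.check_dsc_against_db(dsc_filename, session)
        finally:
            # ... and closes it exactly once when done.
            session.close()
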
@@ -306,7 +311,7 @@ class Upload(object):
 
         # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
         if not self.pkg.changes.has_key("architecture") or not \
-           isinstance(changes["architecture"], DictType):
+           isinstance(self.pkg.changes["architecture"], DictType):
             self.pkg.changes["architecture"] = { "Unknown" : "" }
 
         # and maintainer2047 may not exist.
@@ -323,7 +328,7 @@ class Upload(object):
            (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
 
             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
-            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], changes["maintainer2047"])
+            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
             self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
         else:
             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
@@ -484,7 +489,7 @@ class Upload(object):
                 (source, dest) = args[1:3]
                 if self.pkg.changes["distribution"].has_key(source):
                     for arch in self.pkg.changes["architecture"].keys():
-                        if arch not in [ arch_string for a in get_suite_architectures(source) ]:
+                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                             self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                             del self.pkg.changes["distribution"][source]
                             self.pkg.changes["distribution"][dest] = 1
@@ -685,11 +690,12 @@ class Upload(object):
         # Check the version and for file overwrites
         self.check_binary_against_db(f, session)
 
-        b = Binary(f)
-        b.scan_package()
-        if len(b.rejects) > 0:
-            for j in b.rejects:
-                self.rejects.append(j)
+        # Temporarily disable contents generation until we change the table storage layout
+        #b = Binary(f)
+        #b.scan_package()
+        #if len(b.rejects) > 0:
+        #    for j in b.rejects:
+        #        self.rejects.append(j)
 
     def source_file_checks(self, f, session):
         entry = self.pkg.files[f]
@@ -707,7 +713,7 @@ class Upload(object):
             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
 
         # Ensure the source version matches the version in the .changes file
-        if entry["type"] == "orig.tar.gz":
+        if re_is_orig_source.match(f):
             changes_version = self.pkg.changes["chopversion2"]
         else:
             changes_version = self.pkg.changes["chopversion"]
@@ -738,6 +744,16 @@ class Upload(object):
         if entry.has_key("byhand"):
             return
 
+        # Check we have fields we need to do these checks
+        oktogo = True
+        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
+            if not entry.has_key(m):
+                self.rejects.append("file '%s' does not have field %s set" % (f, m))
+                oktogo = False
+
+        if not oktogo:
+            return
+
         # Handle component mappings
         for m in cnf.ValueList("ComponentMappings"):
             (source, dest) = m.split()
@@ -752,8 +768,7 @@ class Upload(object):
             return
 
         # Validate the component
-        component = entry["component"]
-        if not get_component(component, session):
+        if not get_component(entry["component"], session):
             self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
             return
 
@@ -895,6 +910,8 @@ class Upload(object):
             for suite in self.pkg.changes["distribution"].keys():
                 self.per_suite_file_checks(f, suite, session)
 
+        session.close()
+
         # If the .changes file says it has source, it must have source.
         if self.pkg.changes["architecture"].has_key("source"):
             if not has_source:
@@ -904,7 +921,7 @@ class Upload(object):
                 self.rejects.append("source only uploads are not supported.")
 
     ###########################################################################
-    def check_dsc(self, action=True):
+    def check_dsc(self, action=True, session=None):
         """Returns bool indicating whether or not the source changes are valid"""
         # Ensure there is source to check
         if not self.pkg.changes["architecture"].has_key("source"):
@@ -964,10 +981,11 @@ class Upload(object):
         if not re_valid_version.match(self.pkg.dsc["version"]):
             self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
 
-        # Bumping the version number of the .dsc breaks extraction by stable's
-        # dpkg-source.  So let's not do that...
-        if self.pkg.dsc["format"] != "1.0":
-            self.rejects.append("%s: incompatible 'Format' version produced by a broken version of dpkg-dev 1.9.1{3,4}." % (dsc_filename))
+        # Only a limited list of source formats are allowed in each suite
+        for dist in self.pkg.changes["distribution"].keys():
+            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
+            if self.pkg.dsc["format"] not in allowed:
+                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
 
         # Validate the Maintainer field
         try:
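
get_suite_src_formats() comes from daklib/dbconn.py and is not shown in this diff; the loop above only relies on it returning one object per accepted source format, each with a format_name attribute. A self-contained sketch of the same check, with made-up suite data standing in for the database lookup:

    # Stand-in for get_suite_src_formats(dist, session); suite contents are made up.
    SUITE_SRC_FORMATS = {
        "unstable": ["1.0", "3.0 (quilt)", "3.0 (native)"],
        "lenny":    ["1.0"],
    }

    def source_format_rejects(dsc_filename, dsc_format, distributions):
        """Mirror of the loop above: every targeted suite must accept the .dsc format."""
        rejects = []
        for dist in distributions:
            allowed = SUITE_SRC_FORMATS.get(dist, [])
            if dsc_format not in allowed:
                rejects.append("%s: source format '%s' not allowed in %s (accepted: %s)"
                               % (dsc_filename, dsc_format, dist, ", ".join(allowed)))
        return rejects

    # A "3.0 (quilt)" package targeted at lenny would collect exactly one reject message.
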
@@ -999,24 +1017,14 @@ class Upload(object):
         if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
             self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
 
-        # Ensure there is a .tar.gz in the .dsc file
-        has_tar = False
-        for f in self.pkg.dsc_files.keys():
-            m = re_issource.match(f)
-            if not m:
-                self.rejects.append("%s: %s in Files field not recognised as source." % (dsc_filename, f))
-                continue
-            ftype = m.group(3)
-            if ftype == "orig.tar.gz" or ftype == "tar.gz":
-                has_tar = True
-
-        if not has_tar:
-            self.rejects.append("%s: no .tar.gz or .orig.tar.gz in 'Files' field." % (dsc_filename))
+        # Ensure the Files field contains only what's expected
+        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
 
         # Ensure source is newer than existing source in target suites
-        self.check_source_against_db(dsc_filename)
-
-        self.check_dsc_against_db(dsc_filename)
+        session = DBConn().session()
+        self.check_source_against_db(dsc_filename, session)
+        self.check_dsc_against_db(dsc_filename, session)
+        session.close()
 
         return True
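
check_dsc_files() is imported from daklib/utils.py in the first hunk of this diff; its body is not shown here. It replaces the removed inline "is there a tarball" test and is expected to hand back a list of reject strings describing anything unexpected in the .dsc Files field. A much simplified sketch of that contract (the real function applies per-format rules about which tarballs and diffs may appear; re_issource is the same regex the removed code used):

    # Interface sketch only; the real implementation lives in daklib/utils.py.
    def check_dsc_files(dsc_filename, dsc, dsc_files):
        """Return reject messages for unexpected entries in the Files field."""
        rejects = []
        has_tarball = False
        for name in dsc_files.keys():
            if not re_issource.match(name):
                rejects.append("%s: %s in Files field not recognised as source."
                               % (dsc_filename, name))
                continue
            if name.endswith(".tar.gz") or name.endswith(".tar.bz2"):
                has_tarball = True
        if not has_tarball:
            rejects.append("%s: no tarball found in the Files field." % (dsc_filename))
        return rejects
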
 
@@ -1047,16 +1055,19 @@ class Upload(object):
                 if not os.path.exists(src):
                     return
                 ftype = m.group(3)
-                if ftype == "orig.tar.gz" and self.pkg.orig_tar_gz:
+                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
+                   self.pkg.orig_files[f].has_key("path"):
                     continue
                 dest = os.path.join(os.getcwd(), f)
                 os.symlink(src, dest)
 
-        # If the orig.tar.gz is not a part of the upload, create a symlink to the
-        # existing copy.
-        if self.pkg.orig_tar_gz:
-            dest = os.path.join(os.getcwd(), os.path.basename(self.pkg.orig_tar_gz))
-            os.symlink(self.pkg.orig_tar_gz, dest)
+        # If the orig files are not a part of the upload, create symlinks to the
+        # existing copies.
+        for orig_file in self.pkg.orig_files.keys():
+            if not self.pkg.orig_files[orig_file].has_key("path"):
+                continue
+            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
+            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
 
         # Extract the source
         cmd = "dpkg-source -sn -x %s" % (dsc_filename)
@@ -1099,10 +1110,11 @@ class Upload(object):
         #      We should probably scrap or rethink the whole reprocess thing
         # Bail out if:
         #    a) there's no source
-        # or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files'
-        # or c) the orig.tar.gz is MIA
+        # or b) reprocess is 2 - we will do this check next time when the orig
+        #       tarball is in 'files'
+        # or c) the orig files are MIA
         if not self.pkg.changes["architecture"].has_key("source") or self.reprocess == 2 \
-           or self.pkg.orig_tar_gz == -1:
+           or len(self.pkg.orig_files) == 0:
             return
 
         tmpdir = utils.temp_dirname()
@@ -1249,11 +1261,85 @@ class Upload(object):
                 except:
                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
 
+    ###########################################################################
+    def check_transition(self, session):
+        cnf = Config()
+
+        sourcepkg = self.pkg.changes["source"]
+
+        # No sourceful upload -> no need to do anything else, direct return
+        # We also only check uploads to unstable, not experimental or anything
+        # heading for a proposed-updates queue
+        if "source" not in self.pkg.changes["architecture"] or \
+           "unstable" not in self.pkg.changes["distribution"]:
+            return
+
+        # Also, only check if a transitions file is defined (and actually
+        # exists).
+        transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
+        if transpath == "" or not os.path.exists(transpath):
+            return
+
+        # Parse the yaml file
+        sourcefile = file(transpath, 'r')
+        sourcecontent = sourcefile.read()
+        try:
+            transitions = yaml.load(sourcecontent)
+        except yaml.YAMLError, msg:
+            # This shouldn't happen, there is a wrapper to edit the file which
+            # checks it, but we prefer to be safe rather than end up rejecting
+            # everything.
+            utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
+            return
+
+        # Now look through all defined transitions
+        for trans in transitions:
+            t = transitions[trans]
+            source = t["source"]
+            expected = t["new"]
+
+            # Will be None if nothing is in testing.
+            current = get_source_in_suite(source, "testing", session)
+            if current is not None:
+                compare = apt_pkg.VersionCompare(current.version, expected)
+
+            if current is None or compare < 0:
+                # This is still valid, the current version in testing is older than
+                # the new version we wait for, or there is none in testing yet
+
+                # Check if the source we look at is affected by this.
+                if sourcepkg in t['packages']:
+                    # The source is affected, let's reject it.
+
+                    rejectmsg = "%s: part of the %s transition.\n\n" % (
+                        sourcepkg, trans)
+
+                    if current is not None:
+                        currentlymsg = "at version %s" % (current.version)
+                    else:
+                        currentlymsg = "not present in testing"
+
+                    rejectmsg += "Transition description: %s\n\n" % (t["reason"])
+
+                    rejectmsg += "\n".join(textwrap.wrap("""Your package
+is part of a testing transition designed to get %s migrated (it is
+currently %s, we need version %s).  This transition is managed by the
+Release Team, and %s is the Release-Team member responsible for it.
+Please mail debian-release@lists.debian.org or contact %s directly if you
+need further assistance.  You might want to upload to experimental until this
+transition is done."""
+                            % (source, currentlymsg, expected, t["rm"], t["rm"])))
+
+                    self.rejects.append(rejectmsg)
+                    return
+
     ###########################################################################
     def check_signed_by_key(self):
         """Ensure the .changes is signed by an authorized uploader."""
         session = DBConn().session()
 
+        self.check_transition(session)
+
         (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(self.pkg.changes["fingerprint"], session=session)
 
         # match claimed name with actual name:
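
check_transition() parses the file named by Dinstall::Reject::ReleaseTransitions with yaml.load() and then iterates over it as a mapping of transition names to details, reading the keys "source", "new", "rm", "reason" and "packages". A made-up example of the parsed structure the loop above would accept (equivalently, of the YAML file's contents):

    # Hypothetical result of yaml.load() on the ReleaseTransitions file.
    transitions = {
        "apt-solver": {                          # transition name, used in the reject text
            "source": "apt",                     # source package driving the transition
            "new": "0.7.21",                     # version we are waiting for in testing
            "rm": "Some Release Team member",    # who to contact about it
            "reason": "apt needs to migrate before its reverse dependencies",
            "packages": ["aptitude", "synaptic", "python-apt"],
        },
    }

    # While the apt in testing is still older than 0.7.21 (or absent), an unstable
    # upload of any package listed under "packages" is rejected with a pointer to "rm".
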
@@ -1319,7 +1405,7 @@ class Upload(object):
                 for suite in self.pkg.changes["distribution"].keys():
                     q = session.query(DBSource)
                     q = q.join(DBBinary).filter_by(package=b)
-                    q = q.join(BinAssociation).join(Suite).filter_by(suite)
+                    q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
 
                     for s in q.all():
                         if s.source != self.pkg.changes["source"]:
@@ -1331,6 +1417,8 @@ class Upload(object):
                 if self.pkg.files[f].has_key("new"):
                     self.rejects.append("%s may not upload NEW file %s" % (uid, f))
 
+        session.close()
+
     ###########################################################################
     def build_summaries(self):
         """ Build a summary of changes the upload introduces. """
@@ -1381,6 +1469,7 @@ class Upload(object):
         for bug in bugs:
             summary += "%s " % (bug)
             if action:
+                self.update_subst()
                 self.Subst["__BUG_NUMBER__"] = bug
                 if self.pkg.changes["distribution"].has_key("stable"):
                     self.Subst["__STABLE_WARNING__"] = """
@@ -1391,8 +1480,8 @@ The update will eventually make its way into the next released Debian
 distribution."""
                 else:
                     self.Subst["__STABLE_WARNING__"] = ""
-                    mail_message = utils.TemplateSubst(self.Subst, template)
-                    utils.send_mail(mail_message)
+                mail_message = utils.TemplateSubst(self.Subst, template)
+                utils.send_mail(mail_message)
 
                 # Clear up after ourselves
                 del self.Subst["__BUG_NUMBER__"]
@@ -1444,6 +1533,7 @@ distribution."""
             summary += "Announcing to %s\n" % (announce_list)
 
             if action:
+                self.update_subst()
                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
                 if cnf.get("Dinstall::TrackingServer") and \
                    self.pkg.changes["architecture"].has_key("source"):
@@ -1491,7 +1581,7 @@ distribution."""
             targetdir = cnf["Dir::Queue::Accepted"]
 
         print "Accepting."
-       if self.logger:
+        if self.logger:
             self.logger.log(["Accepting changes", self.pkg.changes_file])
 
         self.pkg.write_dot_dak(targetdir)
@@ -1508,6 +1598,7 @@ distribution."""
         # Send accept mail, announce to lists, close bugs and check for
         # override disparities
         if not cnf["Dinstall::Options::No-Mail"]:
+            self.update_subst()
             self.Subst["__SUITE__"] = ""
             self.Subst["__SUMMARY__"] = summary
             mail_message = utils.TemplateSubst(self.Subst, accepttemplate)
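
Several hunks add a self.update_subst() call right before the message-specific substitutions are filled in; update_subst() itself is outside this diff, so its behaviour here is an assumption. The pattern appears to be: rebuild the shared substitution keys from the current upload first, then set only the keys that particular mail needs (send_accept_mail below is illustrative, not a method in this file):

    # Sketch of the mail-sending pattern after this change; update_subst() is
    # assumed to refresh the shared __MAINTAINER__/__SOURCE__/... substitutions.
    def send_accept_mail(upload, summary, accepttemplate):
        upload.update_subst()                           # shared keys first
        upload.Subst["__SUITE__"] = ""                  # then per-mail keys
        upload.Subst["__SUMMARY__"] = summary
        utils.send_mail(utils.TemplateSubst(upload.Subst, accepttemplate))
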
@@ -1593,6 +1684,7 @@ distribution."""
 
         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
 
+        self.update_subst()
         self.Subst["__SUMMARY__"] = summary
         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
         utils.send_mail(mail_message)
@@ -1739,6 +1831,7 @@ distribution."""
 
         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
 
+        self.update_subst()
         if not manual:
             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
@@ -1765,13 +1858,13 @@ distribution."""
         if not cnf["Dinstall::Options::No-Mail"]:
             utils.send_mail(reject_mail_message)
 
-       if self.logger:
-            self.logger.log(["rejected", pkg.changes_file])
+        if self.logger:
+            self.logger.log(["rejected", self.pkg.changes_file])
 
         return 0
 
     ################################################################################
-    def in_override_p(self, package, component, suite, binary_type, file, session=None):
+    def in_override_p(self, package, component, suite, binary_type, file, session):
         """
         Check if a package already has override entries in the DB
 
@@ -1796,9 +1889,6 @@ distribution."""
 
         cnf = Config()
 
-        if session is None:
-            session = DBConn().session()
-
         if binary_type == "": # must be source
             file_type = "dsc"
         else:
@@ -1834,8 +1924,9 @@ distribution."""
 
         Description: TODO
         """
+        Cnf = Config()
         anyversion = None
-        anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
+        anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
         for (s, v) in sv_list:
             if s in [ x.lower() for x in anysuite ]:
                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
@@ -1923,10 +2014,7 @@ distribution."""
                         self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
 
     ################################################################################
-    def check_binary_against_db(self, file, session=None):
-        if session is None:
-            session = DBConn().session()
-
+    def check_binary_against_db(self, file, session):
         # Ensure version is sane
         q = session.query(BinAssociation)
         q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[file]["package"])
@@ -1945,12 +2033,9 @@ distribution."""
 
     ################################################################################
 
-    def check_source_against_db(self, file, session=None):
+    def check_source_against_db(self, file, session):
         """
         """
-        if session is None:
-            session = DBConn().session()
-
         source = self.pkg.dsc.get("source")
         version = self.pkg.dsc.get("version")
 
@@ -1962,21 +2047,20 @@ distribution."""
                                        file, version, sourceful=True)
 
     ################################################################################
-    def check_dsc_against_db(self, file, session=None):
+    def check_dsc_against_db(self, file, session):
         """
 
         @warning: NB: this function can remove entries from the 'files' index [if
-         the .orig.tar.gz is a duplicate of the one in the archive]; if
+         the orig tarball is a duplicate of the one in the archive]; if
          you're iterating over 'files' and call this function as part of
          the loop, be sure to add a check to the top of the loop to
          ensure you haven't just tried to dereference the deleted entry.
 
         """
 
-        if session is None:
-            session = DBConn().session()
-
-        self.pkg.orig_tar_gz = None
+        Cnf = Config()
+        self.pkg.orig_files = {} # XXX: do we need to clear it?
+        orig_files = self.pkg.orig_files
 
         # Try and find all files mentioned in the .dsc.  This has
         # to work harder to cope with the multiple possible
@@ -1991,7 +2075,7 @@ distribution."""
                 found = "%s in incoming" % (dsc_name)
 
                 # Check the file does not already exist in the archive
-                ql = get_poolfile_like_name(dsc_name)
+                ql = get_poolfile_like_name(dsc_name, session)
 
                 # Strip out anything that isn't '%s' or '/%s$'
                 for i in ql:
@@ -2010,7 +2094,7 @@ distribution."""
                 if len(ql) > 0:
                     # Ignore exact matches for .orig.tar.gz
                     match = 0
-                    if dsc_name.endswith(".orig.tar.gz"):
+                    if re_is_orig_source.match(dsc_name):
                         for i in ql:
                             if self.pkg.files.has_key(dsc_name) and \
                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
@@ -2020,13 +2104,15 @@ distribution."""
                                 # This would fix the stupidity of changing something we often iterate over
                                 # whilst we're doing it
                                 del self.pkg.files[dsc_name]
-                                self.pkg.orig_tar_gz = os.path.join(i.location.path, i.filename)
+                                if not orig_files.has_key(dsc_name):
+                                    orig_files[dsc_name] = {}
+                                orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
                                 match = 1
 
                     if not match:
                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
 
-            elif dsc_name.endswith(".orig.tar.gz"):
+            elif re_is_orig_source.match(dsc_name):
                 # Check in the pool
                 ql = get_poolfile_like_name(dsc_name, session)
 
@@ -2060,29 +2146,34 @@ distribution."""
                     old_file_fh.close()
                     actual_size = os.stat(old_file)[stat.ST_SIZE]
                     found = old_file
-                    suite_type = f.location.archive_type
+                    suite_type = x.location.archive_type
                     # need this for updating dsc_files in install()
-                    dsc_entry["files id"] = f.file_id
+                    dsc_entry["files id"] = x.file_id
                     # See install() in process-accepted...
-                    self.pkg.orig_tar_id = f.file_id
-                    self.pkg.orig_tar_gz = old_file
-                    self.pkg.orig_tar_location = f.location.location_id
+                    if not orig_files.has_key(dsc_name):
+                        orig_files[dsc_name] = {}
+                    orig_files[dsc_name]["id"] = x.file_id
+                    orig_files[dsc_name]["path"] = old_file
+                    orig_files[dsc_name]["location"] = x.location.location_id
                 else:
                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
                     # Not there? Check the queue directories...
                     for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
-                        in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)], dsc_name)
+                        if not Cnf.has_key("Dir::Queue::%s" % (directory)):
+                            continue
+                        in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
                         if os.path.exists(in_otherdir):
                             in_otherdir_fh = utils.open_file(in_otherdir)
                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                             in_otherdir_fh.close()
                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                             found = in_otherdir
-                            self.pkg.orig_tar_gz = in_otherdir
+                            if not orig_files.has_key(dsc_name):
+                                orig_files[dsc_name] = {}
+                            orig_files[dsc_name]["path"] = in_otherdir
 
                     if not found:
                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_name))
-                        self.pkg.orig_tar_gz = -1
                         continue
             else:
                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (file, dsc_name))
@@ -2093,18 +2184,21 @@ distribution."""
                 self.rejects.append("size for %s doesn't match %s." % (found, file))
 
     ################################################################################
-    def accepted_checks(self, overwrite_checks=True, session=None):
+    def accepted_checks(self, overwrite_checks, session):
         # Recheck anything that relies on the database; since that's not
         # frozen between accept and our run time when called from p-a.
 
         # overwrite_checks is set to False when installing to stable/oldstable
 
-        if session is None:
-            session = DBConn().session()
-
         propogate={}
         nopropogate={}
 
+        # Find the .dsc (again)
+        dsc_filename = None
+        for f in self.pkg.files.keys():
+            if self.pkg.files[f]["type"] == "dsc":
+                dsc_filename = f
+
         for checkfile in self.pkg.files.keys():
             # The .orig.tar.gz can disappear out from under us if it's a
             # duplicate of one in the archive.
@@ -2131,7 +2225,7 @@ distribution."""
 
             # propogate in the case it is in the override tables:
             for suite in self.pkg.changes.get("propdistribution", {}).keys():
-                if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile):
+                if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                     propogate[suite] = 1
                 else:
                     nopropogate[suite] = 1
@@ -2144,7 +2238,7 @@ distribution."""
         for checkfile in self.pkg.files.keys():
             # Check the package is still in the override tables
             for suite in self.pkg.changes["distribution"].keys():
-                if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile):
+                if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
 
     ################################################################################
@@ -2155,6 +2249,7 @@ distribution."""
     def do_unaccept(self):
         cnf = Config()
 
+        self.update_subst()
         self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
         self.Subst["__REJECT_MESSAGE__"] = self.package_info()
         self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]