Add new top level directories
index 09d8c8cb2df998bdb97bb3b89294d13b54254939..7f428318ffea66b24e305932eda4975e1730a04a 100644 (file)
--- a/katie.py
+++ b/katie.py
@@ -1,8 +1,8 @@
 #!/usr/bin/env python
 
 # Utility functions for katie
-# Copyright (C) 2001, 2002, 2003  James Troup <james@nocrew.org>
-# $Id: katie.py,v 1.33 2003-04-08 21:38:52 troup Exp $
+# Copyright (C) 2001, 2002, 2003, 2004, 2005  James Troup <james@nocrew.org>
+# $Id: katie.py,v 1.59 2005-12-17 10:57:03 rmurray Exp $
 
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -20,7 +20,7 @@
 
 ###############################################################################
 
-import cPickle, errno, os, pg, re, stat, string, sys, tempfile, time;
+import cPickle, errno, os, pg, re, stat, string, sys, time;
 import utils, db_access;
 import apt_inst, apt_pkg;
 
@@ -30,10 +30,8 @@ from types import *;
 
 re_isanum = re.compile (r"^\d+$");
 re_default_answer = re.compile(r"\[(.*)\]");
-re_fdnic = re.compile("\n\n");
-re_bin_only_nmu_of_mu = re.compile("\.\d+\.\d+$");
-re_bin_only_nmu_of_nmu = re.compile("\.\d+$");
-
+re_fdnic = re.compile(r"\n\n");
+re_bin_only_nmu = re.compile(r"\+b\d+$");
 ###############################################################################
 
 # Convenience wrapper to carry around all the package information in
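
The two removed patterns matched the old bin-only NMU version conventions (an extra ".N" or ".N.N" revision); the new re_bin_only_nmu pattern instead matches the "+bN" suffix that binNMUs now carry. A quick, self-contained illustration of how source_exists() strips that suffix to recover the source version (the version strings below are made up):

    import re

    re_bin_only_nmu = re.compile(r"\+b\d+$")

    for v in [ "1.0-3+b1", "1.0-3.1+b2", "1.0-3" ]:
        print "%-12s -> %s" % (v, re_bin_only_nmu.sub('', v))
    # 1.0-3+b1     -> 1.0-3
    # 1.0-3.1+b2   -> 1.0-3.1
    # 1.0-3        -> 1.0-3   (not a binNMU, left unchanged)
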
@@ -66,7 +64,9 @@ class nmu_p:
         changes = pkg.changes;
         dsc = pkg.dsc;
 
-        (dsc_rfc822, dsc_name, dsc_email) = utils.fix_maintainer (dsc.get("maintainer",Cnf["Dinstall::MyEmailAddress"]).lower());
+        i = utils.fix_maintainer (dsc.get("maintainer",
+                                          Cnf["Dinstall::MyEmailAddress"]).lower());
+        (dsc_rfc822, dsc_rfc2047, dsc_name, dsc_email) = i;
         # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
         if dsc_name == changes["maintainername"].lower() and \
            (changes["changedby822"] == "" or changes["changedbyname"].lower() == dsc_name):
@@ -76,7 +76,7 @@ class nmu_p:
             uploaders = dsc["uploaders"].lower().split(",");
             uploadernames = {};
             for i in uploaders:
-                (rfc822, name, email) = utils.fix_maintainer (i.strip());
+                (rfc822, rfc2047, name, email) = utils.fix_maintainer (i.strip());
                 uploadernames[name] = "";
             if uploadernames.has_key(changes["changedbyname"].lower()):
                 return 0;
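
utils.fix_maintainer() now returns a 4-tuple, adding an RFC 2047-encoded form of the maintainer alongside the RFC 822 one. The sketch below only suggests the shape of that tuple; it is not dak's actual implementation, which lives in utils.py and handles more corner cases:

    from email.Header import Header

    def fix_maintainer_sketch(maintainer):
        # Illustrative only: split "Name <address>" and build both an
        # RFC 822 string and an RFC 2047-encoded string for mail headers.
        name, addr = maintainer.rsplit("<", 1)
        name = name.strip()
        addr = addr.rstrip(">").strip()
        rfc822 = "%s <%s>" % (name, addr)
        try:
            name.decode("ascii")
            rfc2047 = rfc822                  # plain ASCII needs no encoding
        except UnicodeDecodeError:
            rfc2047 = "%s <%s>" % (Header(name, "utf-8").encode(), addr)
        return (rfc822, rfc2047, name, addr)

    print fix_maintainer_sketch("James Troup <james@nocrew.org>")
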
@@ -93,7 +93,6 @@ class Katie:
 
     def __init__(self, Cnf):
         self.Cnf = Cnf;
-        self.values = {};
         # Read in the group-maint override file
         self.nmu = nmu_p(Cnf);
         self.accept_count = 0;
@@ -118,6 +117,7 @@ class Katie:
             exec "self.pkg.%s.clear();" % (i);
         self.pkg.orig_tar_id = None;
         self.pkg.orig_tar_location = "";
+        self.pkg.orig_tar_gz = None;
 
     ###########################################################################
 
@@ -167,18 +167,19 @@ class Katie:
                     d_files[file][i] = files[file][i];
         ## changes
         # Mandatory changes fields
-        for i in [ "distribution", "source", "architecture", "version", "maintainer",
-                   "urgency", "fingerprint", "changedby822", "changedbyname",
-                   "maintainername", "maintaineremail", "closes" ]:
+        for i in [ "distribution", "source", "architecture", "version",
+                   "maintainer", "urgency", "fingerprint", "changedby822",
+                   "changedby2047", "changedbyname", "maintainer822",
+                   "maintainer2047", "maintainername", "maintaineremail",
+                   "closes", "changes" ]:
             d_changes[i] = changes[i];
         # Optional changes fields
-        # FIXME: changes should be mandatory
-        for i in [ "changed-by", "maintainer822", "filecontents", "format",
-                   "changes", "lisa note" ]:
+        for i in [ "changed-by", "filecontents", "format", "lisa note", "distribution-version" ]:
             if changes.has_key(i):
                 d_changes[i] = changes[i];
         ## dsc
-        for i in [ "source", "version", "maintainer", "fingerprint", "uploaders" ]:
+        for i in [ "source", "version", "maintainer", "fingerprint",
+                   "uploaders", "bts changelog" ]:
             if dsc.has_key(i):
                 d_dsc[i] = dsc[i];
         ## dsc_files
@@ -207,9 +208,9 @@ class Katie:
         # If jennifer crashed out in the right place, architecture may still be a string.
         if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
             changes["architecture"] = { "Unknown" : "" };
-        # and maintainer822 may not exist.
-        if not changes.has_key("maintainer822"):
-            changes["maintainer822"] = self.Cnf["Dinstall::MyEmailAddress"];
+        # and maintainer2047 may not exist.
+        if not changes.has_key("maintainer2047"):
+            changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"];
 
         Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys());
         Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file);
@@ -217,12 +218,13 @@ class Katie:
 
         # For source uploads the Changed-By field wins; otherwise Maintainer wins.
         if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
-            Subst["__MAINTAINER_FROM__"] = changes["changedby822"];
-            Subst["__MAINTAINER_TO__"] = changes["changedby822"] + ", " + changes["maintainer822"];
+            Subst["__MAINTAINER_FROM__"] = changes["changedby2047"];
+            Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
+                                                     changes["maintainer2047"]);
             Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown");
         else:
-            Subst["__MAINTAINER_FROM__"] = changes["maintainer822"];
-            Subst["__MAINTAINER_TO__"] = changes["maintainer822"];
+            Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"];
+            Subst["__MAINTAINER_TO__"] = changes["maintainer2047"];
             Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown");
         if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
             Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
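
The __MAINTAINER_FROM__ and __MAINTAINER_TO__ substitutions now take the RFC 2047-encoded values because they end up directly in the From:/To: headers of generated mails, where raw non-ASCII names are not allowed. A small stdlib-only illustration of why (the name below is made up):

    # -*- coding: utf-8 -*-
    from email.Header import Header

    name, addr = "Jörg Beispiel", "joerg@example.org"
    print "To: %s <%s>" % (Header(name, "utf-8").encode(), addr)
    # prints something like:
    # To: =?utf-8?q?J=C3=B6rg_Beispiel?= <joerg@example.org>
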
@@ -261,9 +263,11 @@ class Katie:
                 if files[file].has_key("othercomponents"):
                     summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
                 if files[file]["type"] == "deb":
-                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(utils.open_file(file)))["Description"] + '\n';
+                    deb_fh = utils.open_file(file)
+                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n';
+                    deb_fh.close()
             else:
-                files[file]["pool name"] = utils.poolify (changes["source"], files[file]["component"])
+                files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
                 destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
                 summary += file + "\n  to " + destination + "\n"
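
For files that are not NEW, the summary shows where each one will land in the pool. Assuming utils.poolify() follows the usual Debian pool layout (component/first letter of source/source/, with a "libx" prefix for library sources), the destination comes out like this for a made-up upload:

    # Hypothetical values: source "hello", component "main", Dir::PoolRoot "/srv/ftp/pool/"
    pool_name = "main/h/hello/"        # what utils.poolify("hello", "main") should produce
    destination = "/srv/ftp/pool/" + pool_name + "hello_2.1.1-4_i386.deb"
    print destination
    # /srv/ftp/pool/main/h/hello/hello_2.1.1-4_i386.deb
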
 
@@ -293,25 +297,42 @@ class Katie:
 
         bugs.sort();
         if not self.nmu.is_an_nmu(self.pkg):
-            summary += "Closing bugs: ";
-            for bug in bugs:
-                summary += "%s " % (bug);
-                if action:
-                    Subst["__BUG_NUMBER__"] = bug;
-                    if changes["distribution"].has_key("stable"):
-                        Subst["__STABLE_WARNING__"] = """
+            if changes["distribution"].has_key("experimental"):
+               # tag bugs as fixed-in-experimental for uploads to experimental
+               summary += "Setting bugs to severity fixed: ";
+               control_message = "";
+               for bug in bugs:
+                   summary += "%s " % (bug);
+                   control_message += "tag %s + fixed-in-experimental\n" % (bug);
+               if action and control_message != "":
+                   Subst["__CONTROL_MESSAGE__"] = control_message;
+                   mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-experimental-fixed");
+                   utils.send_mail (mail_message);
+               if action:
+                   self.Logger.log(["setting bugs to fixed"]+bugs);
+
+
+           else:
+               summary += "Closing bugs: ";
+               for bug in bugs:
+                   summary += "%s " % (bug);
+                   if action:
+                       Subst["__BUG_NUMBER__"] = bug;
+                       if changes["distribution"].has_key("stable"):
+                           Subst["__STABLE_WARNING__"] = """
 Note that this package is not part of the released stable Debian
 distribution.  It may have dependencies on other unreleased software,
 or other instabilities.  Please take care if you wish to install it.
 The update will eventually make its way into the next released Debian
 distribution.""";
-                    else:
-                        Subst["__STABLE_WARNING__"] = "";
-                    mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-close");
-                    utils.send_mail (mail_message);
-            if action:
-                self.Logger.log(["closing bugs"]+bugs);
-        else:                     # NMU
+                       else:
+                           Subst["__STABLE_WARNING__"] = "";
+                           mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-close");
+                           utils.send_mail (mail_message);
+                if action:
+                    self.Logger.log(["closing bugs"]+bugs);
+
+       else:                     # NMU
             summary += "Setting bugs to severity fixed: ";
             control_message = "";
             for bug in bugs:
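
For non-NMU uploads to experimental the bugs are no longer closed; instead a mail built from the jennifer.bug-experimental-fixed template carries BTS control commands that tag them fixed-in-experimental. With made-up bug numbers, the __CONTROL_MESSAGE__ block looks like:

    bugs = [ "123456", "234567" ]              # hypothetical bug numbers
    control_message = ""
    for bug in bugs:
        control_message += "tag %s + fixed-in-experimental\n" % (bug)
    print control_message
    # tag 123456 + fixed-in-experimental
    # tag 234567 + fixed-in-experimental
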
@@ -366,14 +387,17 @@ distribution.""";
         Cnf = self.Cnf;
         Subst = self.Subst;
         files = self.pkg.files;
+        changes = self.pkg.changes;
+        changes_file = self.pkg.changes_file;
+        dsc = self.pkg.dsc;
 
         print "Accepting."
-        self.Logger.log(["Accepting changes",self.pkg.changes_file]);
+        self.Logger.log(["Accepting changes",changes_file]);
 
         self.dump_vars(Cnf["Dir::Queue::Accepted"]);
 
         # Move all the files into the accepted directory
-        utils.move(self.pkg.changes_file, Cnf["Dir::Queue::Accepted"]);
+        utils.move(changes_file, Cnf["Dir::Queue::Accepted"]);
         file_keys = files.keys();
         for file in file_keys:
             utils.move(file, Cnf["Dir::Queue::Accepted"]);
@@ -389,26 +413,77 @@ distribution.""";
             utils.send_mail(mail_message)
             self.announce(short_summary, 1)
 
-        # Special support to enable clean auto-building of accepted packages
+
+        ## Helper stuff for DebBugs Version Tracking
+        if Cnf.Find("Dir::Queue::BTSVersionTrack"):
+            # ??? once queue/* is cleared on *.d.o and/or reprocessed
+            # the conditionalization on dsc["bts changelog"] should be
+            # dropped.
+
+            # Write out the version history from the changelog
+            if changes["architecture"].has_key("source") and \
+               dsc.has_key("bts changelog"):
+
+                temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
+                                                    dotprefix=1, perms=0644);
+                version_history = utils.open_file(temp_filename, 'w');
+                version_history.write(dsc["bts changelog"]);
+                version_history.close();
+                filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
+                                      changes_file[:-8]+".versions");
+                os.rename(temp_filename, filename);
+
+            # Write out the binary -> source mapping.
+            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
+                                                dotprefix=1, perms=0644);
+            debinfo = utils.open_file(temp_filename, 'w');
+            for file in file_keys:
+                f = files[file];
+                if f["type"] == "deb":
+                    line = " ".join([f["package"], f["version"],
+                                     f["architecture"], f["source package"],
+                                     f["source version"]]);
+                    debinfo.write(line+"\n");
+            debinfo.close();
+            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
+                                  changes_file[:-8]+".debinfo");
+            os.rename(temp_filename, filename);
+
+        self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
+
+    ###########################################################################
+
+    def queue_build (self, queue, path):
+        Cnf = self.Cnf
+        Subst = self.Subst
+        files = self.pkg.files
+        changes = self.pkg.changes
+        changes_file = self.pkg.changes_file
+        dsc = self.pkg.dsc
+        file_keys = files.keys()
+
+        ## Special support to enable clean auto-building of queued packages
+        queue_id = db_access.get_or_set_queue_id(queue)
+
         self.projectB.query("BEGIN WORK");
-        for suite in self.pkg.changes["distribution"].keys():
-            if suite not in Cnf.ValueList("Dinstall::AcceptedAutoBuildSuites"):
+        for suite in changes["distribution"].keys():
+            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
                 continue;
             suite_id = db_access.get_suite_id(suite);
-            dest_dir = Cnf["Dir::AcceptedAutoBuild"];
-            if Cnf.FindB("Dinstall::SecurityAcceptedAutoBuild"):
+            dest_dir = Cnf["Dir::QueueBuild"];
+            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                 dest_dir = os.path.join(dest_dir, suite);
             for file in file_keys:
-                src = os.path.join(Cnf["Dir::Queue::Accepted"], file);
+                src = os.path.join(path, file);
                 dest = os.path.join(dest_dir, file);
-                if Cnf.FindB("Dinstall::SecurityAcceptedAutoBuild"):
+                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                     # Copy it since the original won't be readable by www-data
                     utils.copy(src, dest);
                 else:
                     # Create a symlink to it
                     os.symlink(src, dest);
                 # Add it to the list of packages for later processing by apt-ftparchive
-                self.projectB.query("INSERT INTO accepted_autobuild (suite, filename, in_accepted) VALUES (%s, '%s', 't')" % (suite_id, dest));
+                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest));
             # If the .orig.tar.gz is in the pool, create a symlink to
             # it (if one doesn't already exist)
             if self.pkg.orig_tar_id:
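
The BTSVersionTrack block leaves two small files behind for debbugs version tracking: <changes basename>.versions holds the changelog history taken from the .dsc's "bts changelog" field, and <changes basename>.debinfo maps each binary package back to its source. A worked example of one .debinfo line, with made-up values:

    f = { "package": "hello", "version": "2.1.1-4+b1", "architecture": "i386",
          "source package": "hello", "source version": "2.1.1-4" }
    print " ".join([f["package"], f["version"], f["architecture"],
                    f["source package"], f["source version"]])
    # hello 2.1.1-4+b1 i386 hello 2.1.1-4
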
@@ -427,10 +502,10 @@ distribution.""";
                     src = os.path.join(ql[0][0], ql[0][1]);
                     os.symlink(src, dest);
                     # Add it to the list of packages for later processing by apt-ftparchive
-                    self.projectB.query("INSERT INTO accepted_autobuild (suite, filename, in_accepted) VALUES (%s, '%s', 't')" % (suite_id, dest));
+                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest));
                 # if it does, update things to ensure it's not removed prematurely
                 else:
-                    self.projectB.query("UPDATE accepted_autobuild SET in_accepted = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id));
+                    self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id));
 
         self.projectB.query("COMMIT WORK");
 
@@ -452,7 +527,9 @@ distribution.""";
             return;
 
         summary = "";
-        for file in files.keys():
+        file_keys = files.keys();
+        file_keys.sort();
+        for file in file_keys:
             if not files[file].has_key("new") and files[file]["type"] == "deb":
                 section = files[file]["section"];
                 override_section = files[file]["override section"];
@@ -489,7 +566,7 @@ distribution.""";
                 continue;
             dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file);
             try:
-                os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
+                dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
             except OSError, e:
                 # File exists?  Let's try and move it to the morgue
                 if errno.errorcode[e.errno] == 'EEXIST':
@@ -503,7 +580,7 @@ distribution.""";
                         return;
                     utils.move(dest_file, morgue_file, perms=0660);
                     try:
-                        os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
+                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
                     except OSError, e:
                         # Likewise
                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file));
@@ -513,6 +590,7 @@ distribution.""";
             # If we got here, we own the destination file, so we can
             # safely overwrite it.
             utils.move(file, dest_file, 1, perms=0660);
+            os.close(dest_fd)
 
     ###########################################################################
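
force_reject() now keeps the descriptor returned by os.open() and closes it once the file is in place. The O_CREAT|O_EXCL combination is what makes the claim atomic: the call fails with EEXIST if anything already holds that name. A minimal standalone sketch of the idiom (the path is made up):

    import errno, os

    def claim(path):
        # Atomically create `path'; return an fd on success, None if it
        # already exists (someone else claimed it first).
        try:
            return os.open(path, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
        except OSError, e:
            if e.errno == errno.EEXIST:
                return None
            raise

    fd = claim("/tmp/example_1.0-1.changes.reject")
    if fd is not None:
        os.close(fd)
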
 
@@ -520,16 +598,14 @@ distribution.""";
         # If we weren't given a manual rejection message, spawn an
         # editor so the user can add one in...
         if manual and not reject_message:
-            temp_filename = tempfile.mktemp();
-            fd = os.open(temp_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700);
-            os.close(fd);
+            temp_filename = utils.temp_filename();
             editor = os.environ.get("EDITOR","vi")
             answer = 'E';
             while answer == 'E':
                 os.system("%s %s" % (editor, temp_filename))
-                file = utils.open_file(temp_filename);
-                reject_message = "".join(file.readlines());
-                file.close();
+                temp_fh = utils.open_file(temp_filename);
+                reject_message = "".join(temp_fh.readlines());
+                temp_fh.close();
                 print "Reject message:";
                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1);
                 prompt = "[R]eject, Edit, Abandon, Quit ?"
@@ -553,7 +629,7 @@ distribution.""";
         pkg = self.pkg;
 
         reason_filename = pkg.changes_file[:-8] + ".reason";
-        reject_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename;
+        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename;
 
         # Move all the files into the reject directory
         reject_files = pkg.files.keys() + [pkg.changes_file];
@@ -561,16 +637,15 @@ distribution.""";
 
         # If we fail here someone is probably trying to exploit the race
         # so let's just raise an exception ...
-        if os.path.exists(reject_filename):
-            os.unlink(reject_filename);
-        fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
+        if os.path.exists(reason_filename):
+            os.unlink(reason_filename);
+        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
 
         if not manual:
             Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];
             Subst["__MANUAL_REJECT_MESSAGE__"] = "";
             Subst["__CC__"] = "X-Katie-Rejection: automatic (moo)";
-            os.write(fd, reject_message);
-            os.close(fd);
+            os.write(reason_fd, reject_message);
             reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/katie.rejected");
         else:
             # Build up the rejection email
@@ -580,10 +655,10 @@ distribution.""";
             Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message;
             Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"];
             reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/katie.rejected");
-
             # Write the rejection email out as the <foo>.reason file
-            os.write(fd, reject_mail_message);
-            os.close(fd);
+            os.write(reason_fd, reject_mail_message);
+
+        os.close(reason_fd)
 
         # Send the rejection mail if appropriate
         if not Cnf["Dinstall::Options::No-Mail"]:
@@ -598,34 +673,50 @@ distribution.""";
     # upload being processed.
     #
     # (1) exact match                      => 1.0-3
-    # (2) Bin-only NMU of an MU            => 1.0-3.0.1
-    # (3) Bin-only NMU of a sourceful-NMU  => 1.0-3.1.1
-
-    def source_exists (self, package, source_version):
-        q = self.projectB.query("SELECT s.version FROM source s WHERE s.source = '%s'" % (package));
-
-        # Reduce the query results to a list of version numbers
-        ql = map(lambda x: x[0], q.getresult());
-
-        # Try (1)
-        if ql.count(source_version):
-            return 1;
-
-        # Try (2)
-        orig_source_version = re_bin_only_nmu_of_mu.sub('', source_version);
-        if ql.count(orig_source_version):
-            return 1;
-
-        # Try (3)
-        orig_source_version = re_bin_only_nmu_of_nmu.sub('', source_version);
-        if ql.count(orig_source_version):
-            return 1;
-
-        # No source found...
-        return 0;
+    # (2) Bin-only NMU                     => 1.0-3+b1 , 1.0-3.1+b1
+
+    def source_exists (self, package, source_version, suites = ["any"]):
+       okay = 1
+       for suite in suites:
+           if suite == "any":
+               que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
+                   (package)
+           else:
+               # source must exist in suite X, or in some other suite that's
+               # mapped to X, recursively... silent-maps are counted too,
+               # unreleased-maps aren't.
+               maps = self.Cnf.ValueList("SuiteMappings")[:]
+               maps.reverse()
+               maps = [ m.split() for m in maps ]
+               maps = [ (x[1], x[2]) for x in maps
+                               if x[0] == "map" or x[0] == "silent-map" ]
+               s = [suite]
+               for x in maps:
+                       if x[1] in s and x[0] not in s:
+                               s.append(x[0])
+
+               que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, string.join(["su.suite_name = '%s'" % a for a in s], " OR "));
+            q = self.projectB.query(que)
+
+            # Reduce the query results to a list of version numbers
+            ql = map(lambda x: x[0], q.getresult());
+
+            # Try (1)
+            if source_version in ql:
+                continue
+
+            # Try (2)
+            orig_source_version = re_bin_only_nmu.sub('', source_version)
+            if orig_source_version in ql:
+                continue
+
+            # No source found...
+            okay = 0
+           break
+       return okay
 
     ################################################################################
-
+    
     def in_override_p (self, package, component, suite, binary_type, file):
         files = self.pkg.files;
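
source_exists() can now be limited to particular suites, and for each requested suite it also accepts any suite that is map'd or silent-map'd onto it, recursively. A standalone sketch of that expansion with made-up SuiteMappings entries:

    maps = [ "silent-map stable-security proposed-updates",
             "map testing-proposed-updates testing" ]       # hypothetical config
    maps.reverse()
    maps = [ m.split() for m in maps ]
    maps = [ (x[1], x[2]) for x in maps if x[0] == "map" or x[0] == "silent-map" ]

    suite = "testing"
    s = [suite]
    for x in maps:
        if x[1] in s and x[0] not in s:
            s.append(x[0])
    print s
    # ['testing', 'testing-proposed-updates']
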
 
@@ -646,7 +737,7 @@ distribution.""";
         type_id = db_access.get_override_type_id(type);
 
         # FIXME: nasty non-US specific hack
-        if component[:7].lower() == "non-us/":
+        if component.lower().startswith("non-us/"):
             component = component[7:];
 
         q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
@@ -679,6 +770,17 @@ distribution.""";
 
     ################################################################################
 
+    def get_anyversion(self, query_result, suite):
+        anyversion=None
+        anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
+        for (v, s) in query_result:
+            if s in [ string.lower(x) for x in anysuite ]:
+                if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
+                    anyversion=v
+        return anyversion
+
+    ################################################################################
+
     def cross_suite_version_check(self, query_result, file, new_version):
         """Ensure versions are newer than existing packages in target
         suites and that cross-suite version checking rules as
@@ -695,11 +797,52 @@ distribution.""";
                 existent_version = entry[0];
                 suite = entry[1];
                 if suite in must_be_newer_than and \
-                   apt_pkg.VersionCompare(new_version, existent_version) != 1:
+                   apt_pkg.VersionCompare(new_version, existent_version) < 1:
                     self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite));
                 if suite in must_be_older_than and \
-                   apt_pkg.VersionCompare(new_version, existent_version) != -1:
-                    self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite));
+                   apt_pkg.VersionCompare(new_version, existent_version) > -1:
+                    ch = self.pkg.changes
+                    cansave = 0
+                    if ch.get('distribution-version', {}).has_key(suite):
+                        # we really use the other suite, ignoring the conflicting one ...
+                        addsuite = ch["distribution-version"][suite]
+                    
+                        add_version = self.get_anyversion(query_result, addsuite)
+                        target_version = self.get_anyversion(query_result, target_suite)
+                    
+                        if not add_version:
+                            # not add_version can only happen if we map to a suite
+                            # that doesn't enhance the suite we're propup'ing from.
+                            # so "propup-ver x a b c; map a d" is a problem only if
+                            # d doesn't enhance a.
+                            #
+                            # i think we could always propagate in this case, rather
+                            # than complaining. either way, this isn't a REJECT issue
+                            #
+                            # And - we really should complain to the dorks who configured dak
+                            self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
+                            self.pkg.changes.setdefault("propdistribution", {})
+                            self.pkg.changes["propdistribution"][addsuite] = 1
+                            cansave = 1
+                        elif not target_version:
+                            # not target_version is true when the package is NEW
+                            # we could just stick with the "...old version..." REJECT
+                            # for this, I think.
+                            self.reject("Won't propogate NEW packages.")
+                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
+                            # propagation would be redundant. no need to reject though.
+                            self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
+                            cansave = 1
+                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
+                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
+                            # propagate!!
+                            self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
+                            self.pkg.changes.setdefault("propdistribution", {})
+                            self.pkg.changes["propdistribution"][addsuite] = 1
+                            cansave = 1
+                
+                    if not cansave:
+                        self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
 
     ################################################################################
 
@@ -726,7 +869,7 @@ SELECT b.id FROM binaries b, architecture a
                                    files[file]["version"],
                                    files[file]["architecture"]))
         if q.getresult():
-            self.reject("can not overwrite existing copy of '%s' already in the archive." % (file));
+            self.reject("%s: can not overwrite existing copy already in the archive." % (file));
 
         return self.reject_message;
 
@@ -746,12 +889,20 @@ SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
 
     ################################################################################
 
+    # **WARNING**
+    # NB: this function can remove entries from the 'files' index [if
+    # the .orig.tar.gz is a duplicate of the one in the archive]; if
+    # you're iterating over 'files' and call this function as part of
+    # the loop, be sure to add a check to the top of the loop to
+    # ensure you haven't just tried to dereference the deleted entry.
+    # **WARNING**
+
     def check_dsc_against_db(self, file):
         self.reject_message = "";
         files = self.pkg.files;
         dsc_files = self.pkg.dsc_files;
         legacy_source_untouchable = self.pkg.legacy_source_untouchable;
-        orig_tar_gz = None;
+        self.pkg.orig_tar_gz = None;
 
         # Try and find all files mentioned in the .dsc.  This has
         # to work harder to cope with the multiple possible
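
The warning above matters because check_dsc_against_db() deletes files[dsc_file] when the uploaded .orig.tar.gz duplicates the copy already in the archive. A caller looping over the files dictionary therefore needs a membership check at the top of each iteration; roughly like this (hypothetical caller, the real loop lives in jennifer):

    def check_all_dsc_files(katie_instance, files):
        # `katie_instance' is a Katie object.
        for file in files.keys():
            # an earlier call may have removed this entry (duplicate .orig.tar.gz)
            if not files.has_key(file):
                continue
            if files[file]["type"] == "dsc":
                (reject_message, dummy) = katie_instance.check_dsc_against_db(file)
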
@@ -763,13 +914,11 @@ SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
                 actual_size = int(files[dsc_file]["size"]);
                 found = "%s in incoming" % (dsc_file)
                 # Check the file does not already exist in the archive
-                q = self.projectB.query("SELECT size, md5sum, filename FROM files WHERE filename LIKE '%%%s%%'" % (dsc_file));
-
+                q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file));
                 ql = q.getresult();
                 # Strip out anything that isn't '%s' or '/%s$'
                 for i in ql:
-                    if i[2] != dsc_file and i[2][-(len(dsc_file)+1):] != '/'+dsc_file:
-                        self.Logger.log(["check_dsc_against_db",i[2],dsc_file]);
+                    if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
                         ql.remove(i);
 
                 # "[katie] has not broken them.  [katie] has fixed a
@@ -791,6 +940,7 @@ SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
                                files[dsc_file]["md5sum"] == i[1]:
                                 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ");
                                 del files[dsc_file];
+                                self.pkg.orig_tar_gz = i[2] + i[3];
                                 match = 1;
 
                     if not match:
@@ -802,21 +952,21 @@ SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
                 # Strip out anything that isn't '%s' or '/%s$'
                 for i in ql:
                     if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
-                        self.Logger.log(["check_dsc_against_db",i[1],dsc_file]);
                         ql.remove(i);
 
                 if ql:
-                    # Unfortunately, we make get more than one
-                    # match here if, for example, the package was
-                    # in potato but had a -sa upload in woody.  So
-                    # we need to choose the right one.
+                    # Unfortunately, we may get more than one match here if,
+                    # for example, the package was in potato but had an -sa
+                    # upload in woody.  So we need to choose the right one.
 
                     x = ql[0]; # default to something sane in case we don't match any or have only one
 
                     if len(ql) > 1:
                         for i in ql:
                             old_file = i[0] + i[1];
-                            actual_md5 = apt_pkg.md5sum(utils.open_file(old_file));
+                            old_file_fh = utils.open_file(old_file)
+                            actual_md5 = apt_pkg.md5sum(old_file_fh);
+                            old_file_fh.close()
                             actual_size = os.stat(old_file)[stat.ST_SIZE];
                             if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
                                 x = i;
@@ -824,13 +974,16 @@ SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
                                 legacy_source_untouchable[i[3]] = "";
 
                     old_file = x[0] + x[1];
-                    actual_md5 = apt_pkg.md5sum(utils.open_file(old_file));
+                    old_file_fh = utils.open_file(old_file)
+                    actual_md5 = apt_pkg.md5sum(old_file_fh);
+                    old_file_fh.close()
                     actual_size = os.stat(old_file)[stat.ST_SIZE];
                     found = old_file;
                     suite_type = x[2];
                     dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
                     # See install() in katie...
                     self.pkg.orig_tar_id = x[3];
+                    self.pkg.orig_tar_gz = old_file;
                     if suite_type == "legacy" or suite_type == "legacy-mixed":
                         self.pkg.orig_tar_location = "legacy";
                     else:
@@ -846,12 +999,16 @@ SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
                         for dir in [ "Accepted", "New", "Byhand" ]:
                             in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file);
                             if os.path.exists(in_otherdir):
-                                actual_md5 = apt_pkg.md5sum(utils.open_file(in_otherdir));
+                                in_otherdir_fh = utils.open_file(in_otherdir)
+                                actual_md5 = apt_pkg.md5sum(in_otherdir_fh);
+                                in_otherdir_fh.close()
                                 actual_size = os.stat(in_otherdir)[stat.ST_SIZE];
                                 found = in_otherdir;
+                                self.pkg.orig_tar_gz = in_otherdir;
 
                     if not found:
                         self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file));
+                        self.pkg.orig_tar_gz = -1;
                         continue;
             else:
                 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file));
@@ -861,7 +1018,7 @@ SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
             if actual_size != int(dsc_files[dsc_file]["size"]):
                 self.reject("size for %s doesn't match %s." % (found, file));
 
-        return (self.reject_message, orig_tar_gz);
+        return (self.reject_message, None);
 
     def do_query(self, q):
         sys.stderr.write("query: \"%s\" ... " % (q));