git.decadent.org.uk Git - dak.git/blobdiff - daklib/queue.py
changes for DM support
[dak.git] / daklib / queue.py
index ea8f676aa66ea9e68003be4e9f3481835ecdc487..d77f5f357796c000998bb47a63601586c331bbe3 100644
@@ -45,56 +45,10 @@ class Pkg:
 
 ###############################################################################
 
-class nmu_p:
-    # Read in the group maintainer override file
-    def __init__ (self, Cnf):
-        self.group_maint = {}
-        self.Cnf = Cnf
-        if Cnf.get("Dinstall::GroupOverrideFilename"):
-            filename = Cnf["Dir::Override"] + Cnf["Dinstall::GroupOverrideFilename"]
-            file = utils.open_file(filename)
-            for line in file.readlines():
-                line = utils.re_comments.sub('', line).lower().strip()
-                if line != "":
-                    self.group_maint[line] = 1
-            file.close()
-
-    def is_an_nmu (self, pkg):
-        Cnf = self.Cnf
-        changes = pkg.changes
-        dsc = pkg.dsc
-
-        i = utils.fix_maintainer (dsc.get("maintainer",
-                                          Cnf["Dinstall::MyEmailAddress"]).lower())
-        (dsc_rfc822, dsc_rfc2047, dsc_name, dsc_email) = i
-        # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
-        if dsc_name == changes["maintainername"].lower() and \
-           (changes["changedby822"] == "" or changes["changedbyname"].lower() == dsc_name):
-            return 0
-
-        if dsc.has_key("uploaders"):
-            uploaders = dsc["uploaders"].lower().split(",")
-            uploadernames = {}
-            for i in uploaders:
-                (rfc822, rfc2047, name, email) = utils.fix_maintainer (i.strip())
-                uploadernames[name] = ""
-            if uploadernames.has_key(changes["changedbyname"].lower()):
-                return 0
-
-        # Some group maintained packages (e.g. Debian QA) are never NMU's
-        if self.group_maint.has_key(changes["maintaineremail"].lower()):
-            return 0
-
-        return 1
-
-###############################################################################
-
 class Upload:
 
     def __init__(self, Cnf):
         self.Cnf = Cnf
-        # Read in the group-maint override file
-        self.nmu = nmu_p(Cnf)
         self.accept_count = 0
         self.accept_bytes = 0L
         self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
@@ -180,7 +134,7 @@ class Upload:
                 d_changes[i] = changes[i]
         ## dsc
         for i in [ "source", "version", "maintainer", "fingerprint",
-                   "uploaders", "bts changelog" ]:
+                   "uploaders", "bts changelog", "dm-upload-allowed" ]:
             if dsc.has_key(i):
                 d_dsc[i] = dsc[i]
         ## dsc_files
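
The new "dm-upload-allowed" key carries the DM-Upload-Allowed field of the .dsc into the dumped changes data, so later processing can tell whether a Debian Maintainer upload is permitted for this source package. A minimal sketch of that kind of decision, assuming a hypothetical is_dm_fingerprint() lookup (the real checks live elsewhere in dak):

    def may_dm_upload(dsc, fingerprint, is_dm_fingerprint):
        # The source package has to opt in explicitly ...
        if dsc.get("dm-upload-allowed", "").strip().lower() != "yes":
            return False
        # ... and the signing key must belong to a Debian Maintainer.
        return is_dm_fingerprint(fingerprint)
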
@@ -272,8 +226,14 @@ class Upload:
                 files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
                 destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
                 summary += file + "\n  to " + destination + "\n"
+                if not files[file].has_key("type"):
+                    files[file]["type"] = "unknown"
                 if files[file]["type"] in ["deb", "udeb", "dsc"]:
-                    override_summary += "%s - %s %s\n" % (file, files[file]["priority"], files[file]["section"])
+                    # In queue/unchecked we already have override entries; use them.
+                    # In process-new we don't have override entries yet; use the newly generated ones.
+                    override_prio = files[file].get("override priority", files[file]["priority"])
+                    override_sect = files[file].get("override section", files[file]["section"])
+                    override_summary += "%s - %s %s\n" % (file, override_prio, override_sect)
 
         short_summary = summary
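
The override summary now prefers the override entries looked up from the database (available in queue/unchecked) and falls back to the priority and section announced by the upload itself (the process-new case). A small stand-alone illustration of that fallback, using made-up file data:

    files = {
        "example_1.0-1_all.deb": {
            "type": "deb",
            "priority": "optional", "section": "misc",                   # from the upload
            "override priority": "extra", "override section": "devel",   # from the DB, if any
        },
    }
    override_summary = ""
    for name in files.keys():
        f = files[name]
        f.setdefault("type", "unknown")      # same default as the hunk above
        if f["type"] in ["deb", "udeb", "dsc"]:
            prio = f.get("override priority", f["priority"])
            sect = f.get("override section", f["section"])
            override_summary += "%s - %s %s\n" % (name, prio, sect)
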
 
@@ -302,55 +262,26 @@ class Upload:
             return summary
 
         bugs.sort()
-        if not self.nmu.is_an_nmu(self.pkg):
-            if changes["distribution"].has_key("experimental"):
-               # tag bugs as fixed-in-experimental for uploads to experimental
-               summary += "Setting bugs to severity fixed: "
-               control_message = ""
-               for bug in bugs:
-                   summary += "%s " % (bug)
-                   control_message += "tag %s + fixed-in-experimental\n" % (bug)
-               if action and control_message != "":
-                   Subst["__CONTROL_MESSAGE__"] = control_message
-                   mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-experimental-fixed")
-                   utils.send_mail (mail_message)
-               if action:
-                   self.Logger.log(["setting bugs to fixed"]+bugs)
-
-
-           else:
-               summary += "Closing bugs: "
-               for bug in bugs:
-                   summary += "%s " % (bug)
-                   if action:
-                       Subst["__BUG_NUMBER__"] = bug
-                       if changes["distribution"].has_key("stable"):
-                           Subst["__STABLE_WARNING__"] = """
+        summary += "Closing bugs: "
+        for bug in bugs:
+            summary += "%s " % (bug)
+            if action:
+                Subst["__BUG_NUMBER__"] = bug
+                if changes["distribution"].has_key("stable"):
+                    Subst["__STABLE_WARNING__"] = """
 Note that this package is not part of the released stable Debian
 distribution.  It may have dependencies on other unreleased software,
 or other instabilities.  Please take care if you wish to install it.
 The update will eventually make its way into the next released Debian
 distribution."""
-                       else:
-                           Subst["__STABLE_WARNING__"] = ""
-                           mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
-                           utils.send_mail (mail_message)
-                if action:
-                    self.Logger.log(["closing bugs"]+bugs)
-
-       else:                     # NMU
-            summary += "Setting bugs to severity fixed: "
-            control_message = ""
-            for bug in bugs:
-                summary += "%s " % (bug)
-                control_message += "tag %s + fixed\n" % (bug)
-            if action and control_message != "":
-                Subst["__CONTROL_MESSAGE__"] = control_message
-                mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-nmu-fixed")
-                utils.send_mail (mail_message)
-            if action:
-                self.Logger.log(["setting bugs to fixed"]+bugs)
+                else:
+                    Subst["__STABLE_WARNING__"] = ""
+                mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
+                utils.send_mail (mail_message)
+        if action:
+            self.Logger.log(["closing bugs"]+bugs)
         summary += "\n"
+
         return summary
 
     ###########################################################################
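
The close mails above are built with utils.TemplateSubst(), which fills placeholders such as __BUG_NUMBER__ and __STABLE_WARNING__ in the process-unchecked.bug-close template. A rough stand-in, for illustration only (the real implementation lives in daklib/utils.py):

    def template_subst(subst_map, template_filename):
        # Read the mail template and replace every __KEY__ placeholder
        # with the corresponding value from the substitution map.
        f = open(template_filename)
        message = f.read()
        f.close()
        for key in subst_map.keys():
            message = message.replace(key, str(subst_map[key]))
        return message
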
@@ -913,6 +844,8 @@ SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
         # Try and find all files mentioned in the .dsc.  This has
         # to work harder to cope with the multiple possible
         # locations of an .orig.tar.gz.
+        # The ordering on the select is needed to pick the newest orig
+        # when it exists in multiple places.
         for dsc_file in dsc_files.keys():
             found = None
             if files.has_key(dsc_file):
@@ -920,7 +853,7 @@ SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
                 actual_size = int(files[dsc_file]["size"])
                 found = "%s in incoming" % (dsc_file)
                 # Check the file does not already exist in the archive
-                q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
+                q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
                 ql = q.getresult()
                 # Strip out anything that isn't '%s' or '/%s$'
                 for i in ql:
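
The new ORDER BY makes the query return the most recently added files row first, so when the same .orig.tar.gz name shows up in more than one location the filtering below ends up keeping the newest copy. With the interpolation spelled out for an assumed filename, the query reads:

    dsc_file = "example_1.0.orig.tar.gz"   # assumed name, purely for illustration
    query = ("SELECT f.size, f.md5sum, l.path, f.filename"
             " FROM files f, location l"
             " WHERE f.filename LIKE '%%%s%%' AND l.id = f.location"
             " ORDER BY f.id DESC" % (dsc_file))
    # -> ... WHERE f.filename LIKE '%example_1.0.orig.tar.gz%' ... ORDER BY f.id DESC
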
@@ -999,10 +932,12 @@ SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
 
                     in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
                     # See process_it() in 'dak process-unchecked' for explanation of this
-                    if os.path.exists(in_unchecked):
+                    # in_unchecked check dropped by ajt 2007-08-28, how did that
+                    # ever make sense?
+                    if os.path.exists(in_unchecked) and False:
                         return (self.reject_message, in_unchecked)
                     else:
-                        for dir in [ "Accepted", "New", "Byhand" ]:
+                        for dir in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
                             in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file)
                             if os.path.exists(in_otherdir):
                                 in_otherdir_fh = utils.open_file(in_otherdir)
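
With ProposedUpdates and OldProposedUpdates added, the loop simply probes every configured queue directory in turn and uses the first copy of the file it finds. Condensed into a helper, assuming a Cnf-style mapping of directory names (a sketch, not how dak structures this):

    import os

    def find_in_queues(cnf, dsc_file):
        # Probe each queue directory in the order listed; first hit wins.
        for d in ["Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates"]:
            candidate = os.path.join(cnf["Dir::Queue::%s" % (d)], dsc_file)
            if os.path.exists(candidate):
                return candidate
        return None
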