git.decadent.org.uk Git - dak.git/commitdiff
Merge commit 'lamby/lintian_autoreject' into merge
author    Joerg Jaspert <joerg@debian.org>
          Tue, 27 Oct 2009 20:46:30 +0000 (21:46 +0100)
committer Joerg Jaspert <joerg@debian.org>
          Tue, 27 Oct 2009 20:46:30 +0000 (21:46 +0100)
* commit 'lamby/lintian_autoreject':
  Disable check_lintian for now.
  Only lintian reject uploads to unstable or experimental
  Fix use of "wayout" name.
  dict[k] raises KeyError if the key does not exist - check with infix 'in' instead.
  Actually add a separator to our --tags-from-file input.
  lintian YAML has a "lintian" root element.
  Close sourcefile.
  Dedent again by returning if lintian doesn't return any content.
  Return all the lintian-related rejections, not just the first one.
  It's called 'next', not 'continue'. =)
  Dedent again by using continue.
  Remove one level of indentation by using continue
  Simple check for lintian regex
  Use set() instead of Perlesque hash[key] = 1
  autoreject
  Add lintian tags file

Signed-off-by: Joerg Jaspert <joerg@debian.org>
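
A minimal sketch (not dak code; all names are illustrative) of the Python
idioms the commit titles above refer to:

    seen = set()                        # set() instead of Perlesque hash[key] = 1
    seen.add("binary-without-manpage")

    # dict[k] raises KeyError for a missing key; membership is tested
    # with the infix 'in' operator instead.
    for tag in ["binary-without-manpage", "some-other-tag"]:
        if tag not in seen:
            continue                    # 'continue' drops one level of indentation
        print "autoreject candidate: %s" % (tag,)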
31 files changed:
dak/add_user.py
dak/admin.py
dak/check_archive.py
dak/clean_suites.py
dak/contents.py
dak/cruft_report.py
dak/dak.py
dak/dakdb/update15.py [new file with mode: 0644]
dak/generate_index_diffs.py
dak/import_keyring.py
dak/import_ldap_fingerprints.py
dak/make_pkg_file_mapping.py
dak/process_accepted.py
dak/process_new.py
dak/process_unchecked.py
dak/queue_report.py
dak/rm.py
dak/show_deferred.py
dak/show_new.py
dak/stats.py
dak/transitions.py
dak/update_db.py
daklib/changes.py
daklib/daklog.py
daklib/dbconn.py
daklib/queue.py
daklib/regexes.py
daklib/srcformats.py [new file with mode: 0644]
daklib/utils.py
tests/test_regexes.py
tests/test_srcformats.py [new file with mode: 0755]

diff --git a/dak/add_user.py b/dak/add_user.py
index 8da9dcdf3b6c5971ad8615a9a8e03666457003ad..77de3e3fb617290c769a2752d0e5db66b2e7234d 100755 (executable)
@@ -18,13 +18,9 @@ add his key to the GPGKeyring
 # I know what I say. I dont know python and I wrote it. So go and read some other stuff.
 
 import commands
-import re
 import sys
-import time
-import os
 import apt_pkg
 
-from daklib import daklog
 from daklib import utils
 from daklib.dbconn import DBConn, add_database_user, get_or_set_uid
 from daklib.regexes import re_gpg_fingerprint, re_user_address, re_user_mails, re_user_name
diff --git a/dak/admin.py b/dak/admin.py
index e3d5298a36bf8d903b562b659414e37baed2f8a1..eb765a660cb00ac807e0daaee0305539eeaf3662 100755 (executable)
@@ -25,7 +25,6 @@ import apt_pkg
 
 from daklib import utils
 from daklib.dbconn import *
-from daklib.config import Config
 
 ################################################################################
 
diff --git a/dak/check_archive.py b/dak/check_archive.py
index 6ca84c69823877eadf264edae87833e58a11ba4a..2162068e2b9d69b4885d0d5a10f2721092c9a939 100755 (executable)
@@ -40,7 +40,6 @@ import apt_inst
 
 from daklib.dbconn import *
 from daklib import utils
-from daklib.regexes import re_issource
 from daklib.config import Config
 
 ################################################################################
@@ -68,7 +67,7 @@ The following MODEs are available:
   missing-overrides  - check for missing overrides
   source-in-one-dir  - ensure the source for each package is in one directory
   timestamps         - check for future timestamps in .deb's
-  tar-gz-in-dsc      - ensure each .dsc lists a .tar.gz file
+  files-in-dsc       - ensure each .dsc references appropriate Files
   validate-indices   - ensure files mentioned in Packages & Sources exist
   files-not-symlinks - check files in the database aren't symlinks
   validate-builddeps - validate build-dependencies of .dsc files in the archive
@@ -320,9 +319,10 @@ def check_timestamps():
 
 ################################################################################
 
-def check_missing_tar_gz_in_dsc():
+def check_files_in_dsc():
     """
-    Ensure each .dsc lists a .tar.gz file
+    Ensure each .dsc lists appropriate files in its Files field (according
+    to the format announced in its Format field).
     """
     count = 0
 
@@ -343,19 +343,11 @@ def check_missing_tar_gz_in_dsc():
         except:
             utils.fubar("error parsing .dsc file '%s'." % (filename))
 
-        dsc_files = utils.build_file_list(dsc, is_a_dsc=1)
-        has_tar = 0
+        reasons = utils.check_dsc_files(filename, dsc)
+        for r in reasons:
+            utils.warn(r)
 
-        for f in dsc_files.keys():
-            m = re_issource.match(f)
-            if not m:
-                utils.fubar("%s not recognised as source." % (f))
-            ftype = m.group(3)
-            if ftype == "orig.tar.gz" or ftype == "tar.gz":
-                has_tar = 1
-
-        if not has_tar:
-            utils.warn("%s has no .tar.gz in the .dsc file." % (f))
+        if len(reasons) > 0:
             count += 1
 
     if count:
@@ -526,8 +518,8 @@ def main ():
         check_source_in_one_dir()
     elif mode == "timestamps":
         check_timestamps()
-    elif mode == "tar-gz-in-dsc":
-        check_missing_tar_gz_in_dsc()
+    elif mode == "files-in-dsc":
+        check_files_in_dsc()
     elif mode == "validate-indices":
         check_indices_files_exist()
     elif mode == "files-not-symlinks":
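
For readers of the hunk above: utils.check_dsc_files (daklib/utils.py is among
the files changed) appears to return a list of human-readable rejection
reasons, empty when the .dsc is fine. A hedged stub of that contract, assuming
only the behaviour visible here (format 1.0 needs a .tar.gz in Files):

    def check_dsc_files_stub(filename, dsc):
        # Returns rejection reasons; [] means the Files field matches
        # what the Format field announces.
        reasons = []
        if dsc.get("format", "1.0") == "1.0":
            names = dsc.get("files", [])
            if not any(n.endswith(".tar.gz") for n in names):
                reasons.append("%s has no .tar.gz in the .dsc file." % (filename,))
        return reasons

    print check_dsc_files_stub("foo_1.0.dsc", {"format": "1.0", "files": []})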
diff --git a/dak/clean_suites.py b/dak/clean_suites.py
index d40f67ecbccf54cf04a9e9e0626a9ac903bd03cc..dc4f019722dcdefc5267b49b485715a2906d0937 100755 (executable)
@@ -35,10 +35,12 @@ from datetime import datetime, timedelta
 from daklib.config import Config
 from daklib.dbconn import *
 from daklib import utils
+from daklib import daklog
 
 ################################################################################
 
 Options = None
+Logger = None
 
 ################################################################################
 
@@ -59,13 +61,13 @@ def check_binaries(now_date, delete_date, max_delete, session):
     # Get the list of binary packages not in a suite and mark them for
     # deletion.
 
-    # TODO: This can be a single SQL UPDATE statement
     q = session.execute("""
-SELECT b.file FROM binaries b, files f
+SELECT b.file, f.filename FROM binaries b, files f
  WHERE f.last_used IS NULL AND b.file = f.id
    AND NOT EXISTS (SELECT 1 FROM bin_associations ba WHERE ba.bin = b.id)""")
 
     for i in q.fetchall():
+        Logger.log(["set lastused", i[1]])
         session.execute("UPDATE files SET last_used = :lastused WHERE id = :fileid AND last_used IS NULL",
                         {'lastused': now_date, 'fileid': i[0]})
     session.commit()
@@ -73,13 +75,13 @@ SELECT b.file FROM binaries b, files f
     # Check for any binaries which are marked for eventual deletion
     # but are now used again.
 
-    # TODO: This can be a single SQL UPDATE statement
     q = session.execute("""
-SELECT b.file FROM binaries b, files f
+SELECT b.file, f.filename FROM binaries b, files f
    WHERE f.last_used IS NOT NULL AND f.id = b.file
     AND EXISTS (SELECT 1 FROM bin_associations ba WHERE ba.bin = b.id)""")
 
     for i in q.fetchall():
+        Logger.log(["unset lastused", i[1]])
         session.execute("UPDATE files SET last_used = NULL WHERE id = :fileid", {'fileid': i[0]})
     session.commit()
 
@@ -91,7 +93,7 @@ def check_sources(now_date, delete_date, max_delete, session):
     # Get the list of source packages not in a suite and not used by
     # any binaries.
     q = session.execute("""
-SELECT s.id, s.file FROM source s, files f
+SELECT s.id, s.file, f.filename FROM source s, files f
   WHERE f.last_used IS NULL AND s.file = f.id
     AND NOT EXISTS (SELECT 1 FROM src_associations sa WHERE sa.source = s.id)
     AND NOT EXISTS (SELECT 1 FROM binaries b WHERE b.source = s.id)""")
@@ -103,20 +105,24 @@ SELECT s.id, s.file FROM source s, files f
     for i in q.fetchall():
         source_id = i[0]
         dsc_file_id = i[1]
+        dsc_fname = i[2]
 
         # Mark the .dsc file for deletion
+        Logger.log(["set lastused", dsc_fname])
         session.execute("""UPDATE files SET last_used = :last_used
                                     WHERE id = :dscfileid AND last_used IS NULL""",
                         {'last_used': now_date, 'dscfileid': dsc_file_id})
 
         # Mark all other files references by .dsc too if they're not used by anyone else
-        x = session.execute("""SELECT f.id FROM files f, dsc_files d
+        x = session.execute("""SELECT f.id, f.filename FROM files f, dsc_files d
                               WHERE d.source = :sourceid AND d.file = f.id""",
                              {'sourceid': source_id})
         for j in x.fetchall():
             file_id = j[0]
+            file_name = j[1]
             y = session.execute("SELECT id FROM dsc_files d WHERE d.file = :fileid", {'fileid': file_id})
             if len(y.fetchall()) == 1:
+                Logger.log(["set lastused", file_name])
                 session.execute("""UPDATE files SET last_used = :lastused
                                   WHERE id = :fileid AND last_used IS NULL""",
                                 {'lastused': now_date, 'fileid': file_id})
@@ -127,7 +133,7 @@ SELECT s.id, s.file FROM source s, files f
     # are now used again.
 
     q = session.execute("""
-SELECT f.id FROM source s, files f, dsc_files df
+SELECT f.id, f.filename FROM source s, files f, dsc_files df
   WHERE f.last_used IS NOT NULL AND s.id = df.source AND df.file = f.id
     AND ((EXISTS (SELECT 1 FROM src_associations sa WHERE sa.source = s.id))
       OR (EXISTS (SELECT 1 FROM binaries b WHERE b.source = s.id)))""")
@@ -135,9 +141,8 @@ SELECT f.id FROM source s, files f, dsc_files df
     #### XXX: this should also handle deleted binaries specially (ie, not
     ####      reinstate sources because of them
 
-    # Could be done in SQL; but left this way for hysterical raisins
-    # [and freedom to innovate don'cha know?]
     for i in q.fetchall():
+        Logger.log(["unset lastused", i[1]])
         session.execute("UPDATE files SET last_used = NULL WHERE id = :fileid",
                         {'fileid': i[0]})
 
@@ -162,9 +167,10 @@ SELECT id, filename FROM files f
 
     ql = q.fetchall()
     if len(ql) > 0:
-        print "WARNING: check_files found something it shouldn't"
+        utils.warn("check_files found something it shouldn't")
         for x in ql:
-            print x
+            utils.warn("orphaned file: %s" % x)
+            Logger.log(["set lastused", x[1], "ORPHANED FILE"])
             session.execute("UPDATE files SET last_used = :lastused WHERE id = :fileid",
                             {'lastused': now_date, 'fileid': x[0]})
 
@@ -177,12 +183,13 @@ def clean_binaries(now_date, delete_date, max_delete, session):
     # XXX: why doesn't this remove the files here as well? I don't think it
     #      buys anything keeping this separate
     print "Cleaning binaries from the DB..."
+    print "Deleting from binaries table... "
+    for bin in session.query(DBBinary).join(DBBinary.poolfile).filter(PoolFile.last_used <= delete_date):
+        Logger.log(["delete binary", bin.poolfile.filename])
+        if not Options["No-Action"]:
+            session.delete(bin)
     if not Options["No-Action"]:
-        print "Deleting from binaries table... "
-        session.execute("""DELETE FROM binaries WHERE EXISTS
-                              (SELECT 1 FROM files WHERE binaries.file = files.id
-                                         AND files.last_used <= :deldate)""",
-                           {'deldate': delete_date})
+        session.commit()
 
 ########################################
 
@@ -200,41 +207,37 @@ def clean(now_date, delete_date, max_delete, session):
         os.mkdir(dest)
 
     # Delete from source
-    if not Options["No-Action"]:
-        print "Deleting from source table... "
-        session.execute("""DELETE FROM dsc_files
-                            WHERE EXISTS
-                               (SELECT 1 FROM source s, files f, dsc_files df
-                                 WHERE f.last_used <= :deletedate
-                                   AND s.file = f.id AND s.id = df.source
-                                   AND df.id = dsc_files.id)""", {'deletedate': delete_date})
-        session.execute("""DELETE FROM source
-                            WHERE EXISTS
-                               (SELECT 1 FROM files
-                                 WHERE source.file = files.id
-                                   AND files.last_used <= :deletedate)""", {'deletedate': delete_date})
+    print "Deleting from source table... "
+    q = session.execute("""
+SELECT s.id, f.filename FROM source s, files f
+  WHERE f.last_used <= :deletedate
+        AND s.file = f.id""", {'deletedate': delete_date})
+    for s in q.fetchall():
+        Logger.log(["delete source", s[1], s[0]])
+        if not Options["No-Action"]:
+            session.execute("DELETE FROM dsc_files WHERE source = :s_id", {"s_id":s[0]})
+            session.execute("DELETE FROM source WHERE id = :s_id", {"s_id":s[0]})
 
+    if not Options["No-Action"]:
         session.commit()
 
     # Delete files from the pool
-    query = """SELECT l.path, f.filename FROM location l, files f
-              WHERE f.last_used <= :deletedate AND l.id = f.location"""
+    old_files = session.query(PoolFile).filter(PoolFile.last_used <= delete_date)
     if max_delete is not None:
-        query += " LIMIT %d" % max_delete
+        old_files = old_files.limit(max_delete)
         print "Limiting removals to %d" % max_delete
 
-    q = session.execute(query, {'deletedate': delete_date})
-    for i in q.fetchall():
-        filename = i[0] + i[1]
+    for pf in old_files:
+        filename = os.path.join(pf.location.path, pf.filename)
         if not os.path.exists(filename):
             utils.warn("can not find '%s'." % (filename))
             continue
+        Logger.log(["delete pool file", filename])
         if os.path.isfile(filename):
             if os.path.islink(filename):
                 count += 1
-                if Options["No-Action"]:
-                    print "Removing symlink %s..." % (filename)
-                else:
+                Logger.log(["delete symlink", filename])
+                if not Options["No-Action"]:
                     os.unlink(filename)
             else:
                 size += os.stat(filename)[stat.ST_SIZE]
@@ -245,20 +248,17 @@ def clean(now_date, delete_date, max_delete, session):
                 if os.path.exists(dest_filename):
                     dest_filename = utils.find_next_free(dest_filename)
 
-                if Options["No-Action"]:
-                    print "Cleaning %s -> %s ..." % (filename, dest_filename)
-                else:
+                Logger.log(["move to morgue", filename, dest_filename])
+                if not Options["No-Action"]:
                     utils.move(filename, dest_filename)
+
+            if not Options["No-Action"]:
+                session.delete(pf)
+
         else:
             utils.fubar("%s is neither symlink nor file?!" % (filename))
 
-    # Delete from the 'files' table
-    # XXX: I've a horrible feeling that the max_delete stuff breaks here - mhy
-    # TODO: Change it so we do the DELETEs as we go; it'll be slower but
-    #       more reliable
     if not Options["No-Action"]:
-        print "Deleting from files table... "
-        session.execute("DELETE FROM files WHERE last_used <= :deletedate", {'deletedate': delete_date})
         session.commit()
 
     if count > 0:
@@ -271,7 +271,7 @@ def clean_maintainers(now_date, delete_date, max_delete, session):
 
     # TODO Replace this whole thing with one SQL statement
     q = session.execute("""
-SELECT m.id FROM maintainer m
+SELECT m.id, m.name FROM maintainer m
   WHERE NOT EXISTS (SELECT 1 FROM binaries b WHERE b.maintainer = m.id)
     AND NOT EXISTS (SELECT 1 FROM source s WHERE s.maintainer = m.id OR s.changedby = m.id)
     AND NOT EXISTS (SELECT 1 FROM src_uploaders u WHERE u.maintainer = m.id)""")
@@ -280,9 +280,10 @@ SELECT m.id FROM maintainer m
 
     for i in q.fetchall():
         maintainer_id = i[0]
+        Logger.log(["delete maintainer", i[1]])
         if not Options["No-Action"]:
             session.execute("DELETE FROM maintainer WHERE id = :maint", {'maint': maintainer_id})
-            count += 1
+        count += 1
 
     if not Options["No-Action"]:
         session.commit()
@@ -297,7 +298,7 @@ def clean_fingerprints(now_date, delete_date, max_delete, session):
 
     # TODO Replace this whole thing with one SQL statement
     q = session.execute("""
-SELECT f.id FROM fingerprint f
+SELECT f.id, f.fingerprint FROM fingerprint f
   WHERE f.keyring IS NULL
     AND NOT EXISTS (SELECT 1 FROM binaries b WHERE b.sig_fpr = f.id)
     AND NOT EXISTS (SELECT 1 FROM source s WHERE s.sig_fpr = f.id)""")
@@ -306,9 +307,10 @@ SELECT f.id FROM fingerprint f
 
     for i in q.fetchall():
         fingerprint_id = i[0]
+        Logger.log(["delete fingerprint", i[1]])
         if not Options["No-Action"]:
             session.execute("DELETE FROM fingerprint WHERE id = :fpr", {'fpr': fingerprint_id})
-            count += 1
+        count += 1
 
     if not Options["No-Action"]:
         session.commit()
@@ -330,24 +332,22 @@ def clean_queue_build(now_date, delete_date, max_delete, session):
     our_delete_date = now_date - timedelta(seconds = int(cnf["Clean-Suites::QueueBuildStayOfExecution"]))
     count = 0
 
-    q = session.execute("SELECT filename FROM queue_build WHERE last_used <= :deletedate",
-                        {'deletedate': our_delete_date})
-    for i in q.fetchall():
-        filename = i[0]
-        if not os.path.exists(filename):
-            utils.warn("%s (from queue_build) doesn't exist." % (filename))
+    for qf in session.query(QueueBuild).filter(QueueBuild.last_used <= our_delete_date):
+        if not os.path.exists(qf.filename):
+            utils.warn("%s (from queue_build) doesn't exist." % (qf.filename))
             continue
 
-        if not cnf.FindB("Dinstall::SecurityQueueBuild") and not os.path.islink(filename):
-            utils.fubar("%s (from queue_build) should be a symlink but isn't." % (filename))
+        if not cnf.FindB("Dinstall::SecurityQueueBuild") and not os.path.islink(qf.filename):
+            utils.fubar("%s (from queue_build) should be a symlink but isn't." % (qf.filename))
 
-        os.unlink(filename)
+        Logger.log(["delete queue build", qf.filename])
+        if not Options["No-Action"]:
+            os.unlink(qf.filename)
+            session.delete(qf)
         count += 1
 
-    session.execute("DELETE FROM queue_build WHERE last_used <= :deletedate",
-                    {'deletedate': our_delete_date})
-
-    session.commit()
+    if not Options["No-Action"]:
+        session.commit()
 
     if count:
         print "Cleaned %d queue_build files." % (count)
@@ -355,7 +355,7 @@ def clean_queue_build(now_date, delete_date, max_delete, session):
 ################################################################################
 
 def main():
-    global Options
+    global Options, Logger
 
     cnf = Config()
 
@@ -384,6 +384,8 @@ def main():
     if Options["Help"]:
         usage()
 
+    Logger = daklog.Logger(cnf, "clean-suites", debug=Options["No-Action"])
+
     session = DBConn().session()
 
     now_date = datetime.now()
@@ -398,6 +400,8 @@ def main():
     clean_fingerprints(now_date, delete_date, max_delete, session)
     clean_queue_build(now_date, delete_date, max_delete, session)
 
+    Logger.close()
+
 ################################################################################
 
 if __name__ == '__main__':
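
The recurring shape of the clean_suites.py changes above: every candidate row
is logged, and the destructive step (and the final commit) only happens when
No-Action is unset. A self-contained, dak-free sketch of that pattern:

    class StubLogger:
        def log(self, args):
            print "|".join([str(a) for a in args])

    Logger = StubLogger()
    Options = {"No-Action": True}           # dry run: log, don't touch

    for row_id, filename in [(42, "pool/main/f/foo/foo_1.0.dsc")]:
        Logger.log(["delete source", filename, row_id])
        if not Options["No-Action"]:
            pass                            # session.execute("DELETE ...") in dak
    if not Options["No-Action"]:
        pass                                # session.commit() in dak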
diff --git a/dak/contents.py b/dak/contents.py
index 9ac99951400e675aade746683566ebeaeaf73ce8..c435afc574879963c71b83218347fd744f2a3f7f 100755 (executable)
@@ -37,7 +37,6 @@ Create all the contents files
 import sys
 import os
 import logging
-import math
 import gzip
 import threading
 import Queue
diff --git a/dak/cruft_report.py b/dak/cruft_report.py
index cd63c2da52b04661f8eb18a52903354e8289cb82..63374859fa8019a10165ec318dea5cea14c38ce2 100755 (executable)
@@ -29,7 +29,7 @@
 
 ################################################################################
 
-import commands, os, sys, time, re
+import commands, os, sys, re
 import apt_pkg
 
 from daklib.config import Config
diff --git a/dak/dak.py b/dak/dak.py
index f3380091f32ec81537181a7aacb270493123c3a1..052f3b3ef7b8b7e48717116809771f20d80d75d6 100755 (executable)
@@ -34,7 +34,6 @@ G{importgraph}
 ################################################################################
 
 import sys
-import imp
 import daklib.utils
 
 ################################################################################
diff --git a/dak/dakdb/update15.py b/dak/dakdb/update15.py
new file mode 100644 (file)
index 0000000..535f9e6
--- /dev/null
@@ -0,0 +1,84 @@
+#!/usr/bin/env python
+# coding=utf8
+
+"""
+Adding table for allowed source formats
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2009  Raphael Hertzog <hertzog@debian.org>
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+################################################################################
+
+
+################################################################################
+
+import psycopg2
+import time
+from daklib.dak_exceptions import DBUpdateError
+
+################################################################################
+
+def do_update(self):
+    print "Adding tables listing allowed source formats"
+
+    try:
+        c = self.db.cursor()
+        c.execute("""
+            CREATE TABLE src_format (
+                    id SERIAL PRIMARY KEY,
+                    format_name TEXT NOT NULL,
+                    UNIQUE (format_name)
+            )
+        """)
+        c.execute("INSERT INTO src_format (format_name) VALUES('1.0')")
+        c.execute("INSERT INTO src_format (format_name) VALUES('3.0 (quilt)')")
+        c.execute("INSERT INTO src_format (format_name) VALUES('3.0 (native)')")
+
+        c.execute("""
+            CREATE TABLE suite_src_formats (
+                    suite INT4 NOT NULL REFERENCES suite(id),
+                    src_format INT4 NOT NULL REFERENCES src_format(id),
+                    PRIMARY KEY (suite, src_format)
+            )
+        """)
+
+        print "Authorize format 1.0 on all suites by default"
+        c.execute("SELECT id FROM suite")
+        suites = c.fetchall()
+        c.execute("SELECT id FROM src_format WHERE format_name = '1.0'")
+        formats = c.fetchall()
+        for s in suites:
+            for f in formats:
+                c.execute("INSERT INTO suite_src_formats (suite, src_format) VALUES(%s, %s)", (s[0], f[0]))
+
+        print "Authorize all other formats on tpu, unstable & experimental by default"
+        c.execute("SELECT id FROM suite WHERE suite_name IN ('testing-proposed-updates', 'unstable', 'experimental')")
+        suites = c.fetchall()
+        c.execute("SELECT id FROM src_format WHERE format_name != '1.0'")
+        formats = c.fetchall()
+        for s in suites:
+            for f in formats:
+                c.execute("INSERT INTO suite_src_formats (suite, src_format) VALUES(%s, %s)", (s[0], f[0]))
+
+        c.execute("UPDATE config SET value = '15' WHERE name = 'db_revision'")
+        self.db.commit()
+
+    except psycopg2.ProgrammingError, msg:
+        self.db.rollback()
+        raise DBUpdateError, "Unable to apply source format update 15, rollback issued. Error message : %s" % (str(msg))
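
Given the schema just created, the permission check the new tables enable
looks roughly like this (a sketch: the connection string and surrounding
daklib plumbing are assumptions, only the table layout comes from the
migration above):

    import psycopg2

    conn = psycopg2.connect("dbname=projectb")      # hypothetical DSN
    c = conn.cursor()
    c.execute("""
        SELECT 1
          FROM suite_src_formats ssf
          JOIN suite s      ON s.id = ssf.suite
          JOIN src_format f ON f.id = ssf.src_format
         WHERE s.suite_name = %s AND f.format_name = %s""",
        ('unstable', '3.0 (quilt)'))
    if c.fetchone():
        print "source format allowed in suite"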
diff --git a/dak/generate_index_diffs.py b/dak/generate_index_diffs.py
index 21c631b9a39ba25eef58d3f879bf7bdcc9a406c0..4222c0cf4fb9446b0bc19a7127dab4e9fe4da24e 100755 (executable)
@@ -34,7 +34,6 @@
 import sys
 import os
 import tempfile
-import subprocess
 import time
 import apt_pkg
 
diff --git a/dak/import_keyring.py b/dak/import_keyring.py
index 06597f85176c6077db21ad5200b48bb3ccb8134a..0b670357f2999c9d8d1a066e1290eaa24eda1f42 100755 (executable)
@@ -24,7 +24,6 @@ import apt_pkg, ldap, email.Utils
 
 from daklib.config import Config
 from daklib.dbconn import *
-from daklib import utils
 
 
 # Globals
diff --git a/dak/import_ldap_fingerprints.py b/dak/import_ldap_fingerprints.py
index ec27acbefa215a89e08ce283112732d9708d80de..337edb61303991879de5bc6a9f7e9827bc49b9c0 100755 (executable)
@@ -44,7 +44,7 @@
 
 ################################################################################
 
-import commands, ldap, re, sys
+import commands, ldap, sys
 import apt_pkg
 
 from daklib.config import Config
diff --git a/dak/make_pkg_file_mapping.py b/dak/make_pkg_file_mapping.py
index 38a6bec2aeb65eba5edbfb113181f68195b2cc73..c457820fc0faf53d1e0e53dcbb3bc80e80832313 100755 (executable)
@@ -31,9 +31,6 @@ and binary package version it has in a standard rfc2822-like format.
 
 ################################################################################
 
-import os
-import sys
-
 from daklib.dbconn import *
 
 ################################################################################
diff --git a/dak/process_accepted.py b/dak/process_accepted.py
index d7db1172f91be48c002c8200f9cc4ca2cb8165ef..b18346c83655e2023d0c160c7754b3ad5c4f169d 100755 (executable)
@@ -40,14 +40,12 @@ import fcntl
 import os
 import sys
 from datetime import datetime
-import re
-import apt_pkg, commands
+import apt_pkg
 
 from daklib import daklog
 from daklib.queue import *
 from daklib import utils
 from daklib.dbconn import *
-from daklib.binary import copy_temporary_contents
 from daklib.dak_exceptions import *
 from daklib.regexes import re_default_answer, re_issource, re_fdnic
 from daklib.urgencylog import UrgencyLog
@@ -210,7 +208,7 @@ def add_dsc_to_db(u, filename, session):
         df = DSCFile()
         df.source_id = source.source_id
 
-        # If the .orig.tar.gz is already in the pool, it's
+        # If the .orig tarball is already in the pool, it's
         # files id is stored in dsc_files by check_dsc().
         files_id = dentry.get("files id", None)
 
@@ -353,32 +351,37 @@ def install(u, session, log_urgency=True):
             add_deb_to_db(u, newfile, session)
 
     # If this is a sourceful diff only upload that is moving
-    # cross-component we need to copy the .orig.tar.gz into the new
+    # cross-component we need to copy the .orig files into the new
     # component too for the same reasons as above.
-    #
-    if u.pkg.changes["architecture"].has_key("source") and u.pkg.orig_tar_id and \
-       u.pkg.orig_tar_location != dsc_location_id:
-
-        oldf = get_poolfile_by_id(u.pkg.orig_tar_id, session)
-        old_filename = os.path.join(oldf.location.path, oldf.filename)
-        old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
-                   'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
-
-        new_filename = os.path.join(utils.poolify(u.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
-
-        # TODO: Care about size/md5sum collisions etc
-        (found, newf) = check_poolfile(new_filename, file_size, file_md5sum, dsc_location_id, session)
-
-        if newf is None:
-            utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
-            newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
-
-            # TODO: Check that there's only 1 here
-            source = get_sources_from_name(u.pkg.changes["source"], u.pkg.changes["version"])[0]
-            dscf = get_dscfiles(source_id = source.source_id, poolfile_id=u.pkg.orig_tar_id, session=session)[0]
-            dscf.poolfile_id = newf.file_id
-            session.add(dscf)
-            session.flush()
+    if u.pkg.changes["architecture"].has_key("source"):
+        for orig_file in u.pkg.orig_files.keys():
+            if not u.pkg.orig_files[orig_file].has_key("id"):
+                continue # Skip if it's not in the pool
+            orig_file_id = u.pkg.orig_files[orig_file]["id"]
+            if u.pkg.orig_files[orig_file]["location"] == dsc_location_id:
+                continue # Skip if the location didn't change
+
+            # Do the move
+            oldf = get_poolfile_by_id(orig_file_id, session)
+            old_filename = os.path.join(oldf.location.path, oldf.filename)
+            old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
+                       'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
+
+            new_filename = os.path.join(utils.poolify(u.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
+
+            # TODO: Care about size/md5sum collisions etc
+            (found, newf) = check_poolfile(new_filename, file_size, file_md5sum, dsc_location_id, session)
+
+            if newf is None:
+                utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
+                newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
+
+                # TODO: Check that there's only 1 here
+                source = get_sources_from_name(u.pkg.changes["source"], u.pkg.changes["version"])[0]
+                dscf = get_dscfiles(source_id=source.source_id, poolfile_id=orig_file_id, session=session)[0]
+                dscf.poolfile_id = newf.file_id
+                session.add(dscf)
+                session.flush()
 
     # Install the files into the pool
     for newfile, entry in u.pkg.files.items():
@@ -452,15 +455,17 @@ def install(u, session, log_urgency=True):
                     os.unlink(dest)
                 os.symlink(src, dest)
 
-        # Update last_used on any non-upload .orig.tar.gz symlink
-        if u.pkg.orig_tar_id:
+        # Update last_used on any non-uploaded .orig symlink
+        for orig_file in u.pkg.orig_files.keys():
             # Determine the .orig.tar.gz file name
-            for dsc_file in u.pkg.dsc_files.keys():
-                if dsc_file.endswith(".orig.tar.gz"):
-                    u.pkg.orig_tar_gz = os.path.join(dest_dir, dsc_file)
+            if not u.pkg.orig_files[orig_file].has_key("id"):
+                continue # Skip files not in the pool
+            # XXX: do we really want to update the orig_files dict here
+            # instead of using a temporary variable?
+            u.pkg.orig_files[orig_file]["path"] = os.path.join(dest_dir, orig_file)
 
             # Remove it from the list of packages for later processing by apt-ftparchive
-            qb = get_queue_build(u.pkg.orig_tar_gz, suite.suite_id, session)
+            qb = get_queue_build(u.pkg.orig_files[orig_file]["path"], suite.suite_id, session)
             if qb:
                 qb.in_queue = False
                 qb.last_used = now_date
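
The process_accepted.py hunks above replace the single
orig_tar_id/orig_tar_location pair with a dict keyed by filename. The shape
below is inferred from the accesses shown, not a documented API:

    orig_files = {
        "foo_1.0.orig.tar.gz": {
            "id": 1234,         # pool file id; only present if already in the pool
            "location": 7,      # location id of that pool file
            "path": "/srv/ftp/queue/buildd/foo_1.0.orig.tar.gz",
        },
    }

    for name, entry in orig_files.items():
        if "id" not in entry:
            continue            # skip .orig files that never reached the pool
        print "pool file %d backs %s" % (entry["id"], name)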
diff --git a/dak/process_new.py b/dak/process_new.py
index f15a56003f67ae82e5ec347328d94bdae8100125..1423d7056eb791f3b7aa5767a9f48bf60d37a4b5 100755 (executable)
@@ -6,6 +6,7 @@
 @contact: Debian FTP Master <ftpmaster@debian.org>
 @copyright: 2001, 2002, 2003, 2004, 2005, 2006  James Troup <james@nocrew.org>
 @copyright: 2009 Joerg Jaspert <joerg@debian.org>
+@copyright: 2009 Frank Lichtenheld <djpig@debian.org>
 @license: GNU General Public License version 2 or later
 """
 # This program is free software; you can redistribute it and/or modify
@@ -55,40 +56,30 @@ import pwd
 import apt_pkg, apt_inst
 import examine_package
 
-from daklib import database
+from daklib.dbconn import *
+from daklib.queue import *
 from daklib import daklog
-from daklib import queue
 from daklib import utils
 from daklib.regexes import re_no_epoch, re_default_answer, re_isanum
 from daklib.dak_exceptions import CantOpenError, AlreadyLockedError, CantGetLockError
 from daklib.summarystats import SummaryStats
+from daklib.config import Config
 
 # Globals
-Cnf = None       #: Configuration, apt_pkg.Configuration
 Options = None
-Upload = None
-projectB = None  #: database connection, pgobject
 Logger = None
 
 Priorities = None
 Sections = None
 
-reject_message = ""
-
 ################################################################################
 ################################################################################
 ################################################################################
 
-def reject (str, prefix="Rejected: "):
-    global reject_message
-    if str:
-        reject_message += prefix + str + "\n"
-
-def recheck():
-    global reject_message
-    files = Upload.pkg.files
-    reject_message = ""
+def recheck(upload, session):
+    files = upload.pkg.files
 
+    cnf = Config()
     for f in files.keys():
         # The .orig.tar.gz can disappear out from under us is it's a
         # duplicate of one in the archive.
@@ -98,32 +89,31 @@ def recheck():
         if files[f]["type"] == "deb":
             source_version = files[f]["source version"]
             source_package = files[f]["source package"]
-            if not Upload.pkg.changes["architecture"].has_key("source") \
-               and not Upload.source_exists(source_package, source_version, Upload.pkg.changes["distribution"].keys()):
+            if not upload.pkg.changes["architecture"].has_key("source") \
+               and not upload.source_exists(source_package, source_version, upload.pkg.changes["distribution"].keys()):
                 source_epochless_version = re_no_epoch.sub('', source_version)
                 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                 found = 0
                 for q in ["Accepted", "Embargoed", "Unembargoed", "Newstage"]:
-                    if Cnf.has_key("Dir::Queue::%s" % (q)):
-                        if os.path.exists(Cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
+                    if cnf.has_key("Dir::Queue::%s" % (q)):
+                        if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
                             found = 1
                 if not found:
-                    reject("no source found for %s %s (%s)." % (source_package, source_version, f))
+                    upload.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
 
         # Version and file overwrite checks
         if files[f]["type"] == "deb":
-            reject(Upload.check_binary_against_db(f), "")
+            upload.check_binary_against_db(f, session)
         elif files[f]["type"] == "dsc":
-            reject(Upload.check_source_against_db(f), "")
-            (reject_msg, is_in_incoming) = Upload.check_dsc_against_db(f)
-            reject(reject_msg, "")
+            upload.check_source_against_db(f, session)
+            upload.check_dsc_against_db(f, session)
 
-    if reject_message.find("Rejected") != -1:
+    if len(upload.rejects) > 0:
         answer = "XXX"
         if Options["No-Action"] or Options["Automatic"] or Options["Trainee"]:
             answer = 'S'
 
-        print "REJECT\n" + reject_message,
+        print "REJECT\n" + upload.rejects.join("\n"),
         prompt = "[R]eject, Skip, Quit ?"
 
         while prompt.find(answer) == -1:
@@ -134,8 +124,8 @@ def recheck():
             answer = answer[:1].upper()
 
         if answer == 'R':
-            Upload.do_reject(0, reject_message)
-            os.unlink(Upload.pkg.changes_file[:-8]+".dak")
+            upload.do_reject(manual=0, reject_message="\n".join(upload.rejects))
+            os.unlink(upload.pkg.changes_file[:-8]+".dak")
             return 0
         elif answer == 'S':
             return 0
@@ -186,7 +176,7 @@ def sg_compare (a, b):
     # Sort by time of oldest upload
     return cmp(a["oldest"], b["oldest"])
 
-def sort_changes(changes_files):
+def sort_changes(changes_files, session):
     """Sort into source groups, then sort each source group by version,
     have source, filename.  Finally, sort the source groups by have
     note, time of oldest upload of each source upload."""
@@ -197,11 +187,11 @@ def sort_changes(changes_files):
     cache = {}
     # Read in all the .changes files
     for filename in changes_files:
+        u = Upload()
         try:
-            Upload.pkg.changes_file = filename
-            Upload.init_vars()
-            Upload.update_vars()
-            cache[filename] = copy.copy(Upload.pkg.changes)
+            u.pkg.load_dot_dak(filename)
+            u.update_subst()
+            cache[filename] = copy.copy(u.pkg.changes)
             cache[filename]["filename"] = filename
         except:
             sorted_list.append(filename)
@@ -216,9 +206,8 @@ def sort_changes(changes_files):
         per_source[source]["list"].append(cache[filename])
     # Determine oldest time and have note status for each source group
     for source in per_source.keys():
-        q = projectB.query("SELECT 1 FROM source WHERE source = '%s'" % source)
-        ql = q.getresult()
-        per_source[source]["source_in_database"] = len(ql)>0
+        q = session.query(DBSource).filter_by(source = source).all()
+        per_source[source]["source_in_database"] = len(q)>0
         source_list = per_source[source]["list"]
         first = source_list[0]
         oldest = os.stat(first["filename"])[stat.ST_MTIME]
@@ -227,7 +216,7 @@ def sort_changes(changes_files):
             mtime = os.stat(d["filename"])[stat.ST_MTIME]
             if mtime < oldest:
                 oldest = mtime
-            have_note += (database.has_new_comment(d["source"], d["version"], True))
+            have_note += has_new_comment(d["source"], d["version"], session)
         per_source[source]["oldest"] = oldest
         if not have_note:
             per_source[source]["note_state"] = 0; # none
@@ -246,12 +235,11 @@ def sort_changes(changes_files):
 ################################################################################
 
 class Section_Completer:
-    def __init__ (self):
+    def __init__ (self, session):
         self.sections = []
         self.matches = []
-        q = projectB.query("SELECT section FROM section")
-        for i in q.getresult():
-            self.sections.append(i[0])
+        for s, in session.query(Section.section):
+            self.sections.append(s)
 
     def complete(self, text, state):
         if state == 0:
@@ -268,12 +256,11 @@ class Section_Completer:
 ############################################################
 
 class Priority_Completer:
-    def __init__ (self):
+    def __init__ (self, session):
         self.priorities = []
         self.matches = []
-        q = projectB.query("SELECT priority FROM priority")
-        for i in q.getresult():
-            self.priorities.append(i[0])
+        for p, in session.query(Priority.priority):
+            self.priorities.append(p)
 
     def complete(self, text, state):
         if state == 0:
@@ -289,9 +276,9 @@ class Priority_Completer:
 
 ################################################################################
 
-def print_new (new, indexed, file=sys.stdout):
-    queue.check_valid(new)
-    broken = 0
+def print_new (new, upload, indexed, file=sys.stdout):
+    check_valid(new)
+    broken = False
     index = 0
     for pkg in new.keys():
         index += 1
@@ -299,21 +286,22 @@ def print_new (new, indexed, file=sys.stdout):
         priority = new[pkg]["priority"]
         if new[pkg]["section id"] == -1:
             section += "[!]"
-            broken = 1
+            broken = True
         if new[pkg]["priority id"] == -1:
             priority += "[!]"
-            broken = 1
+            broken = True
         if indexed:
             line = "(%s): %-20s %-20s %-20s" % (index, pkg, priority, section)
         else:
             line = "%-20s %-20s %-20s" % (pkg, priority, section)
         line = line.strip()+'\n'
         file.write(line)
-    note = database.get_new_comments(Upload.pkg.changes.get("source"))
-    if len(note) > 0:
-        for line in note:
-            print line
-    return broken, note
+    notes = get_new_comments(upload.pkg.changes.get("source"))
+    for note in notes:
+        print "\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
+              % (note.author, note.version, note.notedate, note.comment)
+        print "-" * 72
+    return broken, len(notes) > 0
 
 ################################################################################
 
@@ -326,11 +314,11 @@ def index_range (index):
 ################################################################################
 ################################################################################
 
-def edit_new (new):
+def edit_new (new, upload):
     # Write the current data to a temporary file
     (fd, temp_filename) = utils.temp_filename()
     temp_file = os.fdopen(fd, 'w')
-    print_new (new, 0, temp_file)
+    print_new (new, upload, indexed=0, file=temp_file)
     temp_file.close()
     # Spawn an editor on that file
     editor = os.environ.get("EDITOR","vi")
@@ -360,14 +348,14 @@ def edit_new (new):
             if priority.endswith("[!]"):
                 priority = priority[:-3]
             for f in new[pkg]["files"]:
-                Upload.pkg.files[f]["section"] = section
-                Upload.pkg.files[f]["priority"] = priority
+                upload.pkg.files[f]["section"] = section
+                upload.pkg.files[f]["priority"] = priority
             new[pkg]["section"] = section
             new[pkg]["priority"] = priority
 
 ################################################################################
 
-def edit_index (new, index):
+def edit_index (new, upload, index):
     priority = new[index]["priority"]
     section = new[index]["section"]
     ftype = new[index]["type"]
@@ -426,19 +414,19 @@ def edit_index (new, index):
         readline.set_completer(None)
 
     for f in new[index]["files"]:
-        Upload.pkg.files[f]["section"] = section
-        Upload.pkg.files[f]["priority"] = priority
+        upload.pkg.files[f]["section"] = section
+        upload.pkg.files[f]["priority"] = priority
     new[index]["priority"] = priority
     new[index]["section"] = section
     return new
 
 ################################################################################
 
-def edit_overrides (new):
+def edit_overrides (new, upload, session):
     print
     done = 0
     while not done:
-        print_new (new, 1)
+        print_new (new, upload, indexed=1)
         new_index = {}
         index = 0
         for i in new.keys():
@@ -462,17 +450,17 @@ def edit_overrides (new):
                     got_answer = 1
 
         if answer == 'E':
-            edit_new(new)
+            edit_new(new, upload)
         elif answer == 'D':
             done = 1
         else:
-            edit_index (new, new_index[answer])
+            edit_index (new, upload, new_index[answer])
 
     return new
 
 ################################################################################
 
-def edit_note(note):
+def edit_note(note, upload, session):
     # Write the current data to a temporary file
     (fd, temp_filename) = utils.temp_filename()
     editor = os.environ.get("EDITOR","vi")
@@ -499,19 +487,26 @@ def edit_note(note):
         end()
         sys.exit(0)
 
-    database.add_new_comment(Upload.pkg.changes["source"], Upload.pkg.changes["version"], newnote, utils.whoami(), bool(Options["Trainee"]))
+    comment = NewComment()
+    comment.package = upload.pkg.changes["source"]
+    comment.version = upload.pkg.changes["version"]
+    comment.comment = newnote
+    comment.author  = utils.whoami()
+    comment.trainee = bool(Options["Trainee"])
+    session.add(comment)
+    session.commit()
 
 ################################################################################
 
-def check_pkg ():
+def check_pkg (upload):
     try:
         less_fd = os.popen("less -R -", 'w', 0)
         stdout_fd = sys.stdout
         try:
             sys.stdout = less_fd
-            changes = utils.parse_changes (Upload.pkg.changes_file)
-            examine_package.display_changes(changes['distribution'], Upload.pkg.changes_file)
-            files = Upload.pkg.files
+            changes = utils.parse_changes (upload.pkg.changes_file)
+            examine_package.display_changes(changes['distribution'], upload.pkg.changes_file)
+            files = upload.pkg.files
             for f in files.keys():
                 if files[f].has_key("new"):
                     ftype = files[f]["type"]
@@ -536,8 +531,8 @@ def check_pkg ():
 
 ## FIXME: horribly Debian specific
 
-def do_bxa_notification():
-    files = Upload.pkg.files
+def do_bxa_notification(upload):
+    files = upload.pkg.files
     summary = ""
     for f in files.keys():
         if files[f]["type"] == "deb":
@@ -545,40 +540,41 @@ def do_bxa_notification():
             summary += "\n"
             summary += "Package: %s\n" % (control.Find("Package"))
             summary += "Description: %s\n" % (control.Find("Description"))
-    Upload.Subst["__BINARY_DESCRIPTIONS__"] = summary
-    bxa_mail = utils.TemplateSubst(Upload.Subst,Cnf["Dir::Templates"]+"/process-new.bxa_notification")
+    upload.Subst["__BINARY_DESCRIPTIONS__"] = summary
+    bxa_mail = utils.TemplateSubst(upload.Subst,Config()["Dir::Templates"]+"/process-new.bxa_notification")
     utils.send_mail(bxa_mail)
 
 ################################################################################
 
-def add_overrides (new):
-    changes = Upload.pkg.changes
-    files = Upload.pkg.files
+def add_overrides (new, upload, session):
+    changes = upload.pkg.changes
+    files = upload.pkg.files
     srcpkg = changes.get("source")
 
-    projectB.query("BEGIN WORK")
     for suite in changes["suite"].keys():
-        suite_id = database.get_suite_id(suite)
+        suite_id = get_suite(suite).suite_id
         for pkg in new.keys():
-            component_id = database.get_component_id(new[pkg]["component"])
-            type_id = database.get_override_type_id(new[pkg]["type"])
+            component_id = get_component(new[pkg]["component"]).component_id
+            type_id = get_override_type(new[pkg]["type"]).overridetype_id
             priority_id = new[pkg]["priority id"]
             section_id = new[pkg]["section id"]
             Logger.log(["%s overrides" % (srcpkg), suite, new[pkg]["component"], new[pkg]["type"], new[pkg]["priority"], new[pkg]["section"]])
-            projectB.query("INSERT INTO override (suite, component, type, package, priority, section, maintainer) VALUES (%s, %s, %s, '%s', %s, %s, '')" % (suite_id, component_id, type_id, pkg, priority_id, section_id))
+            session.execute("INSERT INTO override (suite, component, type, package, priority, section, maintainer) VALUES (:sid, :cid, :tid, :pkg, :pid, :sectid, '')",
+                            { 'sid': suite_id, 'cid': component_id, 'tid':type_id, 'pkg': pkg, 'pid': priority_id, 'sectid': section_id})
             for f in new[pkg]["files"]:
                 if files[f].has_key("new"):
                     del files[f]["new"]
             del new[pkg]
 
-    projectB.query("COMMIT WORK")
+    session.commit()
 
-    if Cnf.FindB("Dinstall::BXANotify"):
-        do_bxa_notification()
+    if Config().FindB("Dinstall::BXANotify"):
+        do_bxa_notification(upload)
 
 ################################################################################
 
-def prod_maintainer (note):
+def prod_maintainer (note, upload):
+    cnf = Config()
     # Here we prepare an editor and get them ready to prod...
     (fd, temp_filename) = utils.temp_filename()
     temp_file = os.fdopen(fd, 'w')
@@ -611,39 +607,40 @@ def prod_maintainer (note):
         sys.exit(0)
     # Otherwise, do the proding...
     user_email_address = utils.whoami() + " <%s>" % (
-        Cnf["Dinstall::MyAdminAddress"])
+        cnf["Dinstall::MyAdminAddress"])
 
-    Subst = Upload.Subst
+    Subst = upload.Subst
 
     Subst["__FROM_ADDRESS__"] = user_email_address
     Subst["__PROD_MESSAGE__"] = prod_message
-    Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
+    Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
 
     prod_mail_message = utils.TemplateSubst(
-        Subst,Cnf["Dir::Templates"]+"/process-new.prod")
+        Subst,cnf["Dir::Templates"]+"/process-new.prod")
 
     # Send the prod mail if appropriate
-    if not Cnf["Dinstall::Options::No-Mail"]:
+    if not cnf["Dinstall::Options::No-Mail"]:
         utils.send_mail(prod_mail_message)
 
     print "Sent proding message"
 
 ################################################################################
 
-def do_new():
+def do_new(upload, session):
     print "NEW\n"
-    files = Upload.pkg.files
-    changes = Upload.pkg.changes
+    files = upload.pkg.files
+    changes = upload.pkg.changes
+    cnf = Config()
 
     # Make a copy of distribution we can happily trample on
     changes["suite"] = copy.copy(changes["distribution"])
 
     # Fix up the list of target suites
     for suite in changes["suite"].keys():
-        override = Cnf.Find("Suite::%s::OverrideSuite" % (suite))
+        override = cnf.Find("Suite::%s::OverrideSuite" % (suite))
         if override:
-            (olderr, newerr) = (database.get_suite_id(suite) == -1,
-              database.get_suite_id(override) == -1)
+            (olderr, newerr) = (get_suite(suite, session) == None,
+                                get_suite(override, session) == None)
             if olderr or newerr:
                 (oinv, newinv) = ("", "")
                 if olderr: oinv = "invalid "
@@ -654,15 +651,14 @@ def do_new():
             changes["suite"][override] = 1
     # Validate suites
     for suite in changes["suite"].keys():
-        suite_id = database.get_suite_id(suite)
-        if suite_id == -1:
+        if get_suite(suite, session) is None:
             utils.fubar("%s has invalid suite '%s' (possibly overriden).  say wha?" % (changes, suite))
 
     # The main NEW processing loop
     done = 0
     while not done:
         # Find out what's new
-        new = queue.determine_new(changes, files, projectB)
+        new = determine_new(changes, files)
 
         if not new:
             break
@@ -671,7 +667,7 @@ def do_new():
         if Options["No-Action"] or Options["Automatic"]:
             answer = 'S'
 
-        (broken, note) = print_new(new, 0)
+        (broken, note) = print_new(new, upload, indexed=0)
         prompt = ""
 
         if not broken and not note:
@@ -691,39 +687,50 @@ def do_new():
                 answer = m.group(1)
             answer = answer[:1].upper()
 
+        if answer in ( 'A', 'E', 'M', 'O', 'R' ) and Options["Trainee"]:
+            utils.warn("Trainees can't do that")
+            continue
+
         if answer == 'A' and not Options["Trainee"]:
             try:
                 check_daily_lock()
-                done = add_overrides (new)
-                Logger.log([utils.getusername(), "NEW ACCEPT: %s" % (Upload.pkg.changes_file)])
+                done = add_overrides (new, upload, session)
+                Logger.log(["NEW ACCEPT: %s" % (upload.pkg.changes_file)])
             except CantGetLockError:
                 print "Hello? Operator! Give me the number for 911!"
                 print "Dinstall in the locked area, cant process packages, come back later"
         elif answer == 'C':
-            check_pkg()
+            check_pkg(upload)
         elif answer == 'E' and not Options["Trainee"]:
-            new = edit_overrides (new)
+            new = edit_overrides (new, upload, session)
         elif answer == 'M' and not Options["Trainee"]:
-            aborted = Upload.do_reject(manual=1,
+            aborted = upload.do_reject(manual=1,
                                        reject_message=Options["Manual-Reject"],
-                                       note=database.get_new_comments(changes.get("source", "")))
+                                       note=get_new_comments(changes.get("source", ""), session=session))
             if not aborted:
-                Logger.log([utils.getusername(), "NEW REJECT: %s" % (Upload.pkg.changes_file)])
-                os.unlink(Upload.pkg.changes_file[:-8]+".dak")
+                Logger.log(["NEW REJECT: %s" % (upload.pkg.changes_file)])
+                os.unlink(upload.pkg.changes_file[:-8]+".dak")
                 done = 1
         elif answer == 'N':
-            edit_note(database.get_new_comments(changes.get("source", "")))
+            edit_note(get_new_comments(changes.get("source", ""), session=session),
+                      upload, session)
         elif answer == 'P' and not Options["Trainee"]:
-            prod_maintainer(database.get_new_comments(changes.get("source", "")))
-            Logger.log([utils.getusername(), "NEW PROD: %s" % (Upload.pkg.changes_file)])
+            prod_maintainer(get_new_comments(changes.get("source", ""), session=session),
+                            upload)
+            Logger.log(["NEW PROD: %s" % (upload.pkg.changes_file)])
         elif answer == 'R' and not Options["Trainee"]:
             confirm = utils.our_raw_input("Really clear note (y/N)? ").lower()
             if confirm == "y":
-                database.delete_new_comments(changes.get("source"), changes.get("version"))
+                for c in get_new_comments(changes.get("source", ""), changes.get("version", ""), session=session):
+                    session.delete(c)
+                session.commit()
         elif answer == 'O' and not Options["Trainee"]:
             confirm = utils.our_raw_input("Really clear all notes (y/N)? ").lower()
             if confirm == "y":
-                database.delete_all_new_comments(changes.get("source"))
+                for c in get_new_comments(changes.get("source", ""), session=session):
+                    session.delete(c)
+                session.commit()
+
         elif answer == 'S':
             done = 1
         elif answer == 'Q':
@@ -747,53 +754,10 @@ def usage (exit_code=0):
 
 ################################################################################
 
-def init():
-    global Cnf, Options, Logger, Upload, projectB, Sections, Priorities
-
-    Cnf = utils.get_conf()
-
-    Arguments = [('a',"automatic","Process-New::Options::Automatic"),
-                 ('h',"help","Process-New::Options::Help"),
-                 ('C',"comments-dir","Process-New::Options::Comments-Dir", "HasArg"),
-                 ('m',"manual-reject","Process-New::Options::Manual-Reject", "HasArg"),
-                 ('t',"trainee","Process-New::Options::Trainee"),
-                 ('n',"no-action","Process-New::Options::No-Action")]
-
-    for i in ["automatic", "help", "manual-reject", "no-action", "version", "comments-dir", "trainee"]:
-        if not Cnf.has_key("Process-New::Options::%s" % (i)):
-            Cnf["Process-New::Options::%s" % (i)] = ""
-
-    changes_files = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
-    if len(changes_files) == 0 and not Cnf.get("Process-New::Options::Comments-Dir",""):
-        changes_files = utils.get_changes_files(Cnf["Dir::Queue::New"])
-
-    Options = Cnf.SubTree("Process-New::Options")
-
-    if Options["Help"]:
-        usage()
-
-    Upload = queue.Upload(Cnf)
-
-    if not Options["No-Action"]:
-        try:
-            Logger = Upload.Logger = daklog.Logger(Cnf, "process-new")
-        except CantOpenError, e:
-            Options["Trainee"] = "True"
-
-    projectB = Upload.projectB
-
-    Sections = Section_Completer()
-    Priorities = Priority_Completer()
-    readline.parse_and_bind("tab: complete")
-
-    return changes_files
-
-################################################################################
-
-def do_byhand():
+def do_byhand(upload, session):
     done = 0
     while not done:
-        files = Upload.pkg.files
+        files = upload.pkg.files
         will_install = 1
         byhand = []
 
@@ -828,14 +792,14 @@ def do_byhand():
                 done = 1
                 for f in byhand:
                     del files[f]
-                Logger.log([utils.getusername(), "BYHAND ACCEPT: %s" % (Upload.pkg.changes_file)])
+                Logger.log(["BYHAND ACCEPT: %s" % (upload.pkg.changes_file)])
             except CantGetLockError:
                 print "Hello? Operator! Give me the number for 911!"
                 print "Dinstall in the locked area, cant process packages, come back later"
         elif answer == 'M':
-            Logger.log([utils.getusername(), "BYHAND REJECT: %s" % (Upload.pkg.changes_file)])
-            Upload.do_reject(1, Options["Manual-Reject"])
-            os.unlink(Upload.pkg.changes_file[:-8]+".dak")
+            Logger.log(["BYHAND REJECT: %s" % (upload.pkg.changes_file)])
+            upload.do_reject(manual=1, reject_message=Options["Manual-Reject"])
+            os.unlink(upload.pkg.changes_file[:-8]+".dak")
             done = 1
         elif answer == 'S':
             done = 1
@@ -850,13 +814,15 @@ def check_daily_lock():
     Raises CantGetLockError if the dinstall daily.lock exists.
     """
 
+    cnf = Config()
     try:
-        os.open(Cnf["Process-New::DinstallLockFile"],  os.O_RDONLY | os.O_CREAT | os.O_EXCL)
+        os.open(cnf["Process-New::DinstallLockFile"],
+                os.O_RDONLY | os.O_CREAT | os.O_EXCL)
     except OSError, e:
         if e.errno == errno.EEXIST or e.errno == errno.EACCES:
             raise CantGetLockError
 
-    os.unlink(Cnf["Process-New::DinstallLockFile"])
+    os.unlink(cnf["Process-New::DinstallLockFile"])
 
 
 @contextlib.contextmanager
@@ -868,7 +834,7 @@ def lock_package(package):
     @param package: source package name to lock
     """
 
-    path = os.path.join(Cnf["Process-New::LockDir"], package)
+    path = os.path.join(Config()["Process-New::LockDir"], package)
     try:
         fd = os.open(path, os.O_CREAT | os.O_EXCL | os.O_RDONLY)
     except OSError, e:
@@ -881,115 +847,118 @@ def lock_package(package):
     finally:
         os.unlink(path)
 
-def move_to_dir (dest, perms=0660, changesperms=0664):
-    utils.move (Upload.pkg.changes_file, dest, perms=changesperms)
-    file_keys = Upload.pkg.files.keys()
-    for f in file_keys:
-        utils.move (f, dest, perms=perms)
-
-def is_source_in_queue_dir(qdir):
-    entries = [ x for x in os.listdir(qdir) if x.startswith(Upload.pkg.changes["source"])
-                and x.endswith(".changes") ]
-    for entry in entries:
-        # read the .dak
-        u = queue.Upload(Cnf)
-        u.pkg.changes_file = os.path.join(qdir, entry)
-        u.update_vars()
-        if not u.pkg.changes["architecture"].has_key("source"):
-            # another binary upload, ignore
-            continue
-        if Upload.pkg.changes["version"] != u.pkg.changes["version"]:
-            # another version, ignore
-            continue
-        # found it!
-        return True
-    return False
-
-def move_to_holding(suite, queue_dir):
-    print "Moving to %s holding area." % (suite.upper(),)
-    if Options["No-Action"]:
-       return
-    Logger.log(["Moving to %s" % (suite,), Upload.pkg.changes_file])
-    Upload.dump_vars(queue_dir)
-    move_to_dir(queue_dir, perms=0664)
-    os.unlink(Upload.pkg.changes_file[:-8]+".dak")
-
-def _accept():
+# def move_to_dir (upload, dest, perms=0660, changesperms=0664):
+#     utils.move (upload.pkg.changes_file, dest, perms=changesperms)
+#     file_keys = upload.pkg.files.keys()
+#     for f in file_keys:
+#         utils.move (f, dest, perms=perms)
+
+def is_source_in_queue_dir(qdir):
+    entries = [ x for x in os.listdir(qdir) if x.startswith(Upload.pkg.changes["source"])
+                and x.endswith(".changes") ]
+    for entry in entries:
+        # read the .dak
+        u = queue.Upload(Cnf)
+        u.pkg.changes_file = os.path.join(qdir, entry)
+        u.update_vars()
+        if not u.pkg.changes["architecture"].has_key("source"):
+            # another binary upload, ignore
+            continue
+        if Upload.pkg.changes["version"] != u.pkg.changes["version"]:
+            # another version, ignore
+            continue
+        # found it!
+        return True
+    return False
+
+def move_to_holding(suite, queue_dir):
+    print "Moving to %s holding area." % (suite.upper(),)
+    if Options["No-Action"]:
+#      return
+    Logger.log(["Moving to %s" % (suite,), Upload.pkg.changes_file])
+    Upload.dump_vars(queue_dir)
+    move_to_dir(queue_dir, perms=0664)
+    os.unlink(Upload.pkg.changes_file[:-8]+".dak")
+
+def _accept(upload):
     if Options["No-Action"]:
         return
-    (summary, short_summary) = Upload.build_summaries()
-    Upload.accept(summary, short_summary, targetdir=Cnf["Dir::Queue::Newstage"])
-    os.unlink(Upload.pkg.changes_file[:-8]+".dak")
-
-def do_accept_stableupdate(suite, q):
-    queue_dir = Cnf["Dir::Queue::%s" % (q,)]
-    if not Upload.pkg.changes["architecture"].has_key("source"):
-        # It is not a sourceful upload.  So its source may be either in p-u
-        # holding, in new, in accepted or already installed.
-        if is_source_in_queue_dir(queue_dir):
-            # It's in p-u holding, so move it there.
-            print "Binary-only upload, source in %s." % (q,)
-            move_to_holding(suite, queue_dir)
-        elif Upload.source_exists(Upload.pkg.changes["source"],
-                Upload.pkg.changes["version"]):
-            # dak tells us that there is source available.  At time of
-            # writing this means that it is installed, so put it into
-            # accepted.
-            print "Binary-only upload, source installed."
-            Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
-            _accept()
-        elif is_source_in_queue_dir(Cnf["Dir::Queue::Accepted"]):
-            # The source is in accepted, the binary cleared NEW: accept it.
-            print "Binary-only upload, source in accepted."
-            Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
-            _accept()
-        elif is_source_in_queue_dir(Cnf["Dir::Queue::New"]):
-            # It's in NEW.  We expect the source to land in p-u holding
-            # pretty soon.
-            print "Binary-only upload, source in new."
-            move_to_holding(suite, queue_dir)
-        elif is_source_in_queue_dir(Cnf["Dir::Queue::Newstage"]):
-            # It's in newstage.  Accept into the holding area
-            print "Binary-only upload, source in newstage."
-            Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
-            _accept()
-        else:
-            # No case applicable.  Bail out.  Return will cause the upload
-            # to be skipped.
-            print "ERROR"
-            print "Stable update failed.  Source not found."
-            return
-    else:
-        # We are handling a sourceful upload.  Move to accepted if currently
-        # in p-u holding and to p-u holding otherwise.
-        if is_source_in_queue_dir(queue_dir):
-            print "Sourceful upload in %s, accepting." % (q,)
-            _accept()
-        else:
-            move_to_holding(suite, queue_dir)
-
-def do_accept():
+    (summary, short_summary) = upload.build_summaries()
+    upload.accept(summary, short_summary, targetdir=Config()["Dir::Queue::Newstage"])
+    os.unlink(upload.pkg.changes_file[:-8]+".dak")
+
+# def do_accept_stableupdate(upload,suite, q):
+#     cnf = Config()
+#     queue_dir = cnf["Dir::Queue::%s" % (q,)]
+#     if not upload.pkg.changes["architecture"].has_key("source"):
+#         # It is not a sourceful upload.  So its source may be either in p-u
+#         # holding, in new, in accepted or already installed.
+#         if is_source_in_queue_dir(queue_dir):
+#             # It's in p-u holding, so move it there.
+#             print "Binary-only upload, source in %s." % (q,)
+#             move_to_holding(suite, queue_dir)
+#         elif Upload.source_exists(Upload.pkg.changes["source"],
+#                 Upload.pkg.changes["version"]):
+#             # dak tells us that there is source available.  At time of
+#             # writing this means that it is installed, so put it into
+#             # accepted.
+#             print "Binary-only upload, source installed."
+#             Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
+#             _accept()
+#         elif is_source_in_queue_dir(Cnf["Dir::Queue::Accepted"]):
+#             # The source is in accepted, the binary cleared NEW: accept it.
+#             print "Binary-only upload, source in accepted."
+#             Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
+#             _accept()
+#         elif is_source_in_queue_dir(Cnf["Dir::Queue::New"]):
+#             # It's in NEW.  We expect the source to land in p-u holding
+#             # pretty soon.
+#             print "Binary-only upload, source in new."
+#             move_to_holding(suite, queue_dir)
+#         elif is_source_in_queue_dir(Cnf["Dir::Queue::Newstage"]):
+#             # It's in newstage.  Accept into the holding area
+#             print "Binary-only upload, source in newstage."
+#             Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
+#             _accept()
+#         else:
+#             # No case applicable.  Bail out.  Return will cause the upload
+#             # to be skipped.
+#             print "ERROR"
+#             print "Stable update failed.  Source not found."
+#             return
+#     else:
+#         # We are handling a sourceful upload.  Move to accepted if currently
+#         # in p-u holding and to p-u holding otherwise.
+#         if is_source_in_queue_dir(queue_dir):
+#             print "Sourceful upload in %s, accepting." % (q,)
+#             _accept()
+#         else:
+#             move_to_holding(suite, queue_dir)
+
+def do_accept(upload):
     print "ACCEPT"
+    cnf = Config()
     if not Options["No-Action"]:
-        (summary, short_summary) = Upload.build_summaries()
-    if Cnf.FindB("Dinstall::SecurityQueueHandling"):
-        Upload.dump_vars(Cnf["Dir::Queue::Embargoed"])
-        move_to_dir(Cnf["Dir::Queue::Embargoed"])
-        Upload.queue_build("embargoed", Cnf["Dir::Queue::Embargoed"])
-        # Check for override disparities
-        Upload.Subst["__SUMMARY__"] = summary
-    else:
+        (summary, short_summary) = upload.build_summaries()
+#     if cnf.FindB("Dinstall::SecurityQueueHandling"):
+#         upload.dump_vars(cnf["Dir::Queue::Embargoed"])
+#         move_to_dir(cnf["Dir::Queue::Embargoed"])
+#         upload.queue_build("embargoed", cnf["Dir::Queue::Embargoed"])
+        # Check for override disparities
+#         upload.Subst["__SUMMARY__"] = summary
+#     else:
+    if True:
         # Stable updates need to be copied to proposed-updates holding
         # area instead of accepted.  Sourceful uploads need to go
         # to it directly, binaries only if the source has not yet been
         # accepted into p-u.
         for suite, q in [("proposed-updates", "ProposedUpdates"),
                 ("oldstable-proposed-updates", "OldProposedUpdates")]:
-            if not Upload.pkg.changes["distribution"].has_key(suite):
+            if not upload.pkg.changes["distribution"].has_key(suite):
                 continue
-            return do_accept_stableupdate(suite, q)
+            utils.fubar("stable accept not supported yet")
+#            return do_accept_stableupdate(suite, q)
         # Just a normal upload, accept it...
-        _accept()
+        _accept(upload)
 
 def check_status(files):
     new = byhand = 0
@@ -1000,30 +969,37 @@ def check_status(files):
             new = 1
     return (new, byhand)
 
-def do_pkg(changes_file):
-    Upload.pkg.changes_file = changes_file
-    Upload.init_vars()
-    Upload.update_vars()
-    Upload.update_subst()
-    files = Upload.pkg.files
+def do_pkg(changes_file, session):
+    u = Upload()
+    u.pkg.load_dot_dak(changes_file)
+    u.update_subst()
+
+    cnf = Config()
+    bcc = "X-DAK: dak process-new\nX-Katie: lisa $Revision: 1.31 $"
+    if cnf.has_key("Dinstall::Bcc"):
+        u.Subst["__BCC__"] = bcc + "\nBcc: %s" % (cnf["Dinstall::Bcc"])
+    else:
+        u.Subst["__BCC__"] = bcc
+
+    files = u.pkg.files
 
     try:
-        with lock_package(Upload.pkg.changes["source"]):
-            if not recheck():
+        with lock_package(u.pkg.changes["source"]):
+            if not recheck(u, session):
                 return
 
             (new, byhand) = check_status(files)
             if new or byhand:
                 if new:
-                    do_new()
+                    do_new(u, session)
                 if byhand:
-                    do_byhand()
+                    do_byhand(u, session)
                 (new, byhand) = check_status(files)
 
             if not new and not byhand:
                 try:
                     check_daily_lock()
-                    do_accept()
+                    do_accept(u)
                 except CantGetLockError:
                     print "Hello? Operator! Give me the number for 911!"
                     print "Dinstall in the locked area, cant process packages, come back later"
@@ -1041,94 +1017,124 @@ def end():
         if accept_count > 1:
             sets = "sets"
         sys.stderr.write("Accepted %d package %s, %s.\n" % (accept_count, sets, utils.size_type(int(accept_bytes))))
-        Logger.log([utils.getusername(), "total",accept_count,accept_bytes])
+        Logger.log(["total",accept_count,accept_bytes])
 
     if not Options["No-Action"] and not Options["Trainee"]:
         Logger.close()
 
 ################################################################################
 
-def do_comments(dir, opref, npref, line, fn):
-    for comm in [ x for x in os.listdir(dir) if x.startswith(opref) ]:
-        lines = open("%s/%s" % (dir, comm)).readlines()
-        if len(lines) == 0 or lines[0] != line + "\n": continue
-        changes_files = [ x for x in os.listdir(".") if x.startswith(comm[7:]+"_")
-                                and x.endswith(".changes") ]
-        changes_files = sort_changes(changes_files)
-        for f in changes_files:
-            f = utils.validate_changes_file_arg(f, 0)
-            if not f: continue
-            print "\n" + f
-            fn(f, "".join(lines[1:]))
-
-        if opref != npref and not Options["No-Action"]:
-            newcomm = npref + comm[len(opref):]
-            os.rename("%s/%s" % (dir, comm), "%s/%s" % (dir, newcomm))
+# def do_comments(dir, opref, npref, line, fn):
+#     for comm in [ x for x in os.listdir(dir) if x.startswith(opref) ]:
+#         lines = open("%s/%s" % (dir, comm)).readlines()
+#         if len(lines) == 0 or lines[0] != line + "\n": continue
+#         changes_files = [ x for x in os.listdir(".") if x.startswith(comm[7:]+"_")
+#                                 and x.endswith(".changes") ]
+#         changes_files = sort_changes(changes_files)
+#         for f in changes_files:
+#             f = utils.validate_changes_file_arg(f, 0)
+#             if not f: continue
+#             print "\n" + f
+#             fn(f, "".join(lines[1:]))
+
+#         if opref != npref and not Options["No-Action"]:
+#             newcomm = npref + comm[len(opref):]
+#             os.rename("%s/%s" % (dir, comm), "%s/%s" % (dir, newcomm))
+
+# ################################################################################
+
+# def comment_accept(changes_file, comments):
+#     Upload.pkg.changes_file = changes_file
+#     Upload.init_vars()
+#     Upload.update_vars()
+#     Upload.update_subst()
+#     files = Upload.pkg.files
+
+#     if not recheck():
+#         return # dak wants to REJECT, crap
+
+#     (new, byhand) = check_status(files)
+#     if not new and not byhand:
+#         do_accept()
+
+# ################################################################################
+
+# def comment_reject(changes_file, comments):
+#     Upload.pkg.changes_file = changes_file
+#     Upload.init_vars()
+#     Upload.update_vars()
+#     Upload.update_subst()
+
+#     if not recheck():
+#         pass # dak has its own reasons to reject as well, which is fine
+
+#     reject(comments)
+#     print "REJECT\n" + reject_message,
+#     if not Options["No-Action"]:
+#         Upload.do_reject(0, reject_message)
+#         os.unlink(Upload.pkg.changes_file[:-8]+".dak")
 
 ################################################################################
 
-def comment_accept(changes_file, comments):
-    Upload.pkg.changes_file = changes_file
-    Upload.init_vars()
-    Upload.update_vars()
-    Upload.update_subst()
-    files = Upload.pkg.files
+def main():
+    global Options, Logger, Sections, Priorities
 
-    if not recheck():
-        return # dak wants to REJECT, crap
+    cnf = Config()
+    session = DBConn().session()
 
-    (new, byhand) = check_status(files)
-    if not new and not byhand:
-        do_accept()
+    Arguments = [('a',"automatic","Process-New::Options::Automatic"),
+                 ('h',"help","Process-New::Options::Help"),
+                 ('C',"comments-dir","Process-New::Options::Comments-Dir", "HasArg"),
+                 ('m',"manual-reject","Process-New::Options::Manual-Reject", "HasArg"),
+                 ('t',"trainee","Process-New::Options::Trainee"),
+                 ('n',"no-action","Process-New::Options::No-Action")]
 
-################################################################################
+    for i in ["automatic", "help", "manual-reject", "no-action", "version", "comments-dir", "trainee"]:
+        if not cnf.has_key("Process-New::Options::%s" % (i)):
+            cnf["Process-New::Options::%s" % (i)] = ""
 
-def comment_reject(changes_file, comments):
-    Upload.pkg.changes_file = changes_file
-    Upload.init_vars()
-    Upload.update_vars()
-    Upload.update_subst()
+    changes_files = apt_pkg.ParseCommandLine(cnf.Cnf,Arguments,sys.argv)
+    if len(changes_files) == 0 and not cnf.get("Process-New::Options::Comments-Dir",""):
+        changes_files = utils.get_changes_files(cnf["Dir::Queue::New"])
 
-    if not recheck():
-        pass # dak has its own reasons to reject as well, which is fine
+    Options = cnf.SubTree("Process-New::Options")
+
+    if Options["Help"]:
+        usage()
 
-    reject(comments)
-    print "REJECT\n" + reject_message,
     if not Options["No-Action"]:
-        Upload.do_reject(0, reject_message)
-        os.unlink(Upload.pkg.changes_file[:-8]+".dak")
+        try:
+            Logger = daklog.Logger(cnf, "process-new")
+        except CantOpenError, e:
+            Options["Trainee"] = "True"
 
-################################################################################
+    Sections = Section_Completer(session)
+    Priorities = Priority_Completer(session)
+    readline.parse_and_bind("tab: complete")
 
-def main():
-    changes_files = init()
-    if len(changes_files) > 50:
+    if len(changes_files) > 1:
         sys.stderr.write("Sorting changes...\n")
-    changes_files = sort_changes(changes_files)
+    changes_files = sort_changes(changes_files, session)
 
     # Kill me now? **FIXME**
-    Cnf["Dinstall::Options::No-Mail"] = ""
-    bcc = "X-DAK: dak process-new\nX-Katie: lisa $Revision: 1.31 $"
-    if Cnf.has_key("Dinstall::Bcc"):
-        Upload.Subst["__BCC__"] = bcc + "\nBcc: %s" % (Cnf["Dinstall::Bcc"])
-    else:
-        Upload.Subst["__BCC__"] = bcc
-
-    commentsdir = Cnf.get("Process-New::Options::Comments-Dir","")
-    if commentsdir:
-        if changes_files != []:
-            sys.stderr.write("Can't specify any changes files if working with comments-dir")
-            sys.exit(1)
-        do_comments(commentsdir, "ACCEPT.", "ACCEPTED.", "OK", comment_accept)
-        do_comments(commentsdir, "REJECT.", "REJECTED.", "NOTOK", comment_reject)
-    else:
+    cnf["Dinstall::Options::No-Mail"] = ""
+
+#     commentsdir = cnf.get("Process-New::Options::Comments-Dir","")
+#     if commentsdir:
+#        if changes_files != []:
+#            sys.stderr.write("Can't specify any changes files if working with comments-dir")
+#            sys.exit(1)
+#        do_comments(commentsdir, "ACCEPT.", "ACCEPTED.", "OK", comment_accept)
+#        do_comments(commentsdir, "REJECT.", "REJECTED.", "NOTOK", comment_reject)
+#     else:
+    if True:
         for changes_file in changes_files:
             changes_file = utils.validate_changes_file_arg(changes_file, 0)
             if not changes_file:
                 continue
             print "\n" + changes_file
 
-            do_pkg (changes_file)
+            do_pkg (changes_file, session)
 
     end()
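
The refactor above drops the global Upload object in favour of a per-changes-file Upload plus an explicit session, with work serialised per source package by the lock_package() context manager. A minimal self-contained sketch of that locking pattern; the lock directory constant is an assumption here, dak takes it from Config()["Process-New::LockDir"]:

    import contextlib
    import errno
    import os

    LOCKDIR = "/tmp/dak-locks"   # assumed pre-existing, writable directory

    class AlreadyLockedError(Exception):
        pass

    @contextlib.contextmanager
    def lock_package(package):
        # Take an O_CREAT|O_EXCL lock file named after the source package;
        # it is removed again however the with-block exits.
        path = os.path.join(LOCKDIR, package)
        try:
            fd = os.open(path, os.O_CREAT | os.O_EXCL | os.O_RDONLY)
        except OSError, e:
            if e.errno in (errno.EEXIST, errno.EACCES):
                raise AlreadyLockedError(package)
            raise
        try:
            yield fd
        finally:
            os.unlink(path)

    # with lock_package("hello"):
    #     ...recheck, NEW/BYHAND handling, accept...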
 
index 70c3cdbae295de36f0026c54a49c31bca8bbdf4a..0645dd9f86d9fd45e633326c1626a8855aac46d8 100755 (executable)
@@ -34,23 +34,14 @@ Checks Debian packages from Incoming
 
 ################################################################################
 
-import commands
 import errno
 import fcntl
 import os
-import re
-import shutil
-import stat
 import sys
-import time
 import traceback
-import tarfile
-import apt_inst
 import apt_pkg
-from debian_bundle import deb822
 
 from daklib.dbconn import *
-from daklib.binary import Binary
 from daklib import daklog
 from daklib.queue import *
 from daklib import utils
index c60358f0446c7a413d38b9fcf703cba60a4ff884..8e338e526181c9d5d16dd4737ae7b36e6dce725e 100755 (executable)
@@ -37,7 +37,6 @@
 from copy import copy
 import glob, os, stat, sys, time
 import apt_pkg
-import cgi
 
 from daklib import utils
 from daklib.changes import Changes
index 8ed03412293f1d946976daa06b855bc394a7f7c4..fe763605f545ddff7748a2a7dad6d7071ebc44ab 100755 (executable)
--- a/dak/rm.py
+++ b/dak/rm.py
@@ -41,7 +41,6 @@
 
 import commands
 import os
-import re
 import sys
 import apt_pkg
 import apt_inst
index d3cf65302da6c6c6ee8a030c17b49eea7c24ed09..e8e1621d2f957cb51c861329e12377e6e780dff1 100755 (executable)
@@ -22,7 +22,6 @@
 
 import sys, os, re, time
 import apt_pkg
-import tempfile
 from debian_bundle import deb822
 from daklib.dbconn import *
 from daklib import utils
index be3d51147130f83675dd2cdbe70b681425e32393..b21efcce43ac336797034dc84bcdf2743bcc15c2 100755 (executable)
@@ -32,6 +32,7 @@ import examine_package
 
 from daklib.queue import determine_new, check_valid
 from daklib import utils
+from daklib.regexes import re_source_ext
 
 # Globals
 Cnf = None
@@ -160,8 +161,9 @@ def do_pkg(changes_file):
         filestoexamine = []
         for pkg in new.keys():
             for fn in new[pkg]["files"]:
-                if ( c.files[fn].has_key("new") and not
-                     c.files[fn]["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2"] ):
+                if (c.files[fn].has_key("new") and
+                    (c.files[fn]["type"] == "dsc" or
+                     not re_source_ext.match(c.files[fn]["type"]))):
                     filestoexamine.append(fn)
 
         html_header(c.changes["source"], filestoexamine)
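
The rewritten filter examines the .dsc itself plus anything that is not recognised as a source archive, rather than enumerating tarball extensions by hand. A quick spot check of which types now qualify, assuming daklib is importable:

    from daklib.regexes import re_source_ext

    for ftype in ("dsc", "deb", "orig.tar.gz", "debian.tar.bz2", "diff.gz"):
        examine = (ftype == "dsc" or not re_source_ext.match(ftype))
        print "%-16s examined: %s" % (ftype, examine)
    # dsc and deb are examined; tarballs and diffs are skipped
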
index 583178b28a01d89b660dfc15ef67b089b3fd422e..7c61e2a42948fa58e2bce7a0d50493d981ebfee0 100755 (executable)
@@ -34,8 +34,7 @@ import sys
 import apt_pkg
 
 from daklib import utils
-from daklib.dbconn import DBConn, get_suite_architectures, Suite, Architecture, \
-                          BinAssociation
+from daklib.dbconn import DBConn, get_suite_architectures, Suite, Architecture
 
 ################################################################################
 
index acae2072a10e6078f520a43c886d8bb5e2ef7448..4c4ac78ee8a0ed5cb4ad44d9deaf3894a66a87fb 100755 (executable)
@@ -34,7 +34,6 @@ import time
 import errno
 import fcntl
 import tempfile
-import pwd
 import apt_pkg
 
 from daklib.dbconn import *
index 4999af3a4e445fb4f7b7d5767c7e5343fca926f6..ecf5cd2a80ac56589202f5a1d3bb4ebcbd68cd72 100755 (executable)
@@ -44,7 +44,7 @@ from daklib.dak_exceptions import DBUpdateError
 ################################################################################
 
 Cnf = None
-required_database_schema = 14
+required_database_schema = 15
 
 ################################################################################
 
index 1bb90753601bd9f3b0e220363b39e7aeac6932a9..59c7da17d15da775e7a3e4d843f4f643e3894210 100755 (executable)
@@ -76,6 +76,10 @@ CHANGESFIELDS_DSCFILES_OPTIONAL = [ "files id" ]
 
 __all__.append('CHANGESFIELDS_DSCFILES_OPTIONAL')
 
+CHANGESFIELDS_ORIGFILES = [ "id", "location" ]
+
+__all__.append('CHANGESFIELDS_ORIGFILES')
+
 ###############################################################################
 
 class Changes(object):
@@ -91,10 +95,7 @@ class Changes(object):
         self.dsc = {}
         self.files = {}
         self.dsc_files = {}
-
-        self.orig_tar_id = None
-        self.orig_tar_location = ""
-        self.orig_tar_gz = None
+        self.orig_files = {}
 
     def file_summary(self):
         # changes["distribution"] may not exist in corner cases
@@ -189,8 +190,24 @@ class Changes(object):
         self.files.update(p.load())
         self.dsc_files.update(p.load())
 
-        self.orig_tar_id = p.load()
-        self.orig_tar_location = p.load()
+        next_obj = p.load()
+        if isinstance(next_obj, dict):
+            self.orig_files.update(next_obj)
+        else:
+            # Auto-convert old dak files to new format supporting
+            # multiple tarballs
+            orig_tar_gz = None
+            for dsc_file in self.dsc_files.keys():
+                if dsc_file.endswith(".orig.tar.gz"):
+                    orig_tar_gz = dsc_file
+            self.orig_files[orig_tar_gz] = {}
+            if next_obj is not None:
+                self.orig_files[orig_tar_gz]["id"] = next_obj
+            next_obj = p.load()
+            if next_obj is not None and next_obj != "":
+                self.orig_files[orig_tar_gz]["location"] = next_obj
+            if len(self.orig_files[orig_tar_gz]) == 0:
+                del self.orig_files[orig_tar_gz]
 
         dump_file.close()
 
@@ -240,6 +257,17 @@ class Changes(object):
 
         return ret
 
+    def sanitised_orig_files(self):
+        ret = {}
+        for name, entry in self.orig_files.items():
+            ret[name] = {}
+            # Optional orig_files fields
+            for i in CHANGESFIELDS_ORIGFILES:
+                if entry.has_key(i):
+                    ret[name][i] = entry[i]
+
+        return ret
+
     def write_dot_dak(self, dest_dir):
         """
         Dump ourself into a cPickle file.
@@ -281,8 +309,7 @@ class Changes(object):
         p.dump(self.sanitised_dsc())
         p.dump(self.sanitised_files())
         p.dump(self.sanitised_dsc_files())
-        p.dump(self.orig_tar_id)
-        p.dump(self.orig_tar_location)
+        p.dump(self.sanitised_orig_files())
 
         dump_file.close()
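
With this change the tail of a .dak pickle stream is a single dict keyed by orig tarball filename, carrying the optional "id" and "location" fields, instead of the old orig_tar_id/orig_tar_location pair; load_dot_dak() above converts old dumps on the fly. A round-trip sketch of the new shape, with invented values:

    import cPickle
    from StringIO import StringIO

    buf = StringIO()
    p = cPickle.Pickler(buf, 2)
    # ...the earlier changes/dsc/files fields are elided here...
    p.dump({"hello_1.0.orig.tar.gz": {"id": 42, "location": 1}})

    buf.seek(0)
    orig_files = cPickle.Unpickler(buf).load()
    assert orig_files["hello_1.0.orig.tar.gz"]["id"] == 42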
 
index 0cca205e96bf83b6e1cad2c7f785e650c814b848..dfcae368089f0c1d2a0d2e56b9e6e8fa218c6d6a 100755 (executable)
@@ -58,13 +58,12 @@ class Logger:
             logfile = utils.open_file(logfilename, 'a')
             os.umask(umask)
         self.logfile = logfile
-        # Log the start of the program
-        user = pwd.getpwuid(os.getuid())[0]
-        self.log(["program start", user])
+        self.log(["program start"])
 
     def log (self, details):
         "Log an event"
-        # Prepend the timestamp and program name
+        # Prepend timestamp, program name, and user name
+        details.insert(0, utils.getusername())
         details.insert(0, self.program)
         timestamp = time.strftime("%Y%m%d%H%M%S")
         details.insert(0, timestamp)
index f5a19c1236442360ca57b7b867eed9f24f5f5003..adecc38a7f060d6c9876be399e6d6b4df3fb3034 100755 (executable)
@@ -59,21 +59,45 @@ __all__ = ['IntegrityError', 'SQLAlchemyError']
 ################################################################################
 
 def session_wrapper(fn):
+    """
+    Wrapper around common ".., session=None):" handling. If the wrapped
+    function is called without passing 'session', we create a local one
+    and destroy it when the function ends.
+
+    Also attaches a commit_or_flush method to the session; if we created a
+    local session, this is a synonym for session.commit(), otherwise it is a
+    synonym for session.flush().
+    """
+
     def wrapped(*args, **kwargs):
         private_transaction = False
+
+        # Find the session object
         session = kwargs.get('session')
 
-        # No session specified as last argument or in kwargs, create one.
-        if session is None and len(args) <= len(getargspec(fn)[0]) - 1:
-            private_transaction = True
-            kwargs['session'] = DBConn().session()
+        if session is None:
+            if len(args) <= len(getargspec(fn)[0]) - 1:
+                # No session specified as last argument or in kwargs
+                private_transaction = True
+                session = kwargs['session'] = DBConn().session()
+            else:
+                # Session is last argument in args
+                session = args[-1]
+                if session is None:
+                    session = args[-1] = DBConn().session()
+                    private_transaction = True
+
+        if private_transaction:
+            session.commit_or_flush = session.commit
+        else:
+            session.commit_or_flush = session.flush
 
         try:
             return fn(*args, **kwargs)
         finally:
             if private_transaction:
                 # We created a session; close it.
-                kwargs['session'].close()
+                session.close()
 
     wrapped.__doc__ = fn.__doc__
     wrapped.func_name = fn.func_name
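
Both calling conventions work once a helper carries the decorator: called without a session, the wrapper opens a private one, commit_or_flush() commits, and the session is closed afterwards; called with a session, commit_or_flush() only flushes and the transaction stays with the caller. A usage sketch against get_or_set_keyring() from this file, assuming a configured DBConn and an illustrative keyring name:

    from daklib.dbconn import DBConn, get_or_set_keyring

    # Private session: the wrapper commits and closes for us.
    kr = get_or_set_keyring("debian-keyring.gpg")

    # Shared session: only a flush happens inside; we own the transaction.
    session = DBConn().session()
    kr = get_or_set_keyring("debian-keyring.gpg", session=session)
    session.commit()
    session.close()
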
@@ -168,7 +192,7 @@ __all__.append('Archive')
 @session_wrapper
 def get_archive(archive, session=None):
     """
-    returns database id for given c{archive}.
+    returns database id for given C{archive}.
 
     @type archive: string
     @param archive: the name of the archive
@@ -419,6 +443,7 @@ class ContentFilename(object):
 
 __all__.append('ContentFilename')
 
+@session_wrapper
 def get_or_set_contents_file_id(filename, session=None):
     """
     Returns database id for given filename.
@@ -435,10 +460,6 @@ def get_or_set_contents_file_id(filename, session=None):
     @rtype: int
     @return: the database id for the given component
     """
-    privatetrans = False
-    if session is None:
-        session = DBConn().session()
-        privatetrans = True
 
     q = session.query(ContentFilename).filter_by(filename=filename)
 
@@ -448,15 +469,9 @@ def get_or_set_contents_file_id(filename, session=None):
         cf = ContentFilename()
         cf.filename = filename
         session.add(cf)
-        if privatetrans:
-            session.commit()
-        else:
-            session.flush()
+        session.commit_or_flush()
         ret = cf.cafilename_id
 
-    if privatetrans:
-        session.close()
-
     return ret
 
 __all__.append('get_or_set_contents_file_id')
@@ -523,6 +538,7 @@ class ContentFilepath(object):
 
 __all__.append('ContentFilepath')
 
+@session_wrapper
 def get_or_set_contents_path_id(filepath, session=None):
     """
     Returns database id for given path.
@@ -539,10 +555,6 @@ def get_or_set_contents_path_id(filepath, session=None):
     @rtype: int
     @return: the database id for the given path
     """
-    privatetrans = False
-    if session is None:
-        session = DBConn().session()
-        privatetrans = True
 
     q = session.query(ContentFilepath).filter_by(filepath=filepath)
 
@@ -552,15 +564,9 @@ def get_or_set_contents_path_id(filepath, session=None):
         cf = ContentFilepath()
         cf.filepath = filepath
         session.add(cf)
-        if privatetrans:
-            session.commit()
-        else:
-            session.flush()
+        session.commit_or_flush()
         ret = cf.cafilepath_id
 
-    if privatetrans:
-        session.close()
-
     return ret
 
 __all__.append('get_or_set_contents_path_id')
@@ -820,6 +826,7 @@ class Fingerprint(object):
 
 __all__.append('Fingerprint')
 
+@session_wrapper
 def get_or_set_fingerprint(fpr, session=None):
     """
     Returns Fingerprint object for given fpr.
@@ -838,10 +845,6 @@ def get_or_set_fingerprint(fpr, session=None):
     @rtype: Fingerprint
     @return: the Fingerprint object for the given fpr
     """
-    privatetrans = False
-    if session is None:
-        session = DBConn().session()
-        privatetrans = True
 
     q = session.query(Fingerprint).filter_by(fingerprint=fpr)
 
@@ -851,15 +854,9 @@ def get_or_set_fingerprint(fpr, session=None):
         fingerprint = Fingerprint()
         fingerprint.fingerprint = fpr
         session.add(fingerprint)
-        if privatetrans:
-            session.commit()
-        else:
-            session.flush()
+        session.commit_or_flush()
         ret = fingerprint
 
-    if privatetrans:
-        session.close()
-
     return ret
 
 __all__.append('get_or_set_fingerprint')
@@ -875,6 +872,7 @@ class Keyring(object):
 
 __all__.append('Keyring')
 
+@session_wrapper
 def get_or_set_keyring(keyring, session=None):
     """
     If C{keyring} does not have an entry in the C{keyrings} table yet, create one
@@ -886,28 +884,17 @@ def get_or_set_keyring(keyring, session=None):
 
     @rtype: Keyring
     @return: the Keyring object for this keyring
-
     """
-    privatetrans = False
-    if session is None:
-        session = DBConn().session()
-        privatetrans = True
 
-    try:
-        obj = session.query(Keyring).filter_by(keyring_name=keyring).first()
-
-        if obj is None:
-            obj = Keyring(keyring_name=keyring)
-            session.add(obj)
-            if privatetrans:
-                session.commit()
-            else:
-                session.flush()
+    q = session.query(Keyring).filter_by(keyring_name=keyring)
 
+    try:
+        return q.one()
+    except NoResultFound:
+        obj = Keyring(keyring_name=keyring)
+        session.add(obj)
+        session.commit_or_flush()
         return obj
-    finally:
-        if privatetrans:
-            session.close()
 
 __all__.append('get_or_set_keyring')
 
@@ -973,6 +960,7 @@ class Maintainer(object):
 
 __all__.append('Maintainer')
 
+@session_wrapper
 def get_or_set_maintainer(name, session=None):
     """
     Returns Maintainer object for given maintainer name.
@@ -991,10 +979,6 @@ def get_or_set_maintainer(name, session=None):
     @rtype: Maintainer
     @return: the Maintainer object for the given maintainer
     """
-    privatetrans = False
-    if session is None:
-        session = DBConn().session()
-        privatetrans = True
 
     q = session.query(Maintainer).filter_by(name=name)
     try:
@@ -1003,19 +987,14 @@ def get_or_set_maintainer(name, session=None):
         maintainer = Maintainer()
         maintainer.name = name
         session.add(maintainer)
-        if privatetrans:
-            session.commit()
-        else:
-            session.flush()
+        session.commit_or_flush()
         ret = maintainer
 
-    if privatetrans:
-        session.close()
-
     return ret
 
 __all__.append('get_or_set_maintainer')
 
+@session_wrapper
 def get_maintainer(maintainer_id, session=None):
     """
     Return the name of the maintainer behind C{maintainer_id} or None if that
@@ -1028,16 +1007,7 @@ def get_maintainer(maintainer_id, session=None):
     @return: the Maintainer with this C{maintainer_id}
     """
 
-    privatetrans = False
-    if session is None:
-        session = DBConn().session()
-        privatetrans = True
-
-    try:
-        return session.query(Maintainer).get(maintainer_id)
-    finally:
-        if privatetrans:
-            session.close()
+    return session.query(Maintainer).get(maintainer_id)
 
 __all__.append('get_maintainer')
 
@@ -1443,23 +1413,26 @@ class Queue(object):
 
                 session.add(qb)
 
-            # If the .orig.tar.gz is in the pool, create a symlink to
-            # it (if one doesn't already exist)
-            if changes.orig_tar_id:
-                # Determine the .orig.tar.gz file name
-                for dsc_file in changes.dsc_files.keys():
-                    if dsc_file.endswith(".orig.tar.gz"):
-                        filename = dsc_file
-
-                dest = os.path.join(dest_dir, filename)
+            # If the .orig tarballs are in the pool, create a symlink to
+            # them (if one doesn't already exist)
+            for dsc_file in changes.dsc_files.keys():
+                # Skip all files except orig tarballs
+                if not re_is_orig_source.match(dsc_file):
+                    continue
+                # Skip orig files not identified in the pool
+                if not (changes.orig_files.has_key(dsc_file) and
+                        changes.orig_files[dsc_file].has_key("id")):
+                    continue
+                orig_file_id = changes.orig_files[dsc_file]["id"]
+                dest = os.path.join(dest_dir, dsc_file)
 
                 # If it doesn't exist, create a symlink
                 if not os.path.exists(dest):
                     q = session.execute("SELECT l.path, f.filename FROM location l, files f WHERE f.id = :id and f.location = l.id",
-                                        {'id': changes.orig_tar_id})
+                                        {'id': orig_file_id})
                     res = q.fetchone()
                     if not res:
-                        return "[INTERNAL ERROR] Couldn't find id %s in files table." % (changes.orig_tar_id)
+                        return "[INTERNAL ERROR] Couldn't find id %s in files table." % (orig_file_id)
 
                     src = os.path.join(res[0], res[1])
                     os.symlink(src, dest)
@@ -1799,6 +1772,17 @@ __all__.append('SrcAssociation')
 
 ################################################################################
 
+class SrcFormat(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<SrcFormat %s>' % (self.format_name)
+
+__all__.append('SrcFormat')
+
+################################################################################
+
 class SrcUploader(object):
     def __init__(self, *args, **kwargs):
         pass
@@ -1904,7 +1888,7 @@ def get_suite(suite, session=None):
     generated if not supplied)
 
     @rtype: Suite
-    @return: Suite object for the requested suite name (None if not presenT)
+    @return: Suite object for the requested suite name (None if not present)
     """
 
     q = session.query(Suite).filter_by(suite_name=suite)
@@ -1969,6 +1953,42 @@ __all__.append('get_suite_architectures')
 
 ################################################################################
 
+class SuiteSrcFormat(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
+
+__all__.append('SuiteSrcFormat')
+
+@session_wrapper
+def get_suite_src_formats(suite, session=None):
+    """
+    Returns list of allowed SrcFormat for C{suite}.
+
+    @type suite: str
+    @param suite: Suite name to search for
+
+    @type session: Session
+    @param session: Optional SQL session object (a temporary one will be
+    generated if not supplied)
+
+    @rtype: list
+    @return: the list of allowed source formats for I{suite}
+    """
+
+    q = session.query(SrcFormat)
+    q = q.join(SuiteSrcFormat)
+    q = q.join(Suite).filter_by(suite_name=suite)
+    q = q.order_by('format_name')
+
+    return q.all()
+
+__all__.append('get_suite_src_formats')
+
+################################################################################
+
 class Uid(object):
     def __init__(self, *args, **kwargs):
         pass
@@ -1990,6 +2010,7 @@ class Uid(object):
 
 __all__.append('Uid')
 
+@session_wrapper
 def add_database_user(uidname, session=None):
     """
     Adds a database user
@@ -2006,19 +2027,12 @@ def add_database_user(uidname, session=None):
     @return: the uid object for the given uidname
     """
 
-    privatetrans = False
-    if session is None:
-        session = DBConn().session()
-        privatetrans = True
-
     session.execute("CREATE USER :uid", {'uid': uidname})
-
-    if privatetrans:
-        session.commit()
-        session.close()
+    session.commit_or_flush()
 
 __all__.append('add_database_user')
 
+@session_wrapper
 def get_or_set_uid(uidname, session=None):
     """
     Returns uid object for given uidname.
@@ -2037,11 +2051,6 @@ def get_or_set_uid(uidname, session=None):
     @return: the uid object for the given uidname
     """
 
-    privatetrans = False
-    if session is None:
-        session = DBConn().session()
-        privatetrans = True
-
     q = session.query(Uid).filter_by(uid=uidname)
 
     try:
@@ -2050,15 +2059,9 @@ def get_or_set_uid(uidname, session=None):
         uid = Uid()
         uid.uid = uidname
         session.add(uid)
-        if privatetrans:
-            session.commit()
-        else:
-            session.flush()
+        session.commit_or_flush()
         ret = uid
 
-    if privatetrans:
-        session.close()
-
     return ret
 
 __all__.append('get_or_set_uid')
@@ -2116,9 +2119,11 @@ class DBConn(Singleton):
         self.tbl_section = Table('section', self.db_meta, autoload=True)
         self.tbl_source = Table('source', self.db_meta, autoload=True)
         self.tbl_src_associations = Table('src_associations', self.db_meta, autoload=True)
+        self.tbl_src_format = Table('src_format', self.db_meta, autoload=True)
         self.tbl_src_uploaders = Table('src_uploaders', self.db_meta, autoload=True)
         self.tbl_suite = Table('suite', self.db_meta, autoload=True)
         self.tbl_suite_architectures = Table('suite_architectures', self.db_meta, autoload=True)
+        self.tbl_suite_src_formats = Table('suite_src_formats', self.db_meta, autoload=True)
         self.tbl_uid = Table('uid', self.db_meta, autoload=True)
 
     def __setupmappers(self):
@@ -2280,6 +2285,10 @@ class DBConn(Singleton):
                                  source_id = self.tbl_src_associations.c.source,
                                  source = relation(DBSource)))
 
+        mapper(SrcFormat, self.tbl_src_format,
+               properties = dict(src_format_id = self.tbl_src_format.c.id,
+                                 format_name = self.tbl_src_format.c.format_name))
+
         mapper(SrcUploader, self.tbl_src_uploaders,
                properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
                                  source_id = self.tbl_src_uploaders.c.source,
@@ -2298,6 +2307,12 @@ class DBConn(Singleton):
                                  arch_id = self.tbl_suite_architectures.c.architecture,
                                  architecture = relation(Architecture)))
 
+        mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
+               properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
+                                 suite = relation(Suite, backref='suitesrcformats'),
+                                 src_format_id = self.tbl_suite_src_formats.c.src_format,
+                                 src_format = relation(SrcFormat)))
+
         mapper(Uid, self.tbl_uid,
                properties = dict(uid_id = self.tbl_uid.c.id,
                                  fingerprint = relation(Fingerprint)))
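
The new src_format and suite_src_formats mappings are what check_dsc() consults below to decide which source formats a suite accepts. A usage sketch, assuming a configured database with update15 applied and a suite named unstable:

    from daklib.dbconn import DBConn, get_suite_src_formats

    session = DBConn().session()
    allowed = [f.format_name for f in get_suite_src_formats("unstable", session)]
    print allowed   # e.g. ['1.0', '3.0 (native)', '3.0 (quilt)'], illustrative
    session.close()
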
index 2a3db36796d642692a7d0c8d859a9d5adad45604..25b407232f602575e9ee9eeb26660b7fea56c653 100755 (executable)
@@ -51,7 +51,7 @@ from config import Config
 from holding import Holding
 from dbconn import *
 from summarystats import SummaryStats
-from utils import parse_changes
+from utils import parse_changes, check_dsc_files
 from textutils import fix_maintainer
 from binary import Binary
 
@@ -74,7 +74,7 @@ def get_type(f, session):
     # Determine the type
     if f.has_key("dbtype"):
         file_type = f["dbtype"]
-    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
+    elif re_source_ext.match(f["type"]):
         file_type = "dsc"
     else:
         utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))
@@ -354,7 +354,7 @@ class Upload(object):
     ###########################################################################
     def load_changes(self, filename):
         """
-        @rtype boolean
+        @rtype: boolean
         @return: whether the changes file was valid or not.  We may want to
                  reject even if this is True (see what gets put in self.rejects).
                  This is simply to prevent us even trying things later which will
@@ -714,7 +714,7 @@ class Upload(object):
             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
 
         # Ensure the source version matches the version in the .changes file
-        if entry["type"] == "orig.tar.gz":
+        if re_is_orig_source.match(f):
             changes_version = self.pkg.changes["chopversion2"]
         else:
             changes_version = self.pkg.changes["chopversion"]
@@ -922,7 +922,7 @@ class Upload(object):
                 self.rejects.append("source only uploads are not supported.")
 
     ###########################################################################
-    def check_dsc(self, action=True):
+    def check_dsc(self, action=True, session=None):
         """Returns bool indicating whether or not the source changes are valid"""
         # Ensure there is source to check
         if not self.pkg.changes["architecture"].has_key("source"):
@@ -982,10 +982,11 @@ class Upload(object):
         if not re_valid_version.match(self.pkg.dsc["version"]):
             self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
 
-        # Bumping the version number of the .dsc breaks extraction by stable's
-        # dpkg-source.  So let's not do that...
-        if self.pkg.dsc["format"] != "1.0":
-            self.rejects.append("%s: incompatible 'Format' version produced by a broken version of dpkg-dev 1.9.1{3,4}." % (dsc_filename))
+        # Only a limited list of source formats are allowed in each suite
+        for dist in self.pkg.changes["distribution"].keys():
+            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
+            if self.pkg.dsc["format"] not in allowed:
+                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s)" % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
 
         # Validate the Maintainer field
         try:
@@ -1017,19 +1018,8 @@ class Upload(object):
         if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
             self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
 
-        # Ensure there is a .tar.gz in the .dsc file
-        has_tar = False
-        for f in self.pkg.dsc_files.keys():
-            m = re_issource.match(f)
-            if not m:
-                self.rejects.append("%s: %s in Files field not recognised as source." % (dsc_filename, f))
-                continue
-            ftype = m.group(3)
-            if ftype == "orig.tar.gz" or ftype == "tar.gz":
-                has_tar = True
-
-        if not has_tar:
-            self.rejects.append("%s: no .tar.gz or .orig.tar.gz in 'Files' field." % (dsc_filename))
+        # Ensure the Files field contain only what's expected
+        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
 
         # Ensure source is newer than existing source in target suites
         session = DBConn().session()
@@ -1066,16 +1056,19 @@ class Upload(object):
                 if not os.path.exists(src):
                     return
                 ftype = m.group(3)
-                if ftype == "orig.tar.gz" and self.pkg.orig_tar_gz:
+                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
+                   self.pkg.orig_files[f].has_key("path"):
                     continue
                 dest = os.path.join(os.getcwd(), f)
                 os.symlink(src, dest)
 
-        # If the orig.tar.gz is not a part of the upload, create a symlink to the
-        # existing copy.
-        if self.pkg.orig_tar_gz:
-            dest = os.path.join(os.getcwd(), os.path.basename(self.pkg.orig_tar_gz))
-            os.symlink(self.pkg.orig_tar_gz, dest)
+        # If the orig files are not a part of the upload, create symlinks to the
+        # existing copies.
+        for orig_file in self.pkg.orig_files.keys():
+            if not self.pkg.orig_files[orig_file].has_key("path"):
+                continue
+            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
+            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
 
         # Extract the source
         cmd = "dpkg-source -sn -x %s" % (dsc_filename)
@@ -1118,10 +1111,11 @@ class Upload(object):
         #      We should probably scrap or rethink the whole reprocess thing
         # Bail out if:
         #    a) there's no source
-        # or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files'
-        # or c) the orig.tar.gz is MIA
+        # or b) reprocess is 2 - we will do this check next time when orig
+        #       tarball is in 'files'
+        # or c) the orig files are MIA
         if not self.pkg.changes["architecture"].has_key("source") or self.reprocess == 2 \
-           or self.pkg.orig_tar_gz == -1:
+           or len(self.pkg.orig_files) == 0:
             return
 
         tmpdir = utils.temp_dirname()
@@ -1935,7 +1929,7 @@ distribution."""
             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
-            self.Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
+            self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
             # Write the rejection email out as the <foo>.reason file
             os.write(reason_fd, reject_mail_message)
@@ -2143,7 +2137,7 @@ distribution."""
         """
 
         @warning: NB: this function can remove entries from the 'files' index [if
-         the .orig.tar.gz is a duplicate of the one in the archive]; if
+         the orig tarball is a duplicate of the one in the archive]; if
          you're iterating over 'files' and call this function as part of
          the loop, be sure to add a check to the top of the loop to
          ensure you haven't just tried to dereference the deleted entry.
@@ -2151,7 +2145,8 @@ distribution."""
         """
 
         Cnf = Config()
-        self.pkg.orig_tar_gz = None
+        self.pkg.orig_files = {} # XXX: do we need to clear it?
+        orig_files = self.pkg.orig_files
 
         # Try and find all files mentioned in the .dsc.  This has
         # to work harder to cope with the multiple possible
@@ -2185,7 +2180,7 @@ distribution."""
                 if len(ql) > 0:
                     # Ignore exact matches for .orig.tar.gz
                     match = 0
-                    if dsc_name.endswith(".orig.tar.gz"):
+                    if re_is_orig_source.match(dsc_name):
                         for i in ql:
                             if self.pkg.files.has_key(dsc_name) and \
                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
@@ -2195,13 +2190,15 @@ distribution."""
                                 # This would fix the stupidity of changing something we often iterate over
                                 # whilst we're doing it
                                 del self.pkg.files[dsc_name]
-                                self.pkg.orig_tar_gz = os.path.join(i.location.path, i.filename)
+                                if not orig_files.has_key(dsc_name):
+                                    orig_files[dsc_name] = {}
+                                orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
                                 match = 1
 
                     if not match:
                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
 
-            elif dsc_name.endswith(".orig.tar.gz"):
+            elif re_is_orig_source.match(dsc_name):
                 # Check in the pool
                 ql = get_poolfile_like_name(dsc_name, session)
 
@@ -2239,9 +2236,11 @@ distribution."""
                     # need this for updating dsc_files in install()
                     dsc_entry["files id"] = x.file_id
                     # See install() in process-accepted...
-                    self.pkg.orig_tar_id = x.file_id
-                    self.pkg.orig_tar_gz = old_file
-                    self.pkg.orig_tar_location = x.location.location_id
+                    if not orig_files.has_key(dsc_name):
+                        orig_files[dsc_name] = {}
+                    orig_files[dsc_name]["id"] = x.file_id
+                    orig_files[dsc_name]["path"] = old_file
+                    orig_files[dsc_name]["location"] = x.location.location_id
                 else:
                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
                     # Not there? Check the queue directories...
@@ -2255,11 +2254,12 @@ distribution."""
                             in_otherdir_fh.close()
                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                             found = in_otherdir
-                            self.pkg.orig_tar_gz = in_otherdir
+                            if not orig_files.has_key(dsc_name):
+                                orig_files[dsc_name] = {}
+                            orig_files[dsc_name]["path"] = in_otherdir
 
                     if not found:
                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_name))
-                        self.pkg.orig_tar_gz = -1
                         continue
             else:
                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (file, dsc_name))
index 7560511f6edd6cae9fa2a52d204d8785f6076bab..cc5d1be6873de251852e92b0e85d8e1a7900f3e1 100755 (executable)
@@ -42,7 +42,11 @@ re_arch_from_filename = re.compile(r"/binary-[^/]+/")
 re_extract_src_version = re.compile (r"(\S+)\s*\((.*)\)")
 re_isadeb = re.compile (r"(.+?)_(.+?)_(.+)\.u?deb$")
 
-re_issource = re.compile (r"(.+)_(.+?)\.(orig\.tar\.gz|diff\.gz|tar\.gz|dsc)$")
+orig_source_ext_re = r"orig(?:-.+)?\.tar\.(?:gz|bz2)"
+re_orig_source_ext = re.compile(orig_source_ext_re + "$")
+re_source_ext = re.compile("(" + orig_source_ext_re + r"|debian\.tar\.(?:gz|bz2)|diff\.gz|tar\.(?:gz|bz2)|dsc)$")
+re_issource = re.compile(r"(.+)_(.+?)\." + re_source_ext.pattern)
+re_is_orig_source = re.compile (r"(.+)_(.+?)\.orig(?:-.+)?\.tar\.(?:gz|bz2)$")
 
 re_single_line_field = re.compile(r"^(\S*?)\s*:\s*(.*)")
 re_multi_line_field = re.compile(r"^\s(.*)")
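
The widened patterns accept bzip2 compression, debian.tar.* archives and the orig-<component> tarballs used by the 3.0 formats. Some spot checks, runnable as-is:

    import re

    orig_source_ext_re = r"orig(?:-.+)?\.tar\.(?:gz|bz2)"
    re_source_ext = re.compile("(" + orig_source_ext_re + r"|debian\.tar\.(?:gz|bz2)|diff\.gz|tar\.(?:gz|bz2)|dsc)$")
    re_issource = re.compile(r"(.+)_(.+?)\." + re_source_ext.pattern)
    re_is_orig_source = re.compile(r"(.+)_(.+?)\.orig(?:-.+)?\.tar\.(?:gz|bz2)$")

    assert re_issource.match("hello_1.0-1.debian.tar.bz2")
    assert re_issource.match("hello_1.0.orig-docs.tar.gz")    # component tarball
    assert re_is_orig_source.match("hello_1.0.orig.tar.bz2")
    assert not re_is_orig_source.match("hello_1.0.tar.gz")    # native tarball
    print re_issource.match("hello_1.0.orig.tar.gz").group(3) # orig.tar.gz
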
diff --git a/daklib/srcformats.py b/daklib/srcformats.py
new file mode 100644 (file)
index 0000000..0a74c19
--- /dev/null
@@ -0,0 +1,65 @@
+import re
+
+srcformats = []
+
+class SourceFormat(type):
+    def __new__(cls, name, bases, attrs):
+        klass = super(SourceFormat, cls).__new__(cls, name, bases, attrs)
+        srcformats.append(klass)
+
+        assert str(klass.name)
+        assert iter(klass.requires)
+        assert iter(klass.disallowed)
+
+        klass.re_format = re.compile(klass.format)
+
+        return klass
+
+    @classmethod
+    def reject_msgs(cls, has):
+        if len(cls.requires) != len([x for x in cls.requires if has[x]]):
+            yield "lack of required files for format %s" % cls.name
+
+        for key in cls.disallowed:
+            if has[key]:
+                yield "contains source files not allowed in format %s" % cls.name
+
+class FormatOne(SourceFormat):
+    __metaclass__ = SourceFormat
+
+    name = '1.0'
+    format = r'1.0'
+
+    requires = ()
+    disallowed = ('debian_tar', 'more_orig_tar')
+
+    @classmethod
+    def reject_msgs(cls, has):
+        if not (has['native_tar_gz'] or (has['orig_tar_gz'] and has['debian_diff'])):
+            yield "no .tar.gz or .orig.tar.gz+.diff.gz in 'Files' field."
+        if has['native_tar_gz'] and has['debian_diff']:
+            yield "native package with diff makes no sense"
+        if (has['orig_tar_gz'] != has['orig_tar']) or \
+           (has['native_tar_gz'] != has['native_tar']):
+            yield "contains source files not allowed in format %s" % cls.name
+
+        for msg in super(FormatOne, cls).reject_msgs(has):
+            yield msg
+
+class FormatThree(SourceFormat):
+    __metaclass__ = SourceFormat
+
+    name = '3.x (native)'
+    format = r'3\.\d+ \(native\)'
+
+    requires = ('native_tar',)
+    disallowed = ('orig_tar', 'debian_diff', 'debian_tar', 'more_orig_tar')
+
+class FormatThreeQuilt(SourceFormat):
+    __metaclass__ = SourceFormat
+
+    name = '3.x (quilt)'
+    format = r'3\.\d+ \(quilt\)'
+
+    requires = ('orig_tar', 'debian_tar')
+    disallowed = ('debian_diff', 'native_tar')
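
Defining a subclass is all the registration needed: the SourceFormat metaclass appends it to srcformats, sanity-checks name/requires/disallowed and compiles format into re_format. A sketch of how a caller matches a .dsc Format field against the registry; the 'has' census normally comes from check_dsc_files() in utils.py:

    from collections import defaultdict
    from daklib.srcformats import srcformats

    has = defaultdict(lambda: 0)
    has["native_tar"] = has["native_tar_gz"] = 1   # e.g. hello_1.0.tar.gz

    for fmt in srcformats:
        if fmt.re_format.match("3.0 (native)"):
            print fmt.name, list(fmt.reject_msgs(has))   # 3.x (native) []
            break
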
index a9dea9201cd7e25aa7373461eccfd901468022c8..40410fbb0c8583dca03ce139c3006dddcfcce836 100755 (executable)
@@ -39,14 +39,18 @@ import time
 import re
 import string
 import email as modemail
+import subprocess
 
 from dbconn import DBConn, get_architecture, get_component, get_suite
 from dak_exceptions import *
 from textutils import fix_maintainer
 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
                     re_multi_line_field, re_srchasver, re_verwithext, \
-                    re_parse_maintainer, re_taint_free, re_gpg_uid, re_re_mark, \
-                    re_whitespace_comment
+                    re_parse_maintainer, re_taint_free, re_gpg_uid, \
+                    re_re_mark, re_whitespace_comment, re_issource
+
+from srcformats import srcformats
+from collections import defaultdict
 
 ################################################################################
 
@@ -60,6 +64,25 @@ key_uid_email_cache = {}  #: Cache for email addresses from gpg key uids
 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
 
+import commands
+def dak_getstatusoutput(cmd):
+    # subprocess-based replacement for commands.getstatusoutput(): run cmd
+    # through the shell with stderr folded into stdout.
+    pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
+        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+
+    output = "".join(pipe.stdout.readlines())
+
+    ret = pipe.wait()
+    if ret is None:
+        ret = 0
+
+    return ret, output
+
+# Monkey-patch the commands module so every existing caller picks up the
+# replacement above.
+commands.getstatusoutput = dak_getstatusoutput
+
 ################################################################################
 
 def html_escape(s):
@@ -332,6 +350,85 @@ def check_size(where, files):
 
 ################################################################################
 
+def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
+    """
+    Verify that the files listed in the Files field of the .dsc are
+    those expected given the announced Format.
+
+    @type dsc_filename: string
+    @param dsc_filename: path of .dsc file
+
+    @type dsc: dict
+    @param dsc: the content of the .dsc parsed by C{parse_changes()}
+
+    @type dsc_files: dict
+    @param dsc_files: the file list returned by C{build_file_list()}
+
+    @rtype: list
+    @return: all errors detected
+    """
+    rejmsg = []
+
+    # Parse the file if needed
+    if dsc is None:
+        dsc = parse_changes(dsc_filename, signing_rules=1)
+
+    if dsc_files is None:
+        dsc_files = build_file_list(dsc, is_a_dsc=1)
+
+    # Ensure .dsc lists proper set of source files according to the format
+    # announced
+    has = defaultdict(lambda: 0)
+
+    # Lookup table mapping an extension regex to the 'has' counters it
+    # bumps; entries are tried in order and only the first match counts.
+    ftype_lookup = (
+        (r'orig\.tar\.gz',             ('orig_tar_gz', 'orig_tar')),
+        (r'diff\.gz',                  ('debian_diff',)),
+        (r'tar\.gz',                   ('native_tar_gz', 'native_tar')),
+        (r'debian\.tar\.(gz|bz2)',     ('debian_tar',)),
+        (r'orig\.tar\.(gz|bz2)',       ('orig_tar',)),
+        (r'tar\.(gz|bz2)',             ('native_tar',)),
+        (r'orig-.+\.tar\.(gz|bz2)',    ('more_orig_tar',)),
+    )
+
+    for f in dsc_files.keys():
+        m = re_issource.match(f)
+        if not m:
+            rejmsg.append("%s: %s in Files field not recognised as source."
+                          % (dsc_filename, f))
+            continue
+
+        # Populate 'has' dictionary by resolving keys in lookup table
+        matched = False
+        for regex, keys in ftype_lookup:
+            if re.match(regex, m.group(3)):
+                matched = True
+                for key in keys:
+                    has[key] += 1
+                break
+
+        # File does not match anything in lookup table; reject
+        if not matched:
+            rejmsg.append("%s: unexpected source file '%s'" % (dsc_filename, f))
+
+    # Check for multiple files
+    for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
+        if has[file_type] > 1:
+            rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))
+
+    # Source format specific tests
+    for format in srcformats:
+        if format.re_format.match(dsc['format']):
+            rejmsg.extend([
+                '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
+            ])
+            break
+
+    return rejmsg
+
+################################################################################
+
 def check_hash_fields(what, manifest):
     """
     check_hash_fields ensures that there are no checksum fields in the
@@ -442,10 +537,10 @@ def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
         format = format[:2]
 
     if is_a_dsc:
-        # format = (1,0) are the only formats we currently accept,
         # format = (0,0) are missing format headers of which we still
         # have some in the archive.
-        if format != (1,0) and format != (0,0):
+        if format != (1,0) and format != (0,0) and \
+           format != (3,0,"quilt") and format != (3,0,"native"):
             raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
     else:
         if (format < (1,5) or format > (1,8)):
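
check_dsc_files() is self-contained: given just a .dsc path it parses the file, classifies each entry of the Files field via re_issource and ftype_lookup, and returns the accumulated rejection messages. A minimal sketch (hypothetical filename; run inside a dak checkout with its dependencies available):

    from daklib import utils

    # An empty list means the source files match the announced Format.
    for msg in utils.check_dsc_files('foo_1.0-1.dsc'):
        print(msg)
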
diff --git a/tests/test_regexes.py b/tests/test_regexes.py
index 9187a541d20392dd39d1e98390baa59afeb19602..2f8fed1e87af2076b065e5e7c4c40f94d3243153 100755 (executable)
@@ -1,3 +1,5 @@
+#!/usr/bin/env python
+
 import unittest
 
 import os, sys
@@ -38,3 +40,6 @@ class re_parse_lintian(unittest.TestCase):
             self.MATCH('W: tzdata: binary-without-manpage usr/sbin/tzconfig').groups(),
             ('W', 'tzdata', 'binary-without-manpage', 'usr/sbin/tzconfig')
         )
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/tests/test_srcformats.py b/tests/test_srcformats.py
new file mode 100755 (executable)
index 0000000..9fec4a8
--- /dev/null
+++ b/tests/test_srcformats.py
@@ -0,0 +1,106 @@
+#!/usr/bin/env python
+
+import unittest
+
+import os, sys
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+from collections import defaultdict
+
+from daklib import srcformats
+
+class SourceFormatTestCase(unittest.TestCase):
+    def get_rejects(self, has_vars):
+        has = defaultdict(lambda: 0)
+        has.update(has_vars)
+        return list(self.fmt.reject_msgs(has))
+
+    def assertAccepted(self, has):
+        self.assertEqual(self.get_rejects(has), [])
+
+    def assertRejected(self, has):
+        self.assertNotEqual(self.get_rejects(has), [])
+
+class FormatOneTestCase(SourceFormatTestCase):
+    fmt = srcformats.FormatOne
+
+    def testEmpty(self):
+        self.assertRejected({})
+
+    def testNative(self):
+        self.assertAccepted({'native_tar': 1, 'native_tar_gz': 1})
+
+    def testStandard(self):
+        self.assertAccepted({
+            'orig_tar': 1,
+            'orig_tar_gz': 1,
+            'debian_diff': 1,
+        })
+
+    def testDisallowed(self):
+        self.assertRejected({
+            'native_tar': 1,
+            'native_tar_gz': 1,
+            'debian_tar': 1,
+        })
+        self.assertRejected({
+            'orig_tar': 1,
+            'orig_tar_gz': 1,
+            'debian_diff': 0,
+        })
+        self.assertRejected({
+            'native_tar': 1,
+            'native_tar_gz': 1,
+            'more_orig_tar': 1,
+        })
+        self.assertRejected({
+            'native_tar': 1,
+            'native_tar_gz': 1,
+            'debian_diff': 1,
+        })
+
+class FormatThreeTestCase(SourceFormatTestCase):
+    fmt = srcformats.FormatThree
+
+    def testEmpty(self):
+        self.assertRejected({})
+
+    def testSimple(self):
+        self.assertAccepted({'native_tar': 1})
+
+    def testDisallowed(self):
+        self.assertRejected({'native_tar': 1, 'orig_tar': 1})
+        self.assertRejected({'native_tar': 1, 'debian_diff': 1})
+        self.assertRejected({'native_tar': 1, 'debian_tar': 1})
+        self.assertRejected({'native_tar': 1, 'more_orig_tar': 1})
+
+class FormatThreeQuiltTestCase(SourceFormatTestCase):
+    fmt = srcformats.FormatThreeQuilt
+
+    def testEmpty(self):
+        self.assertRejected({})
+
+    def testSimple(self):
+        self.assertAccepted({'orig_tar': 1, 'debian_tar': 1})
+
+    def testMultipleTarballs(self):
+        self.assertAccepted({
+            'orig_tar': 1,
+            'debian_tar': 1,
+            'more_orig_tar': 42,
+        })
+
+    def testDisallowed(self):
+        self.assertRejected({
+            'orig_tar': 1,
+            'debian_tar': 1,
+            'debian_diff': 1
+        })
+        self.assertRejected({
+            'orig_tar': 1,
+            'debian_tar': 1,
+            'native_tar': 1,
+        })
+
+if __name__ == '__main__':
+    unittest.main()
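
With the __main__ guard and the executable mode (100755), the new suite can also be run standalone, e.g.:

    python tests/test_srcformats.py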