git.decadent.org.uk Git - dak.git/commitdiff
merge from master
author Mike O'Connor <stew@vireo.org>
Fri, 30 Oct 2009 09:03:24 +0000 (10:03 +0100)
committer Mike O'Connor <stew@vireo.org>
Fri, 30 Oct 2009 09:03:24 +0000 (10:03 +0100)
Signed-off-by: Mike O'Connor <stew@vireo.org>
24 files changed:
config/debian/cron.dinstall
config/debian/cron.weekly
config/debian/lintian.tags
dak/cruft_report.py
dak/dak.py
dak/dakdb/update16.py [new file with mode: 0755]
dak/generate_index_diffs.py
dak/import_keyring.py
dak/process_new.py
dak/process_unchecked.py
dak/update_db.py
daklib/daklog.py
daklib/dbconn.py
daklib/formats.py [new file with mode: 0644]
daklib/queue.py
daklib/srcformats.py
daklib/utils.py
docs/README.quotes
scripts/debian/byhand-di
scripts/debian/mkfilesindices
scripts/debian/mklslar
scripts/debian/mkmaintainers
tests/test_formats.py [new file with mode: 0755]
tests/test_srcformats.py

index 1c9fa5afefff3284fd27f860fcb63d40d5aad16d..200c7a646986cbf7b189a1d28a7c2dee3be5773a 100755 (executable)
@@ -197,9 +197,6 @@ function accepted() {
 function cruft() {
     log "Checking for cruft in overrides"
     dak check-overrides
-
-    log "Fixing symlinks in $ftpdir"
-    symlinks -d -r $ftpdir
 }
 
 function msfl() {
@@ -252,6 +249,7 @@ function mpfm() {
 function packages() {
     log "Generating Packages and Sources files"
     cd $configdir
+    GZIP='--rsyncable' ; export GZIP
     apt-ftparchive generate apt.conf
 }
 
@@ -905,14 +903,6 @@ GO=(
 )
 stage $GO &
 
-GO=(
-    FUNC="aptftpcleanup"
-    TIME="apt-ftparchive cleanup"
-    ARGS=""
-    ERR="false"
-)
-stage $GO &
-
 GO=(
     FUNC="merkel3"
     TIME="merkel ddaccessible sync"
@@ -927,6 +917,14 @@ GO=(
     ARGS=""
     ERR=""
 )
+stage $GO &
+
+GO=(
+    FUNC="aptftpcleanup"
+    TIME="apt-ftparchive cleanup"
+    ARGS=""
+    ERR="false"
+)
 stage $GO
 
 log "Daily cron scripts successful, all done"
index 5ab9d8b88a7efcb64f5db92f74ad72c33b3a3ff4..34f0c64a4a535aa04da929ef51c4f408ddd0e371 100755 (executable)
@@ -57,6 +57,9 @@ apt-ftparchive -q clean apt.conf.buildd
 echo "Update wanna-build database dump"
 /org/ftp.debian.org/scripts/nfu/get-w-b-db
 
+echo "Fixing symlinks in $ftpdir"
+symlinks -d -r $ftpdir
+
 echo "Finally, all is done, compressing logfile"
 exec > /dev/null 2>&1
 
index 1c05410ce2b63a3066c078f1d8e4186fb2854362..0dabaf58e9684760c1e648a4dc00bb7ae184b090 100644 (file)
@@ -10,7 +10,11 @@ lintian:
     - binary-with-bad-dynamic-table
     - usr-share-doc-symlink-without-dependency
     - mknod-in-maintainer-script
+    - package-contains-info-dir-file
   error:
+    - wrong-file-owner-uid-or-gid
+    - bad-relation
+    - FSSTND-dir-in-usr
     - binary-in-etc
     - missing-dependency-on-perlapi
     - copyright-lists-upstream-authors-with-dh_make-boilerplate
index 63374859fa8019a10165ec318dea5cea14c38ce2..0a1534c59dfaf8e0c6bedf9478f0133c81a12e8c 100755 (executable)
@@ -357,9 +357,9 @@ def main ():
 
     # Set up checks based on mode
     if Options["Mode"] == "daily":
-        checks = [ "nbs", "nviu", "obsolete source" ]
+        checks = [ "nbs", "nviu", "nvit", "obsolete source" ]
     elif Options["Mode"] == "full":
-        checks = [ "nbs", "nviu", "obsolete source", "nfu", "dubious nbs", "bnb", "bms", "anais" ]
+        checks = [ "nbs", "nviu", "nvit", "obsolete source", "nfu", "dubious nbs", "bnb", "bms", "anais" ]
     else:
         utils.warn("%s is not a recognised mode - only 'full' or 'daily' are understood." % (Options["Mode"]))
         usage(1)
@@ -520,6 +520,9 @@ def main ():
     if "nviu" in checks:
         do_newer_version('unstable', 'experimental', 'NVIU', session)
 
+    if "nvit" in checks:
+        do_newer_version('testing', 'testing-proposed-updates', 'NVIT', session)
+
     if "nbs" in checks:
         do_nbs(real_nbs)
 
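
The new "nvit" check is the testing counterpart of "nviu": it reports source packages whose version in testing is newer than the one in testing-proposed-updates. A sketch of the underlying comparison, assuming plain {package: version} maps instead of dak's database-backed do_newer_version():

    # Hedged sketch: per-package version comparison between two suites
    # using python-apt's Debian version ordering.
    import apt_pkg
    apt_pkg.init()

    def newer_in(source, target):
        """Packages whose version in 'source' is newer than in 'target'."""
        hits = []
        for pkg, ver in source.items():
            tver = target.get(pkg)
            if tver is not None and apt_pkg.VersionCompare(ver, tver) > 0:
                hits.append((pkg, ver, tver))
        return hits

    print newer_in({'dak': '1.0-2'}, {'dak': '1.0-1'})
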
index 052f3b3ef7b8b7e48717116809771f20d80d75d6..f9839ea0052d9f4d4fab1b0a846555b40734a5db 100755 (executable)
@@ -34,8 +34,13 @@ G{importgraph}
 ################################################################################
 
 import sys
+import traceback
 import daklib.utils
 
+from daklib.daklog import Logger
+from daklib.config import Config
+from daklib.dak_exceptions import CantOpenError
+
 ################################################################################
 
 def init():
@@ -152,6 +157,12 @@ Available commands:"""
 def main():
     """Launch dak functionality."""
 
+
+    try:
+        logger = Logger(Config(), 'dak top-level', print_starting=False)
+    except CantOpenError:
+        logger = None
+
     functionality = init()
     modules = [ command for (command, _) in functionality ]
 
@@ -189,7 +200,21 @@ def main():
     # Invoke the module
     module = __import__(cmdname.replace("-","_"))
 
-    module.main()
+    try:
+        module.main()
+    except KeyboardInterrupt:
+        msg = 'KeyboardInterrupt caught; exiting'
+        print msg
+        if logger:
+            logger.log([msg])
+        sys.exit(1)
+    except SystemExit:
+        pass
+    except:
+        if logger:
+            for line in traceback.format_exc().split('\n')[:-1]:
+                logger.log(['exception', line])
+        raise
 
 ################################################################################
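
The wrapper above gives every dak subcommand uniform crash handling: Ctrl-C is logged and exits cleanly, SystemExit passes through untouched, and anything else is written to the log line by line before being re-raised for the console. The pattern in isolation (Logger stands in for daklib.daklog.Logger):

    # Top-level exception logging as introduced above: record the full
    # traceback, then re-raise so the failure is still visible.
    import sys
    import traceback

    def run(main, logger=None):
        try:
            main()
        except KeyboardInterrupt:
            print 'KeyboardInterrupt caught; exiting'
            if logger:
                logger.log(['KeyboardInterrupt caught; exiting'])
            sys.exit(1)
        except SystemExit:
            pass                 # let subcommands' sys.exit() through
        except:
            if logger:
                for line in traceback.format_exc().split('\n')[:-1]:
                    logger.log(['exception', line])
            raise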
 
diff --git a/dak/dakdb/update16.py b/dak/dakdb/update16.py
new file mode 100755 (executable)
index 0000000..eca9b48
--- /dev/null
@@ -0,0 +1,178 @@
+#!/usr/bin/env python
+# coding=utf8
+
+"""
+Adding tables for key-based ACLs and blocks
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2009  Mark Hymers <mhy@debian.org>
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+################################################################################
+
+
+################################################################################
+
+import psycopg2
+import time
+from daklib.dak_exceptions import DBUpdateError
+
+################################################################################
+
+def do_update(self):
+    print "Adding tables for handling key-based ACLs and upload blocks"
+
+    try:
+        c = self.db.cursor()
+
+        # Fix up some older table permissions
+        c.execute("GRANT SELECT ON src_format TO public")
+        c.execute("GRANT ALL ON src_format TO ftpmaster")
+        c.execute("GRANT USAGE ON src_format_id_seq TO ftpmaster")
+
+        c.execute("GRANT SELECT ON suite_src_formats TO public")
+        c.execute("GRANT ALL ON suite_src_formats TO ftpmaster")
+
+        # Source ACLs table
+        print "Source ACLs table"
+        c.execute("""
+        CREATE TABLE source_acl (
+              id SERIAL PRIMARY KEY,
+              access_level TEXT UNIQUE NOT NULL
+        )
+        """)
+
+        ## Can upload all packages
+        c.execute("INSERT INTO source_acl (access_level) VALUES ('full')")
+        ## Can upload only packages marked as DM upload allowed
+        c.execute("INSERT INTO source_acl (access_level) VALUES ('dm')")
+
+        c.execute("GRANT SELECT ON source_acl TO public")
+        c.execute("GRANT ALL ON source_acl TO ftpmaster")
+        c.execute("GRANT USAGE ON source_acl_id_seq TO ftpmaster")
+
+        # Binary ACLs table
+        print "Binary ACLs table"
+        c.execute("""
+        CREATE TABLE binary_acl (
+              id SERIAL PRIMARY KEY,
+              access_level TEXT UNIQUE NOT NULL
+        )
+        """)
+
+        ## Can upload any architectures of binary packages
+        c.execute("INSERT INTO binary_acl (access_level) VALUES ('full')")
+        ## Can upload debs where architectures are based on the map table binary_acl_map
+        c.execute("INSERT INTO binary_acl (access_level) VALUES ('map')")
+
+        c.execute("GRANT SELECT ON binary_acl TO public")
+        c.execute("GRANT ALL ON binary_acl TO ftpmaster")
+        c.execute("GRANT USAGE ON binary_acl_id_seq TO ftpmaster")
+
+        # This is only used when the fingerprint's binary_acl is 'map' (id 2)
+        c.execute("""
+        CREATE TABLE binary_acl_map (
+              id SERIAL PRIMARY KEY,
+              fingerprint_id INT4 REFERENCES fingerprint (id) NOT NULL,
+              architecture_id INT4 REFERENCES architecture (id) NOT NULL,
+
+              UNIQUE (fingerprint_id, architecture_id)
+        )""")
+
+        c.execute("GRANT SELECT ON binary_acl_map TO public")
+        c.execute("GRANT ALL ON binary_acl_map TO ftpmaster")
+        c.execute("GRANT USAGE ON binary_acl_map_id_seq TO ftpmaster")
+
+        ## NULL means no source upload access (i.e. any upload containing source
+        ## will be rejected)
+        c.execute("ALTER TABLE fingerprint ADD COLUMN source_acl_id INT4 REFERENCES source_acl(id) DEFAULT NULL")
+
+        ## NULL means no binary upload access
+        c.execute("ALTER TABLE fingerprint ADD COLUMN binary_acl_id INT4 REFERENCES binary_acl(id) DEFAULT NULL")
+
+        ## TRUE here means that if the person doesn't have binary upload permissions for
+        ## an architecture, we'll reject the .changes.  FALSE means that we'll simply
+        ## dispose of those particular binaries
+        c.execute("ALTER TABLE fingerprint ADD COLUMN binary_reject BOOLEAN NOT NULL DEFAULT TRUE")
+
+        # Blockage table (replaces the hard coded stuff we used to have in extensions)
+        print "Adding blockage table"
+        c.execute("""
+        CREATE TABLE upload_blocks (
+              id             SERIAL PRIMARY KEY,
+              source         TEXT NOT NULL,
+              version        TEXT DEFAULT NULL,
+              fingerprint_id INT4 REFERENCES fingerprint (id),
+              uid_id         INT4 REFERENCES uid (id),
+              reason         TEXT NOT NULL,
+
+              CHECK (fingerprint_id IS NOT NULL OR uid_id IS NOT NULL)
+        )""")
+
+        c.execute("GRANT SELECT ON upload_blocks TO public")
+        c.execute("GRANT ALL ON upload_blocks TO ftpmaster")
+        c.execute("GRANT USAGE ON upload_blocks_id_seq TO ftpmaster")
+
+        c.execute("ALTER TABLE keyrings ADD COLUMN default_source_acl_id INT4 REFERENCES source_acl (id) DEFAULT NULL")
+        c.execute("ALTER TABLE keyrings ADD COLUMN default_binary_acl_id INT4 REFERENCES binary_acl (id) DEFAULT NULL")
+        c.execute("ALTER TABLE keyrings ADD COLUMN default_binary_reject BOOLEAN NOT NULL DEFAULT TRUE")
+        # Set up keyring priorities
+        c.execute("ALTER TABLE keyrings ADD COLUMN priority INT4 NOT NULL DEFAULT 100")
+        # And then we don't need the DM stuff any more
+        c.execute("ALTER TABLE keyrings DROP COLUMN debian_maintainer")
+
+        # Default ACLs for keyrings
+        c.execute("""
+        CREATE TABLE keyring_acl_map (
+              id SERIAL PRIMARY KEY,
+              keyring_id      INT4 REFERENCES keyrings (id) NOT NULL,
+              architecture_id INT4 REFERENCES architecture (id) NOT NULL,
+
+              UNIQUE (keyring_id, architecture_id)
+        )""")
+
+        c.execute("GRANT SELECT ON keyring_acl_map TO public")
+        c.execute("GRANT ALL ON keyring_acl_map TO ftpmaster")
+        c.execute("GRANT USAGE ON keyring_acl_map_id_seq TO ftpmaster")
+
+        # Set up some default stuff; default to old behaviour
+        print "Setting up some defaults"
+
+        c.execute("""UPDATE keyrings SET default_source_acl_id = (SELECT id FROM source_acl WHERE access_level = 'full'),
+                                         default_binary_acl_id = (SELECT id FROM binary_acl WHERE access_level = 'full')""")
+
+        c.execute("""UPDATE keyrings SET default_source_acl_id = (SELECT id FROM source_acl WHERE access_level = 'dm'),
+                                         default_binary_acl_id = (SELECT id FROM binary_acl WHERE access_level = 'full')
+                                     WHERE name = 'debian-maintainers.gpg'""")
+
+        c.execute("""UPDATE keyrings SET priority = 90 WHERE name = 'debian-maintainers.gpg'""")
+
+        # Initialize the existing keys
+        c.execute("""UPDATE fingerprint SET binary_acl_id = (SELECT default_binary_acl_id FROM keyrings
+                                                              WHERE keyrings.id = fingerprint.keyring)""")
+
+        c.execute("""UPDATE fingerprint SET source_acl_id = (SELECT default_source_acl_id FROM keyrings
+                                                              WHERE keyrings.id = fingerprint.keyring)""")
+
+        print "Updating config version"
+        c.execute("UPDATE config SET value = '16' WHERE name = 'db_revision'")
+        self.db.commit()
+
+    except psycopg2.ProgrammingError, msg:
+        self.db.rollback()
+        raise DBUpdateError, "Unable to apply ACLs update (16), rollback issued. Error message : %s" % (str(msg))
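
With update 16 applied, a key's upload rights hang off its fingerprint row: source_acl_id is NULL (no source uploads), 'dm' or 'full'; binary_acl_id is NULL, 'map' or 'full'; and binary_acl_map lists the architectures allowed for 'map'-level keys. A minimal read-only sketch against the new schema (dbname and fingerprint are placeholders):

    # Look up the effective ACLs for one fingerprint under the
    # update-16 schema.
    import psycopg2

    conn = psycopg2.connect("dbname=projectb")
    c = conn.cursor()
    c.execute("""SELECT s.access_level, b.access_level, f.binary_reject
                   FROM fingerprint f
              LEFT JOIN source_acl s ON s.id = f.source_acl_id
              LEFT JOIN binary_acl b ON b.id = f.binary_acl_id
                  WHERE f.fingerprint = %s""", ('0123456789ABCDEF01234567',))
    print c.fetchone()    # e.g. ('full', 'full', True); Nones mean no access
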
index 4222c0cf4fb9446b0bc19a7127dab4e9fe4da24e..7e4b0058e335186a26d609ee5e5cb18ad0d6165d 100755 (executable)
@@ -254,7 +254,7 @@ def genchanges(Options, outdir, oldfile, origfile, maxdiffs = 14):
         if not os.path.isdir(outdir):
             os.mkdir(outdir)
 
-        w = os.popen("diff --ed - %s | gzip -c -9 > %s.gz" %
+        w = os.popen("diff --ed - %s | gzip --rsyncable -c -9 > %s.gz" %
                      (newfile, difffile), "w")
         pipe_file(oldf, w)
         oldf.close()
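
Since the command is still assembled with % formatting and run through a shell by os.popen, the same pipeline can be sketched shell-free with subprocess, which sidesteps quoting problems for unusual filenames (dak itself keeps the os.popen form; names below are illustrative):

    # diff --ed - newfile | gzip --rsyncable -c -9 > difffile.gz, no shell.
    import subprocess

    def ed_diff_gz(oldf, newfile, difffile):
        out = open(difffile + '.gz', 'wb')
        gz = subprocess.Popen(['gzip', '--rsyncable', '-c', '-9'],
                              stdin=subprocess.PIPE, stdout=out)
        diff = subprocess.Popen(['diff', '--ed', '-', newfile],
                                stdin=oldf, stdout=gz.stdin)
        gz.stdin.close()       # gzip sees EOF once diff exits
        diff.wait()            # diff exits 1 when the files differ; expected
        gz.wait()
        out.close()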
index 0b670357f2999c9d8d1a066e1290eaa24eda1f42..e26eb7e54d390d46641a506fba310427262267ee 100755 (executable)
@@ -2,6 +2,7 @@
 
 """ Imports a keyring into the database """
 # Copyright (C) 2007  Anthony Towns <aj@erisian.com.au>
+# Copyright (C) 2009  Mark Hymers <mhy@debian.org>
 
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 ################################################################################
 
 import sys, os, re
-import apt_pkg, ldap, email.Utils
+import apt_pkg, ldap
 
 from daklib.config import Config
 from daklib.dbconn import *
 
-
 # Globals
 Options = None
 
@@ -38,6 +38,7 @@ def get_uid_info(session):
     for (keyid, uid, name) in q.fetchall():
         byname[uid] = (keyid, name)
         byid[keyid] = (uid, name)
+
     return (byname, byid)
 
 def get_fingerprint_info(session):
@@ -49,126 +50,6 @@ def get_fingerprint_info(session):
 
 ################################################################################
 
-def get_ldap_name(entry):
-    name = []
-    for k in ["cn", "mn", "sn"]:
-        ret = entry.get(k)
-        if ret and ret[0] != "" and ret[0] != "-":
-            name.append(ret[0])
-    return " ".join(name)
-
-################################################################################
-
-class Keyring(object):
-    gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
-                     " --with-colons --fingerprint --fingerprint"
-    keys = {}
-    fpr_lookup = {}
-
-    def de_escape_gpg_str(self, str):
-        esclist = re.split(r'(\\x..)', str)
-        for x in range(1,len(esclist),2):
-            esclist[x] = "%c" % (int(esclist[x][2:],16))
-        return "".join(esclist)
-
-    def __init__(self, keyring):
-        self.cnf = Config()
-        k = os.popen(self.gpg_invocation % keyring, "r")
-        keys = self.keys
-        key = None
-        fpr_lookup = self.fpr_lookup
-        signingkey = False
-        for line in k.xreadlines():
-            field = line.split(":")
-            if field[0] == "pub":
-                key = field[4]
-                (name, addr) = email.Utils.parseaddr(field[9])
-                name = re.sub(r"\s*[(].*[)]", "", name)
-                if name == "" or addr == "" or "@" not in addr:
-                    name = field[9]
-                    addr = "invalid-uid"
-                name = self.de_escape_gpg_str(name)
-                keys[key] = {"email": addr}
-                if name != "": keys[key]["name"] = name
-                keys[key]["aliases"] = [name]
-                keys[key]["fingerprints"] = []
-                signingkey = True
-            elif key and field[0] == "sub" and len(field) >= 12:
-                signingkey = ("s" in field[11])
-            elif key and field[0] == "uid":
-                (name, addr) = email.Utils.parseaddr(field[9])
-                if name and name not in keys[key]["aliases"]:
-                    keys[key]["aliases"].append(name)
-            elif signingkey and field[0] == "fpr":
-                keys[key]["fingerprints"].append(field[9])
-                fpr_lookup[field[9]] = key
-
-    def generate_desired_users(self):
-        if Options["Generate-Users"]:
-            format = Options["Generate-Users"]
-            return self.generate_users_from_keyring(format)
-        if Options["Import-Ldap-Users"]:
-            return self.import_users_from_ldap()
-        return ({}, {})
-
-    def import_users_from_ldap(self):
-        LDAPDn = self.cnf["Import-LDAP-Fingerprints::LDAPDn"]
-        LDAPServer = self.cnf["Import-LDAP-Fingerprints::LDAPServer"]
-        l = ldap.open(LDAPServer)
-        l.simple_bind_s("","")
-        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
-               "(&(keyfingerprint=*)(gidnumber=%s))" % (self.cnf["Import-Users-From-Passwd::ValidGID"]),
-               ["uid", "keyfingerprint", "cn", "mn", "sn"])
-
-        ldap_fin_uid_id = {}
-
-        byuid = {}
-        byname = {}
-        keys = self.keys
-        fpr_lookup = self.fpr_lookup
-
-        for i in Attrs:
-            entry = i[1]
-            uid = entry["uid"][0]
-            name = get_ldap_name(entry)
-            fingerprints = entry["keyFingerPrint"]
-            keyid = None
-            for f in fingerprints:
-                key = fpr_lookup.get(f, None)
-                if key not in keys: continue
-                keys[key]["uid"] = uid
-
-                if keyid != None: continue
-                keyid = get_or_set_uid(uid).uid
-                byuid[keyid] = (uid, name)
-                byname[uid] = (keyid, name)
-
-        return (byname, byuid)
-
-    def generate_users_from_keyring(self, format):
-        byuid = {}
-        byname = {}
-        keys = self.keys
-        any_invalid = False
-        for x in keys.keys():
-            if keys[x]["email"] == "invalid-uid":
-                any_invalid = True
-                keys[x]["uid"] = format % "invalid-uid"
-            else:
-                uid = format % keys[x]["email"]
-                keyid = get_or_set_uid(uid).uid
-                byuid[keyid] = (uid, keys[x]["name"])
-                byname[uid] = (keyid, keys[x]["name"])
-                keys[x]["uid"] = uid
-        if any_invalid:
-            uid = format % "invalid-uid"
-            keyid = get_or_set_uid(uid).uid
-            byuid[keyid] = (uid, "ungeneratable user id")
-            byname[uid] = (keyid, "ungeneratable user id")
-        return (byname, byuid)
-
-################################################################################
-
 def usage (exit_code=0):
     print """Usage: dak import-keyring [OPTION]... [KEYRING]
   -h, --help                  show this help and exit.
@@ -197,6 +78,7 @@ def main():
     ### Parse options
 
     Options = cnf.SubTree("Import-Keyring::Options")
+
     if Options["Help"]:
         usage()
 
@@ -204,7 +86,6 @@ def main():
         usage(1)
 
     ### Keep track of changes made
-
     changes = []   # (uid, changes strings)
 
     ### Initialise
@@ -216,22 +97,21 @@ def main():
     ### Parse the keyring
 
     keyringname = keyring_names[0]
-    keyring = Keyring(keyringname)
-
-    is_dm = "false"
-    if cnf.has_key("Import-Keyring::"+keyringname+"::Debian-Maintainer"):
-        session.execute("UPDATE keyrings SET debian_maintainer = :dm WHERE name = :name",
-                        {'dm': cnf["Import-Keyring::"+keyringname+"::Debian-Maintainer"],
-                         'name': keyringname.split("/")[-1]})
+    keyring = get_keyring(keyringname, session)
+    if not keyring:
+        print "E: Can't load keyring %s from database" % keyringname
+        sys.exit(1)
 
-        is_dm = cnf["Import-Keyring::"+keyringname+"::Debian-Maintainer"]
-
-    keyring_id = get_or_set_keyring(
-        keyringname.split("/")[-1], session,
-    ).keyring_id
+    keyring.load_keys(keyringname)
 
     ### Generate new uid entries if they're needed (from LDAP or the keyring)
-    (desuid_byname, desuid_byid) = keyring.generate_desired_users()
+    if Options["Generate-Users"]:
+        format = Options["Generate-Users"]
+        (desuid_byname, desuid_byid) = keyring.generate_users_from_keyring(Options["Generate-Users"], session)
+    elif Options["Import-Ldap-Users"]:
+        (desuid_byname, desuid_byid) = keyring.import_users_from_ldap(session)
+    else:
+        (desuid_byname, desuid_byid) = ({}, {})
 
     ### Cache all the existing uid entries
     (db_uid_byname, db_uid_byid) = get_uid_info(session)
@@ -240,7 +120,7 @@ def main():
     for keyid in desuid_byid.keys():
         uid = (keyid, desuid_byid[keyid][0])
         name = desuid_byid[keyid][1]
-        oname = db_uid_byname[keyid][1]
+        oname = db_uid_byid[keyid][1]
         if name and oname != name:
             changes.append((uid[1], "Full name: %s" % (name)))
             session.execute("UPDATE uid SET name = :name WHERE id = :keyid",
@@ -258,17 +138,28 @@ def main():
         if keyid == None:
             keyid = db_fin_info.get(keyring.keys[z]["fingerprints"][0], [None])[0]
         for y in keyring.keys[z]["fingerprints"]:
-            fpr[y] = (keyid,keyring_id)
+            fpr[y] = (keyid, keyring.keyring_id)
 
     # For any keys that used to be in this keyring, disassociate them.
     # We don't change the uid, leaving that for historical info; if
     # the id should change, it'll be set when importing another keyring.
 
     for f,(u,fid,kr) in db_fin_info.iteritems():
-        if kr != keyring_id: continue
-        if f in fpr: continue
+        if kr != keyring.keyring_id:
+            continue
+
+        if f in fpr:
+            continue
+
         changes.append((db_uid_byid.get(u, [None])[0], "Removed key: %s" % (f)))
-        session.execute("UPDATE fingerprint SET keyring = NULL WHERE id = :fprid", {'fprid': fid})
+        session.execute("""UPDATE fingerprint
+                              SET keyring = NULL,
+                                  source_acl_id = NULL,
+                                  binary_acl_id = NULL,
+                                  binary_reject = TRUE
+                            WHERE id = :fprid""", {'fprid': fid})
+
+        session.execute("""DELETE FROM binary_acl_map WHERE fingerprint_id = :fprid""", {'fprid': fid})
 
     # For the keys in this keyring, add/update any fingerprints that've
     # changed.
@@ -276,19 +167,36 @@ def main():
     for f in fpr:
         newuid = fpr[f][0]
         newuiduid = db_uid_byid.get(newuid, [None])[0]
+
         (olduid, oldfid, oldkid) = db_fin_info.get(f, [-1,-1,-1])
-        if olduid == None: olduid = -1
-        if oldkid == None: oldkid = -1
+
+        if olduid == None:
+            olduid = -1
+
+        if oldkid == None:
+            oldkid = -1
+
         if oldfid == -1:
             changes.append((newuiduid, "Added key: %s" % (f)))
+            fp = Fingerprint()
+            fp.fingerprint = f
+            fp.keyring_id = keyring.keyring_id
             if newuid:
-                session.execute("""INSERT INTO fingerprint (fingerprint, uid, keyring)
-                                        VALUES (:fpr, :uid, :keyring)""",
-                                {'fpr': f, 'uid': uid, 'keyring': keyring_id})
-            else:
-                session.execute("""INSERT INTO fingerprint (fingerprint, keyring)
-                                        VALUES (:fpr, :keyring)""",
-                                {'fpr': f, 'keyring': keyring_id})
+                fp.uid_id = newuid
+
+            fp.binary_acl_id = keyring.default_binary_acl_id
+            fp.source_acl_id = keyring.default_source_acl_id
+            fp.default_binary_reject = keyring.default_binary_reject
+            session.add(fp)
+            session.flush()
+
+            for k in keyring.keyring_acl_map:
+                ba = BinaryACLMap()
+                ba.fingerprint_id = fp.fingerprint_id
+                ba.architecture_id = k.architecture_id
+                session.add(ba)
+                session.flush()
+
         else:
             if newuid and olduid != newuid:
                 if olduid != -1:
@@ -297,25 +205,62 @@ def main():
                 else:
                     changes.append((newuiduid, "Linked key: %s" % f))
                     changes.append((newuiduid, "  (formerly unowned)"))
+
                 session.execute("UPDATE fingerprint SET uid = :uid WHERE id = :fpr",
                                 {'uid': newuid, 'fpr': oldfid})
 
-            if oldkid != keyring_id:
+            # Don't move a key from a keyring with a higher priority to a lower one
+            if oldkid != keyring.keyring_id:
+                movekey = False
+                if oldkid == -1:
+                    movekey = True
+                else:
+                    try:
+                        oldkeyring = session.query(Keyring).filter_by(keyring_id=oldkid).one()
+                    except NotFoundError:
+                        print "ERROR: Cannot find old keyring with id %s" % oldkid
+                        sys.exit(1)
+
+                    if oldkeyring.priority < keyring.priority:
+                        movekey = True
+
                 # Only change the keyring if it won't result in a loss of permissions
-                q = session.execute("SELECT debian_maintainer FROM keyrings WHERE id = :keyring",
-                                    {'keyring': keyring_id})
-                if is_dm == "false" and not q.fetchall()[0][0]:
-                    session.execute("UPDATE fingerprint SET keyring = :keyring WHERE id = :fpr",
-                                    {'keyring': keyring_id, 'fpr': oldfid})
+                if movekey:
+                    session.execute("""DELETE FROM binary_acl_map WHERE fingerprint_id = :fprid""", {'fprid': oldfid})
+
+                    session.execute("""UPDATE fingerprint
+                                          SET keyring = :keyring,
+                                              source_acl_id = :source_acl_id,
+                                              binary_acl_id = :binary_acl_id,
+                                              binary_reject = :binary_reject
+                                        WHERE id = :fpr""",
+                                    {'keyring': keyring.keyring_id,
+                                     'source_acl_id': keyring.default_source_acl_id,
+                                     'binary_acl_id': keyring.default_binary_acl_id,
+                                     'binary_reject': keyring.default_binary_reject,
+                                     'fpr': oldfid})
+
+                    session.flush()
+
+                    for k in keyring.keyring_acl_map:
+                        ba = BinaryACLMap()
+                        ba.fingerprint_id = oldfid
+                        ba.architecture_id = k.architecture_id
+                        session.add(ba)
+                        session.flush()
+
                 else:
-                    print "Key %s exists in both DM and DD keyrings. Not demoting." % (f)
+                    print "Key %s exists in both %s and %s keyrings. Not demoting." % (oldkeyring.keyring_name,
+                                                                                       keyring.keyring_name)
 
     # All done!
     session.commit()
 
+    # Print a summary
     changesd = {}
     for (k, v) in changes:
-        if k not in changesd: changesd[k] = ""
+        if k not in changesd:
+            changesd[k] = ""
         changesd[k] += "    %s\n" % (v)
 
     keys = changesd.keys()
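
In outline, the rewritten import leans on the ORM-side Keyring from daklib/dbconn.py: the keyring row must already exist in the database, load_keys() parses the gpg --with-colons listing, and uid generation goes through the session-aware methods. A sketch of the flow, mirroring the calls above (keyring path illustrative; session obtained as elsewhere in dak):

    # The new import-keyring flow in miniature.
    from daklib.dbconn import DBConn, get_keyring

    session = DBConn().session()
    name = '/srv/keyring.debian.org/keyrings/debian-keyring.gpg'

    keyring = get_keyring(name, session)    # returns None if not in the db
    if keyring is None:
        raise SystemExit("E: Can't load keyring %s from database" % name)

    keyring.load_keys(name)                 # parse gpg --with-colons output

    # uid generation, one of the two depending on the options:
    (byname, byid) = keyring.generate_users_from_keyring('%s', session)
    # (byname, byid) = keyring.import_users_from_ldap(session)
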
index 9a6c8e330f787b7828a5ee14e8db8a21064c4e58..185157ac27290002961ebd4693e9c50fa94ff1de 100755 (executable)
@@ -77,43 +77,13 @@ Sections = None
 ################################################################################
 
 def recheck(upload, session):
-    files = upload.pkg.files
-
-    cnf = Config()
-    for f in files.keys():
-        # The .orig.tar.gz can disappear out from under us is it's a
-        # duplicate of one in the archive.
-        if not files.has_key(f):
-            continue
-        # Check that the source still exists
-        if files[f]["type"] == "deb":
-            source_version = files[f]["source version"]
-            source_package = files[f]["source package"]
-            if not upload.pkg.changes["architecture"].has_key("source") \
-               and not upload.source_exists(source_package, source_version, upload.pkg.changes["distribution"].keys()):
-                source_epochless_version = re_no_epoch.sub('', source_version)
-                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
-                found = 0
-                for q in ["Accepted", "Embargoed", "Unembargoed", "Newstage"]:
-                    if cnf.has_key("Dir::Queue::%s" % (q)):
-                        if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
-                            found = 1
-                if not found:
-                    upload.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
-
-        # Version and file overwrite checks
-        if files[f]["type"] == "deb":
-            upload.check_binary_against_db(f, session)
-        elif files[f]["type"] == "dsc":
-            upload.check_source_against_db(f, session)
-            upload.check_dsc_against_db(f, session)
-
+    upload.recheck()
     if len(upload.rejects) > 0:
         answer = "XXX"
         if Options["No-Action"] or Options["Automatic"] or Options["Trainee"]:
             answer = 'S'
 
-        print "REJECT\n" + upload.rejects.join("\n"),
+        print "REJECT\n%s" % '\n'.join(upload.rejects)
         prompt = "[R]eject, Skip, Quit ?"
 
         while prompt.find(answer) == -1:
@@ -124,7 +94,7 @@ def recheck(upload, session):
             answer = answer[:1].upper()
 
         if answer == 'R':
-            upload.do_reject(manual=0, reject_message=upload.rejects.join("\n"))
+            upload.do_reject(manual=0, reject_message='\n'.join(upload.rejects))
             os.unlink(upload.pkg.changes_file[:-8]+".dak")
             return 0
         elif answer == 'S':
@@ -745,7 +715,6 @@ def usage (exit_code=0):
     print """Usage: dak process-new [OPTION]... [CHANGES]...
   -a, --automatic           automatic run
   -h, --help                show this help and exit.
-  -C, --comments-dir=DIR    use DIR as comments-dir, for [o-]p-u-new
   -m, --manual-reject=MSG   manual reject with `msg'
   -n, --no-action           don't do anything
   -t, --trainee             FTP Trainee mode
@@ -847,39 +816,6 @@ def lock_package(package):
     finally:
         os.unlink(path)
 
-# def move_to_dir (upload, dest, perms=0660, changesperms=0664):
-#     utils.move (upload.pkg.changes_file, dest, perms=changesperms)
-#     file_keys = upload.pkg.files.keys()
-#     for f in file_keys:
-#         utils.move (f, dest, perms=perms)
-
-# def is_source_in_queue_dir(qdir):
-#     entries = [ x for x in os.listdir(qdir) if x.startswith(Upload.pkg.changes["source"])
-#                 and x.endswith(".changes") ]
-#     for entry in entries:
-#         # read the .dak
-#         u = queue.Upload(Cnf)
-#         u.pkg.changes_file = os.path.join(qdir, entry)
-#         u.update_vars()
-#         if not u.pkg.changes["architecture"].has_key("source"):
-#             # another binary upload, ignore
-#             continue
-#         if Upload.pkg.changes["version"] != u.pkg.changes["version"]:
-#             # another version, ignore
-#             continue
-#         # found it!
-#         return True
-#     return False
-
-# def move_to_holding(suite, queue_dir):
-#     print "Moving to %s holding area." % (suite.upper(),)
-#     if Options["No-Action"]:
-#      return
-#     Logger.log(["Moving to %s" % (suite,), Upload.pkg.changes_file])
-#     Upload.dump_vars(queue_dir)
-#     move_to_dir(queue_dir, perms=0664)
-#     os.unlink(Upload.pkg.changes_file[:-8]+".dak")
-
 def _accept(upload):
     if Options["No-Action"]:
         return
@@ -887,87 +823,21 @@ def _accept(upload):
     upload.accept(summary, short_summary, targetdir=Config()["Dir::Queue::Newstage"])
     os.unlink(upload.pkg.changes_file[:-8]+".dak")
 
-# def do_accept_stableupdate(upload,suite, q):
-#     cnf = Config()
-#     queue_dir = cnf["Dir::Queue::%s" % (q,)]
-#     if not upload.pkg.changes["architecture"].has_key("source"):
-#         # It is not a sourceful upload.  So its source may be either in p-u
-#         # holding, in new, in accepted or already installed.
-#         if is_source_in_queue_dir(queue_dir):
-#             # It's in p-u holding, so move it there.
-#             print "Binary-only upload, source in %s." % (q,)
-#             move_to_holding(suite, queue_dir)
-#         elif Upload.source_exists(Upload.pkg.changes["source"],
-#                 Upload.pkg.changes["version"]):
-#             # dak tells us that there is source available.  At time of
-#             # writing this means that it is installed, so put it into
-#             # accepted.
-#             print "Binary-only upload, source installed."
-#             Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
-#             _accept()
-#         elif is_source_in_queue_dir(Cnf["Dir::Queue::Accepted"]):
-#             # The source is in accepted, the binary cleared NEW: accept it.
-#             print "Binary-only upload, source in accepted."
-#             Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
-#             _accept()
-#         elif is_source_in_queue_dir(Cnf["Dir::Queue::New"]):
-#             # It's in NEW.  We expect the source to land in p-u holding
-#             # pretty soon.
-#             print "Binary-only upload, source in new."
-#             move_to_holding(suite, queue_dir)
-#         elif is_source_in_queue_dir(Cnf["Dir::Queue::Newstage"]):
-#             # It's in newstage.  Accept into the holding area
-#             print "Binary-only upload, source in newstage."
-#             Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
-#             _accept()
-#         else:
-#             # No case applicable.  Bail out.  Return will cause the upload
-#             # to be skipped.
-#             print "ERROR"
-#             print "Stable update failed.  Source not found."
-#             return
-#     else:
-#         # We are handling a sourceful upload.  Move to accepted if currently
-#         # in p-u holding and to p-u holding otherwise.
-#         if is_source_in_queue_dir(queue_dir):
-#             print "Sourceful upload in %s, accepting." % (q,)
-#             _accept()
-#         else:
-#             move_to_holding(suite, queue_dir)
-
 def do_accept(upload):
     print "ACCEPT"
     cnf = Config()
     if not Options["No-Action"]:
         (summary, short_summary) = upload.build_summaries()
-#     if cnf.FindB("Dinstall::SecurityQueueHandling"):
-#         upload.dump_vars(cnf["Dir::Queue::Embargoed"])
-#         move_to_dir(cnf["Dir::Queue::Embargoed"])
-#         upload.queue_build("embargoed", cnf["Dir::Queue::Embargoed"])
-#         # Check for override disparities
-#         upload.Subst["__SUMMARY__"] = summary
-#     else:
-        # Stable updates need to be copied to proposed-updates holding
-        # area instead of accepted.  Sourceful uploads need to go
-        # to it directly, binaries only if the source has not yet been
-        # accepted into p-u.
-        for suite, q in [("proposed-updates", "ProposedUpdates"),
-                ("oldstable-proposed-updates", "OldProposedUpdates")]:
-            if not upload.pkg.changes["distribution"].has_key(suite):
-                continue
-            utils.fubar("stable accept not supported yet")
-#            return do_accept_stableupdate(suite, q)
-        # Just a normal upload, accept it...
-        _accept(upload)
-
-def check_status(files):
-    new = byhand = 0
-    for f in files.keys():
-        if files[f]["type"] == "byhand":
-            byhand = 1
-        elif files[f].has_key("new"):
-            new = 1
-    return (new, byhand)
+
+        if cnf.FindB("Dinstall::SecurityQueueHandling"):
+            upload.dump_vars(cnf["Dir::Queue::Embargoed"])
+            upload.move_to_dir(cnf["Dir::Queue::Embargoed"])
+            upload.queue_build("embargoed", cnf["Dir::Queue::Embargoed"])
+            # Check for override disparities
+            upload.Subst["__SUMMARY__"] = summary
+        else:
+            # Just a normal upload, accept it...
+            _accept(upload)
 
 def do_pkg(changes_file, session):
     u = Upload()
@@ -1024,58 +894,6 @@ def end():
 
 ################################################################################
 
-# def do_comments(dir, opref, npref, line, fn):
-#     for comm in [ x for x in os.listdir(dir) if x.startswith(opref) ]:
-#         lines = open("%s/%s" % (dir, comm)).readlines()
-#         if len(lines) == 0 or lines[0] != line + "\n": continue
-#         changes_files = [ x for x in os.listdir(".") if x.startswith(comm[7:]+"_")
-#                                 and x.endswith(".changes") ]
-#         changes_files = sort_changes(changes_files)
-#         for f in changes_files:
-#             f = utils.validate_changes_file_arg(f, 0)
-#             if not f: continue
-#             print "\n" + f
-#             fn(f, "".join(lines[1:]))
-
-#         if opref != npref and not Options["No-Action"]:
-#             newcomm = npref + comm[len(opref):]
-#             os.rename("%s/%s" % (dir, comm), "%s/%s" % (dir, newcomm))
-
-# ################################################################################
-
-# def comment_accept(changes_file, comments):
-#     Upload.pkg.changes_file = changes_file
-#     Upload.init_vars()
-#     Upload.update_vars()
-#     Upload.update_subst()
-#     files = Upload.pkg.files
-
-#     if not recheck():
-#         return # dak wants to REJECT, crap
-
-#     (new, byhand) = check_status(files)
-#     if not new and not byhand:
-#         do_accept()
-
-# ################################################################################
-
-# def comment_reject(changes_file, comments):
-#     Upload.pkg.changes_file = changes_file
-#     Upload.init_vars()
-#     Upload.update_vars()
-#     Upload.update_subst()
-
-#     if not recheck():
-#         pass # dak has its own reasons to reject as well, which is fine
-
-#     reject(comments)
-#     print "REJECT\n" + reject_message,
-#     if not Options["No-Action"]:
-#         Upload.do_reject(0, reject_message)
-#         os.unlink(Upload.pkg.changes_file[:-8]+".dak")
-
-################################################################################
-
 def main():
     global Options, Logger, Sections, Priorities
 
@@ -1084,17 +902,16 @@ def main():
 
     Arguments = [('a',"automatic","Process-New::Options::Automatic"),
                  ('h',"help","Process-New::Options::Help"),
-                 ('C',"comments-dir","Process-New::Options::Comments-Dir", "HasArg"),
                  ('m',"manual-reject","Process-New::Options::Manual-Reject", "HasArg"),
                  ('t',"trainee","Process-New::Options::Trainee"),
                  ('n',"no-action","Process-New::Options::No-Action")]
 
-    for i in ["automatic", "help", "manual-reject", "no-action", "version", "comments-dir", "trainee"]:
+    for i in ["automatic", "help", "manual-reject", "no-action", "version", "trainee"]:
         if not cnf.has_key("Process-New::Options::%s" % (i)):
             cnf["Process-New::Options::%s" % (i)] = ""
 
     changes_files = apt_pkg.ParseCommandLine(cnf.Cnf,Arguments,sys.argv)
-    if len(changes_files) == 0 and not cnf.get("Process-New::Options::Comments-Dir",""):
+    if len(changes_files) == 0:
         changes_files = utils.get_changes_files(cnf["Dir::Queue::New"])
 
     Options = cnf.SubTree("Process-New::Options")
@@ -1119,22 +936,13 @@ def main():
     # Kill me now? **FIXME**
     cnf["Dinstall::Options::No-Mail"] = ""
 
-#     commentsdir = cnf.get("Process-New::Options::Comments-Dir","")
-#     if commentsdir:
-#        if changes_files != []:
-#            sys.stderr.write("Can't specify any changes files if working with comments-dir")
-#            sys.exit(1)
-#        do_comments(commentsdir, "ACCEPT.", "ACCEPTED.", "OK", comment_accept)
-#        do_comments(commentsdir, "REJECT.", "REJECTED.", "NOTOK", comment_reject)
-#     else:
-    if True:
-        for changes_file in changes_files:
-            changes_file = utils.validate_changes_file_arg(changes_file, 0)
-            if not changes_file:
-                continue
-            print "\n" + changes_file
-
-            do_pkg (changes_file, session)
+    for changes_file in changes_files:
+        changes_file = utils.validate_changes_file_arg(changes_file, 0)
+        if not changes_file:
+            continue
+        print "\n" + changes_file
+
+        do_pkg (changes_file, session)
 
     end()
 
index 5463f1a6521a7219332614396359719b13aca514..db29ac42a1a1220ddf9e30e332861049c42a5cac 100755 (executable)
@@ -507,7 +507,7 @@ def process_it(changes_file):
 
         action(u)
 
-    except SystemExit:
+    except (SystemExit, KeyboardInterrupt):
         raise
 
     except:
index 7d7fe9fe596a91082d816d77f8490e1661967c96..88d8e4e66e6425e91a6501c5c09af21a8e395b5a 100755 (executable)
@@ -107,10 +107,9 @@ Updates dak's database schema to the latest version. You should disable crontab
 
         try:
             # Build a connect string
-#            connect_str = "dbname=%s"% (Cnf["DB::Name"])
-            connect_str = "dbname=%s"% "projectbstew"
-#            if Cnf["DB::Host"] != '': connect_str += " host=%s" % (Cnf["DB::Host"])
-#            if Cnf["DB::Port"] != '-1': connect_str += " port=%d" % (int(Cnf["DB::Port"]))
+            connect_str = "dbname=%s"% (Cnf["DB::Name"])
+            if Cnf["DB::Host"] != '': connect_str += " host=%s" % (Cnf["DB::Host"])
+            if Cnf["DB::Port"] != '-1': connect_str += " port=%d" % (int(Cnf["DB::Port"]))
 
             self.db = psycopg2.connect(connect_str)
 
@@ -177,12 +176,12 @@ Updates dak's database schema to the lastest version. You should disable crontab
 
         self.update_db()
 
-#STU        try:
-#STU            lock_fd = os.open(Cnf["Dinstall::LockFile"], os.O_RDWR | os.O_CREAT)
-#STU            fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
-#STU        except IOError, e:
-#STU            if errno.errorcode[e.errno] == 'EACCES' or errno.errorcode[e.errno] == 'EAGAIN':
-#STU                utils.fubar("Couldn't obtain lock; assuming another 'dak process-unchecked' is already running.")
+        try:
+            lock_fd = os.open(Cnf["Dinstall::LockFile"], os.O_RDWR | os.O_CREAT)
+            fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
+        except IOError, e:
+            if errno.errorcode[e.errno] == 'EACCES' or errno.errorcode[e.errno] == 'EAGAIN':
+                utils.fubar("Couldn't obtain lock; assuming another 'dak process-unchecked' is already running.")
 
 
 ################################################################################
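
Re-enabling the lock (previously commented out with #STU markers) means a second dak run fails fast instead of racing the first: O_CREAT guarantees the lock file exists, and LOCK_NB turns contention into an immediate IOError rather than a blocking wait. The pattern on its own (lock path illustrative):

    # Non-blocking exclusive lock, as re-enabled above.
    import errno
    import fcntl
    import os

    def acquire_lock(path):
        lock_fd = os.open(path, os.O_RDWR | os.O_CREAT)
        try:
            fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except IOError, e:
            if e.errno in (errno.EACCES, errno.EAGAIN):
                raise SystemExit('another dak process already holds %s' % path)
            raise
        return lock_fd       # keep it open for the process lifetime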
index dfcae368089f0c1d2a0d2e56b9e6e8fa218c6d6a..fb33b0bdab18160525e1c3183d828f1559abba0e 100755 (executable)
@@ -38,7 +38,7 @@ class Logger:
     logfile = None
     program = None
 
-    def __init__ (self, Cnf, program, debug=0):
+    def __init__ (self, Cnf, program, debug=0, print_starting=True):
         "Initialize a new Logger object"
         self.Cnf = Cnf
         self.program = program
@@ -58,7 +58,8 @@ class Logger:
             logfile = utils.open_file(logfilename, 'a')
             os.umask(umask)
         self.logfile = logfile
-        self.log(["program start"])
+        if print_starting:
+            self.log(["program start"])
 
     def log (self, details):
         "Log an event"
index 3c0bc50d38750c3dbecebb0280f1eb620aaf2fce..1a1902d5dd3da64dd9b35facecbf29e79abe8f7a 100755 (executable)
@@ -34,6 +34,7 @@
 ################################################################################
 
 import os
+import re
 import psycopg2
 import traceback
 
@@ -388,6 +389,28 @@ __all__.append('get_binary_components')
 
 ################################################################################
 
+class BinaryACL(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<BinaryACL %s>' % self.binary_acl_id
+
+__all__.append('BinaryACL')
+
+################################################################################
+
+class BinaryACLMap(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<BinaryACLMap %s>' % self.binary_acl_map_id
+
+__all__.append('BinaryACLMap')
+
+################################################################################
+
 class Component(object):
     def __init__(self, *args, **kwargs):
         pass
@@ -815,6 +838,33 @@ class Fingerprint(object):
 
 __all__.append('Fingerprint')
 
+@session_wrapper
+def get_fingerprint(fpr, session=None):
+    """
+    Returns Fingerprint object for given fpr.
+
+    @type fpr: string
+    @param fpr: The fpr to find / add
+
+    @type session: SQLAlchemy
+    @param session: Optional SQL session object (a temporary one will be
+    generated if not supplied).
+
+    @rtype: Fingerprint
+    @return: the Fingerprint object for the given fpr or None
+    """
+
+    q = session.query(Fingerprint).filter_by(fingerprint=fpr)
+
+    try:
+        ret = q.one()
+    except NoResultFound:
+        ret = None
+
+    return ret
+
+__all__.append('get_fingerprint')
+
 @session_wrapper
 def get_or_set_fingerprint(fpr, session=None):
     """
@@ -852,20 +902,139 @@ __all__.append('get_or_set_fingerprint')
 
 ################################################################################
 
+# Helper routine for Keyring class
+def get_ldap_name(entry):
+    name = []
+    for k in ["cn", "mn", "sn"]:
+        ret = entry.get(k)
+        if ret and ret[0] != "" and ret[0] != "-":
+            name.append(ret[0])
+    return " ".join(name)
+
+################################################################################
+
 class Keyring(object):
+    gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
+                     " --with-colons --fingerprint --fingerprint"
+
+    keys = {}
+    fpr_lookup = {}
+
     def __init__(self, *args, **kwargs):
         pass
 
     def __repr__(self):
         return '<Keyring %s>' % self.keyring_name
 
+    def de_escape_gpg_str(self, str):
+        esclist = re.split(r'(\\x..)', str)
+        for x in range(1,len(esclist),2):
+            esclist[x] = "%c" % (int(esclist[x][2:],16))
+        return "".join(esclist)
+
+    def load_keys(self, keyring):
+        import email.Utils
+
+        if not self.keyring_id:
+            raise Exception('Must be initialized with database information')
+
+        k = os.popen(self.gpg_invocation % keyring, "r")
+        key = None
+        signingkey = False
+
+        for line in k.xreadlines():
+            field = line.split(":")
+            if field[0] == "pub":
+                key = field[4]
+                (name, addr) = email.Utils.parseaddr(field[9])
+                name = re.sub(r"\s*[(].*[)]", "", name)
+                if name == "" or addr == "" or "@" not in addr:
+                    name = field[9]
+                    addr = "invalid-uid"
+                name = self.de_escape_gpg_str(name)
+                self.keys[key] = {"email": addr}
+                if name != "":
+                    self.keys[key]["name"] = name
+                self.keys[key]["aliases"] = [name]
+                self.keys[key]["fingerprints"] = []
+                signingkey = True
+            elif key and field[0] == "sub" and len(field) >= 12:
+                signingkey = ("s" in field[11])
+            elif key and field[0] == "uid":
+                (name, addr) = email.Utils.parseaddr(field[9])
+                if name and name not in self.keys[key]["aliases"]:
+                    self.keys[key]["aliases"].append(name)
+            elif signingkey and field[0] == "fpr":
+                self.keys[key]["fingerprints"].append(field[9])
+                self.fpr_lookup[field[9]] = key
+
+    def import_users_from_ldap(self, session):
+        import ldap
+        cnf = Config()
+
+        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
+        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
+
+        l = ldap.open(LDAPServer)
+        l.simple_bind_s("","")
+        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
+               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
+               ["uid", "keyfingerprint", "cn", "mn", "sn"])
+
+        ldap_fin_uid_id = {}
+
+        byuid = {}
+        byname = {}
+
+        for i in Attrs:
+            entry = i[1]
+            uid = entry["uid"][0]
+            name = get_ldap_name(entry)
+            fingerprints = entry["keyFingerPrint"]
+            keyid = None
+            for f in fingerprints:
+                key = self.fpr_lookup.get(f, None)
+                if key not in self.keys:
+                    continue
+                self.keys[key]["uid"] = uid
+
+                if keyid != None:
+                    continue
+                keyid = get_or_set_uid(uid, session).uid_id
+                byuid[keyid] = (uid, name)
+                byname[uid] = (keyid, name)
+
+        return (byname, byuid)
+
+    def generate_users_from_keyring(self, format, session):
+        byuid = {}
+        byname = {}
+        any_invalid = False
+        for x in self.keys.keys():
+            if self.keys[x]["email"] == "invalid-uid":
+                any_invalid = True
+                self.keys[x]["uid"] = format % "invalid-uid"
+            else:
+                uid = format % self.keys[x]["email"]
+                keyid = get_or_set_uid(uid, session).uid_id
+                byuid[keyid] = (uid, self.keys[x]["name"])
+                byname[uid] = (keyid, self.keys[x]["name"])
+                self.keys[x]["uid"] = uid
+
+        if any_invalid:
+            uid = format % "invalid-uid"
+            keyid = get_or_set_uid(uid, session).uid_id
+            byuid[keyid] = (uid, "ungeneratable user id")
+            byname[uid] = (keyid, "ungeneratable user id")
+
+        return (byname, byuid)
+
 __all__.append('Keyring')
 
 @session_wrapper
-def get_or_set_keyring(keyring, session=None):
+def get_keyring(keyring, session=None):
     """
-    If C{keyring} does not have an entry in the C{keyrings} table yet, create one
-    and return the new Keyring
+    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
     If C{keyring} already has an entry, simply return the existing Keyring
 
     @type keyring: string
@@ -880,12 +1049,20 @@ def get_or_set_keyring(keyring, session=None):
     try:
         return q.one()
     except NoResultFound:
-        obj = Keyring(keyring_name=keyring)
-        session.add(obj)
-        session.commit_or_flush()
-        return obj
+        return None
 
-__all__.append('get_or_set_keyring')
+__all__.append('get_keyring')
+
+################################################################################
+
+class KeyringACLMap(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<KeyringACLMap %s>' % self.keyring_acl_map_id
+
+__all__.append('KeyringACLMap')
 
 ################################################################################
 
@@ -1402,26 +1579,47 @@ class Queue(object):
 
                 session.add(qb)
 
-            exists, symlinked = utils.ensure_orig_files(changes, dest, session)
-
-            # Add symlinked files to the list of packages for later processing
-            # by apt-ftparchive
-            for filename in symlinked:
-                qb = QueueBuild()
-                qb.suite_id = s.suite_id
-                qb.queue_id = self.queue_id
-                qb.filename = filename
-                qb.in_queue = True
-                session.add(qb)
+            # If the .orig tarballs are in the pool, create a symlink to
+            # them (if one doesn't already exist)
+            for dsc_file in changes.dsc_files.keys():
+                # Skip all files except orig tarballs
+                from daklib.regexes import re_is_orig_source
+                if not re_is_orig_source.match(dsc_file):
+                    continue
+                # Skip orig files not identified in the pool
+                if not (changes.orig_files.has_key(dsc_file) and
+                        changes.orig_files[dsc_file].has_key("id")):
+                    continue
+                orig_file_id = changes.orig_files[dsc_file]["id"]
+                dest = os.path.join(dest_dir, dsc_file)
+
+                # If it doesn't exist, create a symlink
+                if not os.path.exists(dest):
+                    q = session.execute("SELECT l.path, f.filename FROM location l, files f WHERE f.id = :id and f.location = l.id",
+                                        {'id': orig_file_id})
+                    res = q.fetchone()
+                    if not res:
+                        return "[INTERNAL ERROR] Couldn't find id %s in files table." % (orig_file_id)
+
+                    src = os.path.join(res[0], res[1])
+                    os.symlink(src, dest)
 
-            # Update files to ensure they are not removed prematurely
-            for filename in exists:
-                qb = get_queue_build(filename, s.suite_id, session)
-                if qb is None:
+                    # Add it to the list of packages for later processing by apt-ftparchive
+                    qb = QueueBuild()
+                    qb.suite_id = s.suite_id
+                    qb.queue_id = self.queue_id
+                    qb.filename = dest
                     qb.in_queue = True
-                    qb.last_used = None
                     session.add(qb)
 
+                # If it does, update things to ensure it's not removed prematurely
+                else:
+                    qb = get_queue_build(dest, s.suite_id, session)
+                    if qb is not None:
+                        qb.in_queue = True
+                        qb.last_used = None
+                        session.add(qb)
+
         if privatetrans:
             session.commit()
             session.close()
@@ -1737,6 +1935,17 @@ __all__.append('get_source_in_suite')
 
 ################################################################################
 
+class SourceACL(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<SourceACL %s>' % self.source_acl_id
+
+__all__.append('SourceACL')
+
+################################################################################
+
 class SrcAssociation(object):
     def __init__(self, *args, **kwargs):
         pass
@@ -2056,6 +2265,17 @@ __all__.append('get_uid_from_fingerprint')
 
 ################################################################################
 
+class UploadBlock(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
+
+__all__.append('UploadBlock')
+
+################################################################################
+
 class DBConn(Singleton):
     """
     database module init.
@@ -2074,6 +2294,8 @@ class DBConn(Singleton):
         self.tbl_archive = Table('archive', self.db_meta, autoload=True)
         self.tbl_bin_associations = Table('bin_associations', self.db_meta, autoload=True)
         self.tbl_binaries = Table('binaries', self.db_meta, autoload=True)
+        self.tbl_binary_acl = Table('binary_acl', self.db_meta, autoload=True)
+        self.tbl_binary_acl_map = Table('binary_acl_map', self.db_meta, autoload=True)
         self.tbl_component = Table('component', self.db_meta, autoload=True)
         self.tbl_config = Table('config', self.db_meta, autoload=True)
         self.tbl_content_associations = Table('content_associations', self.db_meta, autoload=True)
@@ -2083,6 +2305,7 @@ class DBConn(Singleton):
         self.tbl_files = Table('files', self.db_meta, autoload=True)
         self.tbl_fingerprint = Table('fingerprint', self.db_meta, autoload=True)
         self.tbl_keyrings = Table('keyrings', self.db_meta, autoload=True)
+        self.tbl_keyring_acl_map = Table('keyring_acl_map', self.db_meta, autoload=True)
         self.tbl_location = Table('location', self.db_meta, autoload=True)
         self.tbl_maintainer = Table('maintainer', self.db_meta, autoload=True)
         self.tbl_new_comments = Table('new_comments', self.db_meta, autoload=True)
@@ -2094,6 +2317,7 @@ class DBConn(Singleton):
         self.tbl_queue_build = Table('queue_build', self.db_meta, autoload=True)
         self.tbl_section = Table('section', self.db_meta, autoload=True)
         self.tbl_source = Table('source', self.db_meta, autoload=True)
+        self.tbl_source_acl = Table('source_acl', self.db_meta, autoload=True)
         self.tbl_src_associations = Table('src_associations', self.db_meta, autoload=True)
         self.tbl_src_format = Table('src_format', self.db_meta, autoload=True)
         self.tbl_src_uploaders = Table('src_uploaders', self.db_meta, autoload=True)
@@ -2101,6 +2325,7 @@ class DBConn(Singleton):
         self.tbl_suite_architectures = Table('suite_architectures', self.db_meta, autoload=True)
         self.tbl_suite_src_formats = Table('suite_src_formats', self.db_meta, autoload=True)
         self.tbl_uid = Table('uid', self.db_meta, autoload=True)
+        self.tbl_upload_blocks = Table('upload_blocks', self.db_meta, autoload=True)
 
     def __setupmappers(self):
         mapper(Architecture, self.tbl_architecture,
@@ -2137,6 +2362,14 @@ class DBConn(Singleton):
                                  binassociations = relation(BinAssociation,
                                                             primaryjoin=(self.tbl_binaries.c.id==self.tbl_bin_associations.c.bin))))
 
+        mapper(BinaryACL, self.tbl_binary_acl,
+               properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))
+
+        mapper(BinaryACLMap, self.tbl_binary_acl_map,
+               properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
+                                 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
+                                 architecture = relation(Architecture)))
+
         mapper(Component, self.tbl_component,
                properties = dict(component_id = self.tbl_component.c.id,
                                  component_name = self.tbl_component.c.name))
@@ -2162,12 +2395,19 @@ class DBConn(Singleton):
                                  uid_id = self.tbl_fingerprint.c.uid,
                                  uid = relation(Uid),
                                  keyring_id = self.tbl_fingerprint.c.keyring,
-                                 keyring = relation(Keyring)))
+                                 keyring = relation(Keyring),
+                                 source_acl = relation(SourceACL),
+                                 binary_acl = relation(BinaryACL)))
 
         mapper(Keyring, self.tbl_keyrings,
                properties = dict(keyring_name = self.tbl_keyrings.c.name,
                                  keyring_id = self.tbl_keyrings.c.id))
 
+        mapper(KeyringACLMap, self.tbl_keyring_acl_map,
+               properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
+                                 keyring = relation(Keyring, backref="keyring_acl_map"),
+                                 architecture = relation(Architecture)))
+
         mapper(Location, self.tbl_location,
                properties = dict(location_id = self.tbl_location.c.id,
                                  component_id = self.tbl_location.c.component,
@@ -2228,7 +2468,11 @@ class DBConn(Singleton):
                                  srcfiles = relation(DSCFile,
                                                      primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
                                  srcassociations = relation(SrcAssociation,
-                                                            primaryjoin=(self.tbl_source.c.id==self.tbl_src_associations.c.source))))
+                                                            primaryjoin=(self.tbl_source.c.id==self.tbl_src_associations.c.source)),
+                                 srcuploaders = relation(SrcUploader)))
+
+        mapper(SourceACL, self.tbl_source_acl,
+               properties = dict(source_acl_id = self.tbl_source_acl.c.id))
 
         mapper(SrcAssociation, self.tbl_src_associations,
                properties = dict(sa_id = self.tbl_src_associations.c.id,
@@ -2269,6 +2513,11 @@ class DBConn(Singleton):
                properties = dict(uid_id = self.tbl_uid.c.id,
                                  fingerprint = relation(Fingerprint)))
 
+        mapper(UploadBlock, self.tbl_upload_blocks,
+               properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
+                                 fingerprint = relation(Fingerprint, backref="uploadblocks"),
+                                 uid = relation(Uid, backref="uploadblocks")))
+
     ## Connection functions
     def __createconn(self):
         from config import Config
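
  The ACL tables wired up above become reachable through ordinary session
  queries. A minimal sketch, assuming the binary_acl_map backref defined in
  __setupmappers (the fingerprint value is illustrative):

      # List the architectures a key may upload binaries for,
      # via the binary_acl_map backref on Fingerprint.
      session = DBConn().session()
      fpr = session.query(Fingerprint).filter_by(fingerprint='0123456789ABCDEF').one()
      arches = [bam.architecture.arch_string for bam in fpr.binary_acl_map]
      session.close()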
diff --git a/daklib/formats.py b/daklib/formats.py
new file mode 100644 (file)
index 0000000..aaad271
--- /dev/null
@@ -0,0 +1,45 @@
+from regexes import re_verwithext
+from dak_exceptions import UnknownFormatError
+
+def parse_format(txt):
+    """
+    Parse a .changes Format string into a tuple representation for easy
+    comparison.
+
+    >>> parse_format('1.0')
+    (1, 0)
+    >>> parse_format('8.4 (hardy)')
+    (8, 4, 'hardy')
+
+    If the format doesn't match these forms, raises UnknownFormatError.
+    """
+
+    format = re_verwithext.search(txt)
+
+    if format is None:
+        raise UnknownFormatError, txt
+
+    format = format.groups()
+
+    if format[1] is None:
+        format = int(float(format[0])), 0, format[2]
+    else:
+        format = int(format[0]), int(format[1]), format[2]
+
+    if format[2] is None:
+        format = format[:2]
+
+    return format
+
+def validate_changes_format(format, field):
+    """
+    Validate a tuple representation of a .changes Format: field. Raises
+    UnknownFormatError if the field is invalid; otherwise the return value
+    is undefined.
+    """
+
+    if (format < (1, 5) or format > (1, 8)):
+        raise UnknownFormatError, repr(format)
+
+    if field != 'files' and format < (1, 8):
+        raise UnknownFormatError, repr(format)
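
  Taken together, the two helpers form the whole validation path for a
  .changes Format: field. A doctest-style sketch, consistent with the
  examples in the docstring above:

      >>> from daklib.formats import parse_format, validate_changes_format
      >>> parse_format('1.8')
      (1, 8)
      >>> validate_changes_format(parse_format('1.8'), 'files')   # valid: returns quietly
      >>> validate_changes_format(parse_format('1.0'), 'files')   # out of range: raises UnknownFormatError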
index f7d999a3da7220135d89d522ac87948775182199..eb55b251a73ddaef6f09bbbf6ba532593041a960 100755 (executable)
@@ -213,28 +213,14 @@ def check_valid(new):
 
 ###############################################################################
 
-def lookup_uid_from_fingerprint(fpr, session):
-    uid = None
-    uid_name = ""
-    # This is a stupid default, but see the comments below
-    is_dm = False
-
-    user = get_uid_from_fingerprint(fpr, session)
-
-    if user is not None:
-        uid = user.uid
-        if user.name is None:
-            uid_name = ''
-        else:
-            uid_name = user.name
-
-        # Check the relevant fingerprint (which we have to have)
-        for f in user.fingerprint:
-            if f.fingerprint == fpr:
-                is_dm = f.keyring.debian_maintainer
-                break
-
-    return (uid, uid_name, is_dm)
+def check_status(files):
+    new = byhand = 0
+    for f in files.keys():
+        if files[f]["type"] == "byhand":
+            byhand = 1
+        elif files[f].has_key("new"):
+            new = 1
+    return (new, byhand)
 
 ###############################################################################
 
@@ -287,19 +273,20 @@ class Upload(object):
         self.pkg.reset()
 
     def package_info(self):
-        msg = ''
-
-        if len(self.rejects) > 0:
-            msg += "Reject Reasons:\n"
-            msg += "\n".join(self.rejects)
+        """
+        Format various messages from this Upload to send to the maintainer.
+        """
 
-        if len(self.warnings) > 0:
-            msg += "Warnings:\n"
-            msg += "\n".join(self.warnings)
+        msgs = (
+            ('Reject Reasons', self.rejects),
+            ('Warnings', self.warnings),
+            ('Notes', self.notes),
+        )
 
-        if len(self.notes) > 0:
-            msg += "Notes:\n"
-            msg += "\n".join(self.notes)
+        msg = ''
+        for title, messages in msgs:
+            if messages:
+                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
 
         return msg
 
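
  The rewritten package_info() drives all three report sections from a
  single table of (title, messages) pairs, so empty sections drop out by
  themselves. A worked sketch with hypothetical contents:

      msgs = (
          ('Reject Reasons', ['md5sum mismatch for foo_1.0_i386.deb']),
          ('Warnings', []),                        # empty, so omitted entirely
          ('Notes', ['first upload for this source']),
      )
      msg = ''
      for title, messages in msgs:
          if messages:
              msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
      # msg now holds a 'Reject Reasons' block followed by a 'Notes' block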
@@ -1025,16 +1012,21 @@ class Upload(object):
 
     ###########################################################################
 
-    def ensure_all_source_exists(self, dest_dir=None):
-        """
-        Ensure that dest_dir contains all the orig tarballs for the specified
-        changes. If it does not, symlink them into place.
+    def get_changelog_versions(self, source_dir):
+        """Extracts the source package and (optionally) grabs the
+        version history out of debian/changelog for the BTS."""
 
-        If dest_dir is None, populate the current directory.
-        """
+        cnf = Config()
+
+        # Find the .dsc (again)
+        dsc_filename = None
+        for f in self.pkg.files.keys():
+            if self.pkg.files[f]["type"] == "dsc":
+                dsc_filename = f
 
-        if dest_dir is None:
-            dest_dir = os.getcwd()
+        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
+        if not dsc_filename:
+            return
 
         # Create a symlink mirror of the source files in our temporary directory
         for f in self.pkg.files.keys():
@@ -1045,8 +1037,8 @@ class Upload(object):
                 if not os.path.exists(src):
                     return
                 ftype = m.group(3)
-                if re_is_orig_source.match(f) and pkg.orig_files.has_key(f) and \
-                   pkg.orig_files[f].has_key("path"):
+                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
+                   self.pkg.orig_files[f].has_key("path"):
                     continue
                 dest = os.path.join(os.getcwd(), f)
                 os.symlink(src, dest)
@@ -1059,26 +1051,6 @@ class Upload(object):
             dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
             os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
 
-    ###########################################################################
-
-    def get_changelog_versions(self, source_dir):
-        """Extracts a the source package and (optionally) grabs the
-        version history out of debian/changelog for the BTS."""
-
-        cnf = Config()
-
-        # Find the .dsc (again)
-        dsc_filename = None
-        for f in self.pkg.files.keys():
-            if self.pkg.files[f]["type"] == "dsc":
-                dsc_filename = f
-
-        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
-        if not dsc_filename:
-            return
-
-        self.ensure_all_source_exists()
-
         # Extract the source
         cmd = "dpkg-source -sn -x %s" % (dsc_filename)
         (result, output) = commands.getstatusoutput(cmd)
@@ -1211,7 +1183,98 @@ class Upload(object):
         self.ensure_hashes()
 
     ###########################################################################
+
+    def ensure_orig(self, target_dir='.', session=None):
+        """
+        Ensures that all orig files mentioned in the changes file are present
+        in target_dir. If they do not exist, they are symlinked into place.
+
+        A list containing the symlinks that were created is returned (so they
+        can be removed later).
+        """
+
+        symlinked = []
+        cnf = Config()
+
+        for filename, entry in self.pkg.dsc_files.iteritems():
+            if not re_is_orig_source.match(filename):
+                # File is not an orig; ignore
+                continue
+
+            if os.path.exists(filename):
+                # File exists, no need to continue
+                continue
+
+            def symlink_if_valid(path):
+                f = utils.open_file(path)
+                md5sum = apt_pkg.md5sum(f)
+                f.close()
+
+                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
+                expected = (int(entry['size']), entry['md5sum'])
+
+                if fingerprint != expected:
+                    return False
+
+                dest = os.path.join(target_dir, filename)
+
+                os.symlink(path, dest)
+                symlinked.append(dest)
+
+                return True
+
+            session_ = session
+            if session is None:
+                session_ = DBConn().session()
+
+            found = False
+
+            # Look in the pool
+            for poolfile in get_poolfile_like_name('/%s' % filename, session_):
+                poolfile_path = os.path.join(
+                    poolfile.location.path, poolfile.filename
+                )
+
+                if symlink_if_valid(poolfile_path):
+                    found = True
+                    break
+
+            if session is None:
+                session_.close()
+
+            if found:
+                continue
+
+            # Look in some other queues for the file
+            queues = ('Accepted', 'New', 'Byhand', 'ProposedUpdates',
+                'OldProposedUpdates', 'Embargoed', 'Unembargoed')
+
+            for queue in queues:
+                if not cnf.get('Dir::Queue::%s' % queue):
+                    continue
+
+                queuefile_path = os.path.join(
+                    cnf['Dir::Queue::%s' % queue], filename
+                )
+
+                if not os.path.exists(queuefile_path):
+                    # Does not exist in this queue
+                    continue
+
+                if symlink_if_valid(queuefile_path):
+                    break
+
+        return symlinked
+
+    ###########################################################################
+
     def check_lintian(self):
+        cnf = Config()
+
+        # Don't reject binary uploads
+        if not self.pkg.changes['architecture'].has_key('source'):
+            return
+
         # Only check some distributions
         valid_dist = False
         for dist in ('unstable', 'experimental'):
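
  ensure_orig() takes an optional session: a caller-supplied one is reused
  and left open, while otherwise a private one is created and closed before
  returning. A condensed sketch of that idiom, with query_pool standing in
  for the real pool lookup (hypothetical helper):

      def lookup(filename, session=None):
          session_ = session
          if session is None:
              session_ = DBConn().session()    # private session

          result = query_pool(filename, session_)

          if session is None:
              session_.close()                 # only close what we opened
          return result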
@@ -1222,13 +1285,11 @@ class Upload(object):
         if not valid_dist:
             return
 
-        self.ensure_all_source_exists()
-
-        cnf = Config()
         tagfile = cnf.get("Dinstall::LintianTags")
         if tagfile is None:
             # We don't have a tagfile, so just don't do anything.
             return
+
         # Parse the yaml file
         sourcefile = file(tagfile, 'r')
         sourcecontent = sourcefile.read()
@@ -1239,6 +1300,9 @@ class Upload(object):
             utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
             return
 
+        # Try to find all orig tarballs mentioned in the .dsc
+        symlinked = self.ensure_orig()
+
         # Now set up the input file for lintian. lintian wants "one tag per line" only,
         # so put it together like that. We put all types of tags in one file and then sort
         # through lintian's output later to see if it's a fatal tag we detected, or not.
@@ -1258,8 +1322,12 @@ class Upload(object):
         # to then parse it.
         command = "lintian --show-overrides --tags-from-file %s %s" % (temp_filename, self.pkg.changes_file)
         (result, output) = commands.getstatusoutput(command)
-        # We are done with lintian, remove our tempfile
+
+        # We are done with lintian, remove our tempfile and any symlinks we created
         os.unlink(temp_filename)
+        for symlink in symlinked:
+            os.unlink(symlink)
+
         if (result == 2):
             utils.warn("lintian failed for %s [return code: %s]." % (self.pkg.changes_file, result))
             utils.warn(utils.prefix_multi_line_string(output, " [possible output:] "))
@@ -1267,6 +1335,10 @@ class Upload(object):
         if len(output) == 0:
             return
 
+        def log(*txt):
+            if self.logger:
+                self.logger.log([self.pkg.changes_file, "check_lintian"] + list(txt))
+
         # We have output from lintian, so this package isn't clean. Let's parse it and see
         # whether we have a victim for a reject.
         # W: tzdata: binary-without-manpage usr/sbin/tzconfig
@@ -1293,12 +1365,16 @@ class Upload(object):
                 elif etag in lintiantags['error']:
                     # The tag is overridden - but is not allowed to be
                     self.rejects.append("%s: Overridden tag %s found, but this tag may not be overridden." % (epackage, etag))
+                    log("ftpmaster does not allow tag to be overridable", etag)
             else:
                 # Tag is known, it is not overriden, direct reject.
                 self.rejects.append("%s: Found lintian output: '%s %s', automatically rejected package." % (epackage, etag, etext))
                 # Now tell if they *might* override it.
                 if etag in lintiantags['warning']:
+                    log("auto rejecting", "overridable", etag)
                     self.rejects.append("%s: If you have a good reason, you may override this lintian tag." % (epackage))
+                else:
+                    log("auto rejecting", "not overridable", etag)
 
     ###########################################################################
     def check_urgency(self):
@@ -1361,7 +1437,201 @@ class Upload(object):
                 except:
                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
 
+    def check_if_upload_is_sponsored(self, uid_email, uid_name):
+        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
+            sponsored = False
+        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
+            sponsored = False
+            if uid_name == "":
+                sponsored = True
+        else:
+            sponsored = True
+            if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
+                sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
+                if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
+                    self.pkg.changes["changedbyemail"] not in sponsor_addresses):
+                        self.pkg.changes["sponsoremail"] = uid_email
+
+        return sponsored
+
+
+    ###########################################################################
+    # check_signed_by_key checks
     ###########################################################################
+
+    def check_signed_by_key(self):
+        """Ensure the .changes is signed by an authorized uploader."""
+        session = DBConn().session()
+
+        # First of all we check that the person has proper upload permissions
+        # and that this upload isn't blocked
+        fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
+
+        if fpr is None:
+            self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
+            return
+
+        # TODO: Check that import-keyring adds UIDs properly
+        if not fpr.uid:
+            self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
+            return
+
+        # Check that the fingerprint which uploaded has permission to do so
+        self.check_upload_permissions(fpr, session)
+
+        # Check that this package is not in a transition
+        self.check_transition(session)
+
+        session.close()
+
+
+    def check_upload_permissions(self, fpr, session):
+        # Check any one-off upload blocks
+        self.check_upload_blocks(fpr, session)
+
+        # Start with DM as a special case; unfortunately it has to be
+        # checked first
+        # (keys with no source access get more access than DMs in one
+        #  way; DMs can only upload for their packages whether source
+        #  or binary, whereas keys with no access might be able to
+        #  upload some binaries)
+        if fpr.source_acl.access_level == 'dm':
+            self.check_dm_upload(fpr, session)
+        else:
+            # Check source-based permissions for other types
+            if self.pkg.changes["architecture"].has_key("source"):
+                if fpr.source_acl.access_level is None:
+                    rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
+                    rej += '\nPlease contact ftpmaster if you think this is incorrect'
+                    self.rejects.append(rej)
+                    return
+            else:
+                # If not a DM, we allow full upload rights
+                uid_email = "%s@debian.org" % (fpr.uid.uid)
+                self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
+
+
+        # Check binary upload permissions
+        # By this point we know that DMs can't have got here unless they
+        # are allowed to deal with the package concerned so just apply
+        # normal checks
+        if fpr.binary_acl.access_level == 'full':
+            return
+
+        # Otherwise we're in the map case
+        tmparches = self.pkg.changes["architecture"].copy()
+        tmparches.pop('source', None)
+
+        for bam in fpr.binary_acl_map:
+            tmparches.pop(bam.architecture.arch_string, None)
+
+        if len(tmparches.keys()) > 0:
+            if fpr.binary_reject:
+                rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
+                rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
+                self.rejects.append(rej)
+            else:
+                # TODO: This is where we'll implement reject vs throw away binaries later
+                rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
+                rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
+                rej += "\nFingerprint: %s" % (fpr.fingerprint)
+                self.rejects.append(rej)
+
+
+    def check_upload_blocks(self, fpr, session):
+        """Check whether any upload blocks apply to this source, source
+           version, uid / fpr combination"""
+
+        def block_rej_template(fb):
+            rej = 'Manual upload block in place for package %s' % fb.source
+            if fb.version is not None:
+                rej += ', version %s' % fb.version
+            return rej
+
+        for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
+            # version is None if the block applies to all versions
+            if fb.version is None or fb.version == self.pkg.changes['version']:
+                # Check both fpr and uid - either is enough to cause a reject
+                if fb.fingerprint is not None:
+                    if fb.fingerprint.fingerprint == fpr.fingerprint:
+                        self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
+                if fb.uid is not None:
+                    if fb.uid == fpr.uid:
+                        self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
+
+
+    def check_dm_upload(self, fpr, session):
+        # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
+        ## none of the uploaded packages are NEW
+        rej = False
+        for f in self.pkg.files.keys():
+            if self.pkg.files[f].has_key("byhand"):
+                self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
+                rej = True
+            if self.pkg.files[f].has_key("new"):
+                self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
+                rej = True
+
+        if rej:
+            return
+
+        ## the most recent version of the package uploaded to unstable or
+        ## experimental includes the field "DM-Upload-Allowed: yes" in the source
+        ## section of its control file
+        q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
+        q = q.join(SrcAssociation)
+        q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
+        q = q.order_by(desc('source.version')).limit(1)
+
+        r = q.all()
+
+        if len(r) != 1:
+            rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
+            self.rejects.append(rej)
+            return
+
+        r = r[0]
+        if not r.dm_upload_allowed:
+            rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
+            self.rejects.append(rej)
+            return
+
+        ## the Maintainer: field of the uploaded .changes file corresponds with
+        ## the owner of the key used (ie, non-developer maintainers may not sponsor
+        ## uploads)
+        if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
+            self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
+
+        ## the most recent version of the package uploaded to unstable or
+        ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
+        ## non-developer maintainers cannot NMU or hijack packages)
+
+        # srcuploaders includes the maintainer
+        accept = False
+        for sup in r.srcuploaders:
+            (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
+            # Eww - I hope we never have two people with the same name in Debian
+            if email == fpr.uid.uid or name == fpr.uid.name:
+                accept = True
+                break
+
+        if not accept:
+            self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
+            return
+
+        ## none of the packages are being taken over from other source packages
+        for b in self.pkg.changes["binary"].keys():
+            for suite in self.pkg.changes["distribution"].keys():
+                q = session.query(DBSource)
+                q = q.join(DBBinary).filter_by(package=b)
+                q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
+
+                for s in q.all():
+                    if s.source != self.pkg.changes["source"]:
+                        self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))
+
+
+
     def check_transition(self, session):
         cnf = Config()
 
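
  check_upload_blocks() above matches blocks on source name, an optional
  version, and either a fingerprint or a uid. A hedged sketch of what
  registering such a block could look like (attribute names follow the
  UploadBlock mapping; the values are illustrative):

      # A block on every version of 'foo' for one fingerprint
      block = UploadBlock()
      block.source = 'foo'
      block.version = None          # None matches all versions
      block.fingerprint = fpr       # a Fingerprint object; block.uid works likewise
      block.reason = 'pending discussion with the maintainer'
      session.add(block)
      session.commit()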
@@ -1434,92 +1704,9 @@ transition is done."""
                     return
 
     ###########################################################################
-    def check_signed_by_key(self):
-        """Ensure the .changes is signed by an authorized uploader."""
-        session = DBConn().session()
-
-        self.check_transition(session)
-
-        (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(self.pkg.changes["fingerprint"], session=session)
-
-        # match claimed name with actual name:
-        if uid is None:
-            # This is fundamentally broken but need us to refactor how we get
-            # the UIDs/Fingerprints in order for us to fix it properly
-            uid, uid_email = self.pkg.changes["fingerprint"], uid
-            may_nmu, may_sponsor = 1, 1
-            # XXX by default new dds don't have a fingerprint/uid in the db atm,
-            #     and can't get one in there if we don't allow nmu/sponsorship
-        elif is_dm is False:
-            # If is_dm is False, we allow full upload rights
-            uid_email = "%s@debian.org" % (uid)
-            may_nmu, may_sponsor = 1, 1
-        else:
-            # Assume limited upload rights unless we've discovered otherwise
-            uid_email = uid
-            may_nmu, may_sponsor = 0, 0
-
-        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
-            sponsored = 0
-        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
-            sponsored = 0
-            if uid_name == "": sponsored = 1
-        else:
-            sponsored = 1
-            if ("source" in self.pkg.changes["architecture"] and
-                uid_email and utils.is_email_alias(uid_email)):
-                sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
-                if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
-                    self.pkg.changes["changedbyemail"] not in sponsor_addresses):
-                    self.pkg.changes["sponsoremail"] = uid_email
-
-        if sponsored and not may_sponsor:
-            self.rejects.append("%s is not authorised to sponsor uploads" % (uid))
-
-        if not sponsored and not may_nmu:
-            should_reject = True
-            highest_sid, highest_version = None, None
-
-            # XXX: This reimplements in SQLA what existed before but it's fundamentally fucked
-            #      It ignores higher versions with the dm_upload_allowed flag set to false
-            #      I'm keeping the existing behaviour for now until I've gone back and
-            #      checked exactly what the GR says - mhy
-            for si in get_sources_from_name(source=self.pkg.changes['source'], dm_upload_allowed=True, session=session):
-                if highest_version is None or apt_pkg.VersionCompare(si.version, highest_version) == 1:
-                     highest_sid = si.source_id
-                     highest_version = si.version
-
-            if highest_sid is None:
-                self.rejects.append("Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version" % self.pkg.changes["source"])
-            else:
-                for sup in session.query(SrcUploader).join(DBSource).filter_by(source_id=highest_sid):
-                    (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
-                    if email == uid_email or name == uid_name:
-                        should_reject = False
-                        break
-
-            if should_reject is True:
-                self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (uid, self.pkg.changes["source"]))
-
-            for b in self.pkg.changes["binary"].keys():
-                for suite in self.pkg.changes["distribution"].keys():
-                    q = session.query(DBSource)
-                    q = q.join(DBBinary).filter_by(package=b)
-                    q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
-
-                    for s in q.all():
-                        if s.source != self.pkg.changes["source"]:
-                            self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite))
-
-            for f in self.pkg.files.keys():
-                if self.pkg.files[f].has_key("byhand"):
-                    self.rejects.append("%s may not upload BYHAND file %s" % (uid, f))
-                if self.pkg.files[f].has_key("new"):
-                    self.rejects.append("%s may not upload NEW file %s" % (uid, f))
-
-        session.close()
-
+    # End check_signed_by_key checks
     ###########################################################################
+
     def build_summaries(self):
         """ Build a summary of changes the upload introduces. """
 
@@ -2283,6 +2470,44 @@ distribution."""
             if actual_size != int(dsc_entry["size"]):
                 self.rejects.append("size for %s doesn't match %s." % (found, file))
 
+    ################################################################################
+    # This is used by process-new and process-holding to recheck a changes file
+    # at the time we're running.  It mainly wraps various other internal functions
+    # and is similar to accepted_checks - these should probably be tidied up
+    # and combined
+    def recheck(self, session):
+        cnf = Config()
+        for f in self.pkg.files.keys():
+            # The .orig.tar.gz can disappear out from under us if it's a
+            # duplicate of one in the archive.
+            if not self.pkg.files.has_key(f):
+                continue
+
+            entry = self.pkg.files[f]
+
+            # Check that the source still exists
+            if entry["type"] == "deb":
+                source_version = entry["source version"]
+                source_package = entry["source package"]
+                if not self.pkg.changes["architecture"].has_key("source") \
+                   and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
+                    source_epochless_version = re_no_epoch.sub('', source_version)
+                    dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
+                    found = False
+                    for q in ["Accepted", "Embargoed", "Unembargoed", "Newstage"]:
+                        if cnf.has_key("Dir::Queue::%s" % (q)):
+                            if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
+                                found = True
+                    if not found:
+                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
+
+            # Version and file overwrite checks
+            if entry["type"] == "deb":
+                self.check_binary_against_db(f, session)
+            elif entry["type"] == "dsc":
+                self.check_source_against_db(f, session)
+                self.check_dsc_against_db(f, session)
+
     ################################################################################
     def accepted_checks(self, overwrite_checks, session):
         # Recheck anything that relies on the database; since that's not
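
  recheck() hunts for a vanished source package's .dsc across the holding
  queues. A condensed sketch of that scan, with cnf as dak's usual Config
  object and the same queue names as in the loop above:

      def find_dsc(cnf, dsc_filename):
          for q in ["Accepted", "Embargoed", "Unembargoed", "Newstage"]:
              key = "Dir::Queue::%s" % q
              if cnf.has_key(key):
                  path = os.path.join(cnf[key], dsc_filename)
                  if os.path.exists(path):
                      return path
          return None    # caller rejects: no source found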
index ade3c45388b6a32d2e27c27d15a4a33a11e98b5a..7d7dd940e3837c1c5c99a7f8708ae3663ae53d44 100644 (file)
@@ -1,6 +1,5 @@
 import re
 
-from regexes import re_verwithext
 from dak_exceptions import UnknownFormatError
 
 srcformats = []
@@ -18,36 +17,6 @@ def get_format_from_string(txt):
 
     raise UnknownFormatError, "Unknown format %r" % txt
 
-def parse_format(txt):
-    """
-    Parse a .changes Format string into a tuple representation for easy
-    comparison.
-
-    >>> parse_format('1.0')
-    (1, 0)
-    >>> parse_format('8.4 (hardy)')
-    (8, 4, 'hardy')
-
-    If the format doesn't match these forms, raises UnknownFormatError.
-    """
-
-    format = re_verwithext.search(txt)
-
-    if format is None:
-        raise UnknownFormatError, txt
-
-    format = format.groups()
-
-    if format[1] is None:
-        format = int(float(format[0])), 0, format[2]
-    else:
-        format = int(format[0]), int(format[1]), format[2]
-
-    if format[2] is None:
-        format = format[:2]
-
-    return format
-
 class SourceFormat(type):
     def __new__(cls, name, bases, attrs):
         klass = super(SourceFormat, cls).__new__(cls, name, bases, attrs)
@@ -70,15 +39,6 @@ class SourceFormat(type):
             if has[key]:
                 yield "contains source files not allowed in format %s" % cls.name
 
-    @classmethod
-    def validate_format(cls, format, is_a_dsc=False, field='files'):
-        """
-        Raises UnknownFormatError if the specified format tuple is not valid for
-        this format (for example, the format (1, 0) is not valid for the
-        "3.0 (quilt)" format). Return value is undefined in all other cases.
-        """
-        pass
-
 class FormatOne(SourceFormat):
     __metaclass__ = SourceFormat
 
@@ -101,19 +61,6 @@ class FormatOne(SourceFormat):
         for msg in super(FormatOne, cls).reject_msgs(has):
             yield msg
 
-    @classmethod
-    def validate_format(cls, format, is_a_dsc=False, field='files'):
-        msg = "Invalid format %s definition: %r" % (cls.name, format)
-
-        if is_a_dsc:
-            if format != (1, 0):
-                raise UnknownFormatError, msg
-        else:
-            if (format < (1,5) or format > (1,8)):
-                raise UnknownFormatError, msg
-            if field != "files" and format < (1,8):
-                raise UnknownFormatError, msg
-
 class FormatThree(SourceFormat):
     __metaclass__ = SourceFormat
 
@@ -123,12 +70,6 @@ class FormatThree(SourceFormat):
     requires = ('native_tar',)
     disallowed = ('orig_tar', 'debian_diff', 'debian_tar', 'more_orig_tar')
 
-    @classmethod
-    def validate_format(cls, format, **kwargs):
-        if format != (3, 0, 'native'):
-            raise UnknownFormatError, "Invalid format %s definition: %r" % \
-                (cls.name, format)
-
 class FormatThreeQuilt(SourceFormat):
     __metaclass__ = SourceFormat
 
@@ -137,9 +78,3 @@ class FormatThreeQuilt(SourceFormat):
 
     requires = ('orig_tar', 'debian_tar')
     disallowed = ('debian_diff', 'native_tar')
-
-    @classmethod
-    def validate_format(cls, format, **kwargs):
-        if format != (3, 0, 'quilt'):
-            raise UnknownFormatError, "Invalid format %s definition: %r" % \
-                (cls.name, format)
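
  With the validate_format() classmethods gone, format handling is split in
  two: daklib.formats checks the numeric .changes Format: field, while the
  srcformats classes keep policing which files each source format may ship.
  A small sketch of the two entry points:

      from daklib.formats import parse_format
      from daklib.srcformats import get_format_from_string

      parse_format('3.0 (quilt)')              # -> (3, 0, 'quilt')
      get_format_from_string('3.0 (quilt)')    # -> the FormatThreeQuilt class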
index ec1cd36686f8b3f943723b430f9850a1a061e2ab..21446fbe9f435685f8c2dcd0c2c8585f7db1b41c 100755 (executable)
@@ -48,13 +48,13 @@ from regexes import re_html_escaping, html_escaping, re_single_line_field, \
                     re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource, \
                     re_is_orig_source
 
+from formats import parse_format, validate_changes_format
 from srcformats import get_format_from_string
 from collections import defaultdict
 
 ################################################################################
 
-#default_config = "/etc/dak/dak.conf"     #: default dak config, defines host properties
-default_config = "/home/stew/etc/dak/dak.conf"     #: default dak config, defines host properties
+default_config = "/etc/dak/dak.conf"     #: default dak config, defines host properties
 default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
 
 alias_cache = None        #: Cache for email alias checks
@@ -528,9 +528,9 @@ def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
     if not changes.has_key(field):
         raise NoFilesFieldError
 
-    # Get SourceFormat object for this Format and validate it
-    format = get_format_from_string(changes['format'])
-    format.validate_format(is_a_dsc=is_a_dsc, field=field)
+    # Validate .changes Format: field
+    if not is_a_dsc:
+        validate_changes_format(parse_format(changes['format']), field)
 
     includes_section = (not is_a_dsc) and field == "files"
 
@@ -1505,52 +1505,5 @@ apt_pkg.init()
 Cnf = apt_pkg.newConfiguration()
 apt_pkg.ReadConfigFileISC(Cnf,default_config)
 
-#if which_conf_file() != default_config:
-#    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
-
-###############################################################################
-
-def ensure_orig_files(changes, dest_dir, session):
-    """
-    Ensure that dest_dir contains all the orig tarballs for the specified
-    changes. If it does not, symlink them into place.
-
-    Returns a 2-tuple (already_exists, symlinked) containing a list of files
-    that were already there and a list of files that were symlinked into place.
-    """
-
-    exists, symlinked = [], []
-
-    for dsc_file in changes.dsc_files:
-
-        # Skip all files that are not orig tarballs
-        if not re_is_orig_source.match(dsc_file):
-            continue
-
-        # Skip orig files not identified in the pool
-        if not (dsc_file in changes.orig_files and
-                'id' in changes.orig_files[dsc_file]):
-            continue
-
-        dest = os.path.join(dest_dir, dsc_file)
-
-        if os.path.exists(dest):
-            exists.append(dest)
-            continue
-
-        orig_file_id = changes.orig_files[dsc_file]['id']
-
-        c = session.execute(
-            'SELECT l.path, f.filename FROM location l, files f WHERE f.id = :id and f.location = l.id',
-            {'id': orig_file_id}
-        )
-
-        res = c.fetchone()
-        if not res:
-            return "[INTERNAL ERROR] Couldn't find id %s in files table." % orig_file_id
-
-        src = os.path.join(res[0], res[1])
-        os.symlink(src, dest)
-        symlinked.append(dest)
-
-    return (exists, symlinked)
+if which_conf_file() != default_config:
+    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
index e531a241e5a41723bd2c3098e9c7ff5b797a2fc6..d6bd125b88b62dbd6a550fc7158d0894ea1f8f71 100644 (file)
@@ -367,3 +367,10 @@ Canadians: This is a lighthouse. Your call.
 <dak> mhy: Error: "!!!11111iiiiiioneoneoneone" is not a valid command.
 <mhy> dak: oh shut up
 <dak> mhy: Error: "oh" is not a valid command.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+<sgran> hey, I think something's wrong with your git repo
+<sgran> when I git pulled this last time, I got something that looked almost like python instead of dak
+<mhy> sgran: slander
+<sgran> sorry, I take it back, I've had a better look now
index 0a004f3899956e22e4f0580dc2f56c126b945879..67db55168ae5f16852447413a94ac418314aad2d 100755 (executable)
@@ -95,6 +95,10 @@ mv "$TMPDIR/installer-$ARCH/current"  "$TARGET"
 find "$TARGET/$VERSION" -type d -exec chmod 755 {} +
 find "$TARGET/$VERSION" -type f -exec chmod 644 {} +
 
+# Make sure nothing symlinks outside of the ftpdir
+# Shouldn't happen, but better to be sure.
+symlinks -d -r /srv/ftp.debian.org/ftp
+
 trap - EXIT
 cleanup
 
index b9d31a021bd579e054a15f42024f2f36d4afb981..c16fde6aa77eb4efbe615d1e472cc2947a9c7c9a 100755 (executable)
@@ -27,7 +27,7 @@ echo "Generating sources list..."
   cd $base/ftp
   find ./dists -maxdepth 1 \! -type d
   find ./dists \! -type d | grep "/source/"
-) | sort -u | gzip -9 > source.list.gz
+) | sort -u | gzip --rsyncable -9 > source.list.gz
 
 echo "Generating arch lists..."
 
@@ -39,7 +39,7 @@ for a in $ARCHES; do
    cd $base/ftp
    find ./dists -maxdepth 1 \! -type d
    find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
-  ) | sort -u | gzip -9 > arch-$a.list.gz
+  ) | sort -u | gzip --rsyncable -9 > arch-$a.list.gz
 done
 
 echo "Generating suite lists..."
@@ -62,7 +62,7 @@ printf 'SELECT id, suite_name FROM suite\n' | psql -F' ' -At projectb |
       done
      )
      suite_list $id | tr -d ' ' | sed 's,^/srv/ftp.debian.org/ftp,.,'
-    ) | sort -u | gzip -9 > suite-${suite}.list.gz
+    ) | sort -u | gzip --rsyncable -9 > suite-${suite}.list.gz
   done
 
 echo "Finding everything on the ftp site to generate sundries $(date +"%X")..."
@@ -83,7 +83,7 @@ done
 
 (cd $base/ftp/
        for dist in sid squeeze; do
-               find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
+               find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip --rsyncable -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
        done
 )
 
index 19363f1f89d43d360132f1975427794ce333ec4e..231f7f8c131782c88e20d5d0d81066590717bd63 100755 (executable)
@@ -26,11 +26,11 @@ if [ -r ${filename}.gz ] ; then
   mv -f ${filename}.gz $filename.old.gz
   mv -f .$filename.new $filename
   rm -f $filename.patch.gz
-  zcat $filename.old.gz | diff -u - $filename | gzip -9cfn - >$filename.patch.gz
+  zcat $filename.old.gz | diff -u - $filename | gzip --rsyncable -9cfn - >$filename.patch.gz
   rm -f $filename.old.gz
 else
   mv -f .$filename.new $filename
 fi
 
-gzip -9cfN $filename >$filename.gz
+gzip --rsyncable -9cfN $filename >$filename.gz
 rm -f $filename
index a0abaa1f7cf5fac627e022f00bc8ef7475edc38b..41e8727c6b3e267214cea7f63bd8effccb2d6765 100755 (executable)
@@ -17,7 +17,7 @@ set -e
 if [ $rc = 1 ] || [ ! -f Maintainers ] ; then
        echo -n "installing Maintainers ... "
        mv -f .new-maintainers Maintainers
-       gzip -9v <Maintainers >.new-maintainers.gz
+       gzip --rsyncable -9v <Maintainers >.new-maintainers.gz
        mv -f .new-maintainers.gz Maintainers.gz
 elif [ $rc = 0 ] ; then
        echo '(same as before)'
diff --git a/tests/test_formats.py b/tests/test_formats.py
new file mode 100755 (executable)
index 0000000..1ae6860
--- /dev/null
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+
+import unittest
+
+import os, sys
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+from daklib.formats import parse_format, validate_changes_format
+from daklib.dak_exceptions import UnknownFormatError
+
+class ParseFormatTestCase(unittest.TestCase):
+    def assertParse(self, format, expected):
+        self.assertEqual(parse_format(format), expected)
+
+    def assertParseFail(self, format):
+        self.assertRaises(
+            UnknownFormatError,
+            lambda: parse_format(format)
+        )
+
+    def testParse(self):
+        self.assertParse('1.0', (1, 0))
+
+    def testEmpty(self):
+        self.assertParseFail('')
+        self.assertParseFail(' ')
+        self.assertParseFail('  ')
+
+    def testText(self):
+        self.assertParse('1.2 (three)', (1, 2, 'three'))
+        self.assertParseFail('0.0 ()')
+
+class ValidateChangesFormat(unittest.TestCase):
+    def assertValid(self, changes, field='files'):
+        validate_changes_format(changes, field)
+
+    def assertInvalid(self, *args, **kwargs):
+        self.assertRaises(
+            UnknownFormatError,
+            lambda: self.assertValid(*args, **kwargs)
+        )
+
+    ##
+
+    def testBinary(self):
+        self.assertValid((1, 5))
+        self.assertValid((1, 8))
+        self.assertInvalid((1, 0))
+
+    def testRange(self):
+        self.assertInvalid((1, 3))
+        self.assertValid((1, 5))
+        self.assertValid((1, 8))
+        self.assertInvalid((1, 9))
+
+    def testFilesField(self):
+        self.assertInvalid((1, 7), field='notfiles')
+        self.assertValid((1, 8), field='notfiles')
index f6d7215fc8daabcd179543ff59691263ccbb9d23..4ecaf8b7fcc83925238f2473ed7961714158eee2 100755 (executable)
@@ -8,6 +8,7 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 from collections import defaultdict
 
 from daklib import srcformats
+from daklib.formats import parse_format
 from daklib.dak_exceptions import UnknownFormatError
 
 class SourceFormatTestCase(unittest.TestCase):
@@ -103,89 +104,6 @@ class FormatTreeQuiltTestCase(SourceFormatTestCase):
             'native_tar': 1,
         })
 
-##
-
-class ParseFormatTestCase(unittest.TestCase):
-    def assertParse(self, format, expected):
-        self.assertEqual(srcformats.parse_format(format), expected)
-
-    def assertParseFail(self, format):
-        self.assertRaises(
-            UnknownFormatError,
-            lambda: srcformats.parse_format(format)
-        )
-
-    def testParse(self):
-        self.assertParse('1.0', (1, 0))
-
-    def testEmpty(self):
-        self.assertParseFail('')
-        self.assertParseFail(' ')
-        self.assertParseFail('  ')
-
-    def textText(self):
-        self.assertParse('1.2 (three)', (1, 2, 'three'))
-        self.assertParseFail('0.0 ()')
-
-class ValidateFormatTestCase(unittest.TestCase):
-    def assertValid(self, format, **kwargs):
-        kwargs['is_a_dsc'] = kwargs.get('is_a_dsc', True)
-        self.fmt.validate_format(format, **kwargs)
-
-    def assertInvalid(self, *args, **kwargs):
-        self.assertRaises(
-            UnknownFormatError,
-            lambda: self.assertValid(*args, **kwargs),
-        )
-
-class ValidateFormatOneTestCase(ValidateFormatTestCase):
-    fmt = srcformats.FormatOne
-
-    def testValid(self):
-        self.assertValid((1, 0))
-
-    def testInvalid(self):
-        self.assertInvalid((0, 1))
-        self.assertInvalid((3, 0, 'quilt'))
-
-    ##
-
-    def testBinary(self):
-        self.assertValid((1, 5), is_a_dsc=False)
-        self.assertInvalid((1, 0), is_a_dsc=False)
-
-    def testRange(self):
-        self.assertInvalid((1, 3), is_a_dsc=False)
-        self.assertValid((1, 5), is_a_dsc=False)
-        self.assertValid((1, 8), is_a_dsc=False)
-        self.assertInvalid((1, 9), is_a_dsc=False)
-
-    def testFilesField(self):
-        self.assertInvalid((1, 7), is_a_dsc=False, field='notfiles')
-        self.assertValid((1, 8), is_a_dsc=False, field='notfiles')
-
-class ValidateFormatThreeTestCase(ValidateFormatTestCase):
-    fmt = srcformats.FormatThree
-
-    def testValid(self):
-        self.assertValid((3, 0, 'native'))
-
-    def testInvalid(self):
-        self.assertInvalid((1, 0))
-        self.assertInvalid((0, 0))
-        self.assertInvalid((3, 0, 'quilt'))
-
-class ValidateFormatThreeQuiltTestCase(ValidateFormatTestCase):
-    fmt = srcformats.FormatThreeQuilt
-
-    def testValid(self):
-        self.assertValid((3, 0, 'quilt'))
-
-    def testInvalid(self):
-        self.assertInvalid((1, 0))
-        self.assertInvalid((0, 0))
-        self.assertInvalid((3, 0, 'native'))
-
 class FormatFromStringTestCase(unittest.TestCase):
     def assertFormat(self, txt, klass):
         self.assertEqual(srcformats.get_format_from_string(txt), klass)