git.decadent.org.uk Git - dak.git/commitdiff
Merge commit 'mhy/master' into merge
author Joerg Jaspert <joerg@debian.org>
Fri, 30 Oct 2009 08:48:11 +0000 (09:48 +0100)
committer Joerg Jaspert <joerg@debian.org>
Fri, 30 Oct 2009 08:48:11 +0000 (09:48 +0100)
* commit 'mhy/master':
  update import_keyring
  now we don't need the dm flag
  allow for keyring priorities
  implement key acls
  add keyring defaults
  tidy up and add more methods to SQLA objects
  add basic mapper classes for new tables
  add acls to database schema
  fix indentation
  move check_status to daklib/queue.py
  move recheck logic into daklib/queue.py
  remove unused code (moving to process-holding) and readd security support

Signed-off-by: Joerg Jaspert <joerg@debian.org>
dak/dakdb/update16.py [new file with mode: 0755]
dak/import_keyring.py
dak/process_new.py
dak/update_db.py
daklib/dbconn.py
daklib/queue.py

diff --git a/dak/dakdb/update16.py b/dak/dakdb/update16.py
new file mode 100755 (executable)
index 0000000..eca9b48
--- /dev/null
@@ -0,0 +1,178 @@
+#!/usr/bin/env python
+# coding=utf8
+
+"""
+Adding tables for key-based ACLs and blocks
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2009  Mark Hymers <mhy@debian.org>
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+################################################################################
+
+
+################################################################################
+
+import psycopg2
+import time
+from daklib.dak_exceptions import DBUpdateError
+
+################################################################################
+
+def do_update(self):
+    print "Adding tables for handling key-based ACLs and upload blocks"
+
+    try:
+        c = self.db.cursor()
+
+        # Fix up some older table permissions
+        c.execute("GRANT SELECT ON src_format TO public")
+        c.execute("GRANT ALL ON src_format TO ftpmaster")
+        c.execute("GRANT USAGE ON src_format_id_seq TO ftpmaster")
+
+        c.execute("GRANT SELECT ON suite_src_formats TO public")
+        c.execute("GRANT ALL ON suite_src_formats TO ftpmaster")
+
+        # Source ACLs table
+        print "Source ACLs table"
+        c.execute("""
+        CREATE TABLE source_acl (
+              id SERIAL PRIMARY KEY,
+              access_level TEXT UNIQUE NOT NULL
+        )
+        """)
+
+        ## Can upload all packages
+        c.execute("INSERT INTO source_acl (access_level) VALUES ('full')")
+        ## Can upload only packages marked as DM upload allowed
+        c.execute("INSERT INTO source_acl (access_level) VALUES ('dm')")
+
+        c.execute("GRANT SELECT ON source_acl TO public")
+        c.execute("GRANT ALL ON source_acl TO ftpmaster")
+        c.execute("GRANT USAGE ON source_acl_id_seq TO ftpmaster")
+
+        # Binary ACLs table
+        print "Binary ACLs table"
+        c.execute("""
+        CREATE TABLE binary_acl (
+              id SERIAL PRIMARY KEY,
+              access_level TEXT UNIQUE NOT NULL
+        )
+        """)
+
+        ## Can upload any architectures of binary packages
+        c.execute("INSERT INTO binary_acl (access_level) VALUES ('full')")
+        ## Can upload debs where architectures are based on the map table binary_acl_map
+        c.execute("INSERT INTO binary_acl (access_level) VALUES ('map')")
+
+        c.execute("GRANT SELECT ON binary_acl TO public")
+        c.execute("GRANT ALL ON binary_acl TO ftpmaster")
+        c.execute("GRANT USAGE ON binary_acl_id_seq TO ftpmaster")
+
+        # This is only used if binary_acl is 2 for the fingerprint concerned
+        c.execute("""
+        CREATE TABLE binary_acl_map (
+              id SERIAL PRIMARY KEY,
+              fingerprint_id INT4 REFERENCES fingerprint (id) NOT NULL,
+              architecture_id INT4 REFERENCES architecture (id) NOT NULL,
+
+              UNIQUE (fingerprint_id, architecture_id)
+        )""")
+
+        c.execute("GRANT SELECT ON binary_acl_map TO public")
+        c.execute("GRANT ALL ON binary_acl_map TO ftpmaster")
+        c.execute("GRANT USAGE ON binary_acl_map_id_seq TO ftpmaster")
+
+        ## NULL means no source upload access (i.e. any upload containing source
+        ## will be rejected)
+        c.execute("ALTER TABLE fingerprint ADD COLUMN source_acl_id INT4 REFERENCES source_acl(id) DEFAULT NULL")
+
+        ## NULL means no binary upload access
+        c.execute("ALTER TABLE fingerprint ADD COLUMN binary_acl_id INT4 REFERENCES binary_acl(id) DEFAULT NULL")
+
+        ## TRUE here means that if the person doesn't have binary upload permissions for
+        ## an architecture, we'll reject the .changes.  FALSE means that we'll simply
+        ## dispose of those particular binaries
+        c.execute("ALTER TABLE fingerprint ADD COLUMN binary_reject BOOLEAN NOT NULL DEFAULT TRUE")
+
+        # Blockage table (replaces the hard coded stuff we used to have in extensions)
+        print "Adding blockage table"
+        c.execute("""
+        CREATE TABLE upload_blocks (
+              id             SERIAL PRIMARY KEY,
+              source         TEXT NOT NULL,
+              version        TEXT DEFAULT NULL,
+              fingerprint_id INT4 REFERENCES fingerprint (id),
+              uid_id         INT4 REFERENCES uid (id),
+              reason         TEXT NOT NULL,
+
+              CHECK (fingerprint_id IS NOT NULL OR uid_id IS NOT NULL)
+        )""")
+
+        c.execute("GRANT SELECT ON upload_blocks TO public")
+        c.execute("GRANT ALL ON upload_blocks TO ftpmaster")
+        c.execute("GRANT USAGE ON upload_blocks_id_seq TO ftpmaster")
+
+        c.execute("ALTER TABLE keyrings ADD COLUMN default_source_acl_id INT4 REFERENCES source_acl (id) DEFAULT NULL")
+        c.execute("ALTER TABLE keyrings ADD COLUMN default_binary_acl_id INT4 REFERENCES binary_acl (id) DEFAULT NULL")
+        c.execute("ALTER TABLE keyrings ADD COLUMN default_binary_reject BOOLEAN NOT NULL DEFAULT TRUE")
+        # Set up keyring priorities
+        c.execute("ALTER TABLE keyrings ADD COLUMN priority INT4 NOT NULL DEFAULT 100")
+        # And then we don't need the DM stuff any more
+        c.execute("ALTER TABLE keyrings DROP COLUMN debian_maintainer")
+
+        # Default ACLs for keyrings
+        c.execute("""
+        CREATE TABLE keyring_acl_map (
+              id SERIAL PRIMARY KEY,
+              keyring_id      INT4 REFERENCES keyrings (id) NOT NULL,
+              architecture_id INT4 REFERENCES architecture (id) NOT NULL,
+
+              UNIQUE (keyring_id, architecture_id)
+        )""")
+
+        c.execute("GRANT SELECT ON keyring_acl_map TO public")
+        c.execute("GRANT ALL ON keyring_acl_map TO ftpmaster")
+        c.execute("GRANT USAGE ON keyring_acl_map_id_seq TO ftpmaster")
+
+        # Set up some default stuff; default to old behaviour
+        print "Setting up some defaults"
+
+        c.execute("""UPDATE keyrings SET default_source_acl_id = (SELECT id FROM source_acl WHERE access_level = 'full'),
+                                         default_binary_acl_id = (SELECT id FROM binary_acl WHERE access_level = 'full')""")
+
+        c.execute("""UPDATE keyrings SET default_source_acl_id = (SELECT id FROM source_acl WHERE access_level = 'dm'),
+                                         default_binary_acl_id = (SELECT id FROM binary_acl WHERE access_level = 'full')
+                                     WHERE name = 'debian-maintainers.gpg'""")
+
+        c.execute("""UPDATE keyrings SET priority = 90 WHERE name = 'debian-maintainers.gpg'""")
+
+        # Initialize the existing keys
+        c.execute("""UPDATE fingerprint SET binary_acl_id = (SELECT default_binary_acl_id FROM keyrings
+                                                              WHERE keyrings.id = fingerprint.keyring)""")
+
+        c.execute("""UPDATE fingerprint SET source_acl_id = (SELECT default_source_acl_id FROM keyrings
+                                                              WHERE keyrings.id = fingerprint.keyring)""")
+
+        print "Updating config version"
+        c.execute("UPDATE config SET value = '16' WHERE name = 'db_revision'")
+        self.db.commit()
+
+    except psycopg2.ProgrammingError, msg:
+        self.db.rollback()
+        raise DBUpdateError, "Unable to apply ACLs update (16), rollback issued. Error message : %s" % (str(msg))
index 0b670357f2999c9d8d1a066e1290eaa24eda1f42..e26eb7e54d390d46641a506fba310427262267ee 100755 (executable)
@@ -2,6 +2,7 @@
 
 """ Imports a keyring into the database """
 # Copyright (C) 2007  Anthony Towns <aj@erisian.com.au>
+# Copyright (C) 2009  Mark Hymers <mhy@debian.org>
 
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 ################################################################################
 
 import sys, os, re
-import apt_pkg, ldap, email.Utils
+import apt_pkg, ldap
 
 from daklib.config import Config
 from daklib.dbconn import *
 
-
 # Globals
 Options = None
 
@@ -38,6 +38,7 @@ def get_uid_info(session):
     for (keyid, uid, name) in q.fetchall():
         byname[uid] = (keyid, name)
         byid[keyid] = (uid, name)
+
     return (byname, byid)
 
 def get_fingerprint_info(session):
@@ -49,126 +50,6 @@ def get_fingerprint_info(session):
 
 ################################################################################
 
-def get_ldap_name(entry):
-    name = []
-    for k in ["cn", "mn", "sn"]:
-        ret = entry.get(k)
-        if ret and ret[0] != "" and ret[0] != "-":
-            name.append(ret[0])
-    return " ".join(name)
-
-################################################################################
-
-class Keyring(object):
-    gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
-                     " --with-colons --fingerprint --fingerprint"
-    keys = {}
-    fpr_lookup = {}
-
-    def de_escape_gpg_str(self, str):
-        esclist = re.split(r'(\\x..)', str)
-        for x in range(1,len(esclist),2):
-            esclist[x] = "%c" % (int(esclist[x][2:],16))
-        return "".join(esclist)
-
-    def __init__(self, keyring):
-        self.cnf = Config()
-        k = os.popen(self.gpg_invocation % keyring, "r")
-        keys = self.keys
-        key = None
-        fpr_lookup = self.fpr_lookup
-        signingkey = False
-        for line in k.xreadlines():
-            field = line.split(":")
-            if field[0] == "pub":
-                key = field[4]
-                (name, addr) = email.Utils.parseaddr(field[9])
-                name = re.sub(r"\s*[(].*[)]", "", name)
-                if name == "" or addr == "" or "@" not in addr:
-                    name = field[9]
-                    addr = "invalid-uid"
-                name = self.de_escape_gpg_str(name)
-                keys[key] = {"email": addr}
-                if name != "": keys[key]["name"] = name
-                keys[key]["aliases"] = [name]
-                keys[key]["fingerprints"] = []
-                signingkey = True
-            elif key and field[0] == "sub" and len(field) >= 12:
-                signingkey = ("s" in field[11])
-            elif key and field[0] == "uid":
-                (name, addr) = email.Utils.parseaddr(field[9])
-                if name and name not in keys[key]["aliases"]:
-                    keys[key]["aliases"].append(name)
-            elif signingkey and field[0] == "fpr":
-                keys[key]["fingerprints"].append(field[9])
-                fpr_lookup[field[9]] = key
-
-    def generate_desired_users(self):
-        if Options["Generate-Users"]:
-            format = Options["Generate-Users"]
-            return self.generate_users_from_keyring(format)
-        if Options["Import-Ldap-Users"]:
-            return self.import_users_from_ldap()
-        return ({}, {})
-
-    def import_users_from_ldap(self):
-        LDAPDn = self.cnf["Import-LDAP-Fingerprints::LDAPDn"]
-        LDAPServer = self.cnf["Import-LDAP-Fingerprints::LDAPServer"]
-        l = ldap.open(LDAPServer)
-        l.simple_bind_s("","")
-        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
-               "(&(keyfingerprint=*)(gidnumber=%s))" % (self.cnf["Import-Users-From-Passwd::ValidGID"]),
-               ["uid", "keyfingerprint", "cn", "mn", "sn"])
-
-        ldap_fin_uid_id = {}
-
-        byuid = {}
-        byname = {}
-        keys = self.keys
-        fpr_lookup = self.fpr_lookup
-
-        for i in Attrs:
-            entry = i[1]
-            uid = entry["uid"][0]
-            name = get_ldap_name(entry)
-            fingerprints = entry["keyFingerPrint"]
-            keyid = None
-            for f in fingerprints:
-                key = fpr_lookup.get(f, None)
-                if key not in keys: continue
-                keys[key]["uid"] = uid
-
-                if keyid != None: continue
-                keyid = get_or_set_uid(uid).uid
-                byuid[keyid] = (uid, name)
-                byname[uid] = (keyid, name)
-
-        return (byname, byuid)
-
-    def generate_users_from_keyring(self, format):
-        byuid = {}
-        byname = {}
-        keys = self.keys
-        any_invalid = False
-        for x in keys.keys():
-            if keys[x]["email"] == "invalid-uid":
-                any_invalid = True
-                keys[x]["uid"] = format % "invalid-uid"
-            else:
-                uid = format % keys[x]["email"]
-                keyid = get_or_set_uid(uid).uid
-                byuid[keyid] = (uid, keys[x]["name"])
-                byname[uid] = (keyid, keys[x]["name"])
-                keys[x]["uid"] = uid
-        if any_invalid:
-            uid = format % "invalid-uid"
-            keyid = get_or_set_uid(uid).uid
-            byuid[keyid] = (uid, "ungeneratable user id")
-            byname[uid] = (keyid, "ungeneratable user id")
-        return (byname, byuid)
-
-################################################################################
-
 def usage (exit_code=0):
     print """Usage: dak import-keyring [OPTION]... [KEYRING]
   -h, --help                  show this help and exit.
@@ -197,6 +78,7 @@ def main():
     ### Parse options
 
     Options = cnf.SubTree("Import-Keyring::Options")
+
     if Options["Help"]:
         usage()
 
@@ -204,7 +86,6 @@ def main():
         usage(1)
 
     ### Keep track of changes made
-
     changes = []   # (uid, changes strings)
 
     ### Initialise
@@ -216,22 +97,21 @@ def main():
     ### Parse the keyring
 
     keyringname = keyring_names[0]
-    keyring = Keyring(keyringname)
-
-    is_dm = "false"
-    if cnf.has_key("Import-Keyring::"+keyringname+"::Debian-Maintainer"):
-        session.execute("UPDATE keyrings SET debian_maintainer = :dm WHERE name = :name",
-                        {'dm': cnf["Import-Keyring::"+keyringname+"::Debian-Maintainer"],
-                         'name': keyringname.split("/")[-1]})
+    keyring = get_keyring(keyringname, session)
+    if not keyring:
+        print "E: Can't load keyring %s from database" % keyringname
+        sys.exit(1)
 
-        is_dm = cnf["Import-Keyring::"+keyringname+"::Debian-Maintainer"]
-
-    keyring_id = get_or_set_keyring(
-        keyringname.split("/")[-1], session,
-    ).keyring_id
+    keyring.load_keys(keyringname)
 
     ### Generate new uid entries if they're needed (from LDAP or the keyring)
-    (desuid_byname, desuid_byid) = keyring.generate_desired_users()
+    if Options["Generate-Users"]:
+        format = Options["Generate-Users"]
+        (desuid_byname, desuid_byid) = keyring.generate_users_from_keyring(Options["Generate-Users"], session)
+    elif Options["Import-Ldap-Users"]:
+        (desuid_byname, desuid_byid) = keyring.import_users_from_ldap(session)
+    else:
+        (desuid_byname, desuid_byid) = ({}, {})
 
     ### Cache all the existing uid entries
     (db_uid_byname, db_uid_byid) = get_uid_info(session)
@@ -240,7 +120,7 @@ def main():
     for keyid in desuid_byid.keys():
         uid = (keyid, desuid_byid[keyid][0])
         name = desuid_byid[keyid][1]
-        oname = db_uid_byname[keyid][1]
+        oname = db_uid_byid[keyid][1]
         if name and oname != name:
             changes.append((uid[1], "Full name: %s" % (name)))
             session.execute("UPDATE uid SET name = :name WHERE id = :keyid",
@@ -258,17 +138,28 @@ def main():
         if keyid == None:
             keyid = db_fin_info.get(keyring.keys[z]["fingerprints"][0], [None])[0]
         for y in keyring.keys[z]["fingerprints"]:
-            fpr[y] = (keyid,keyring_id)
+            fpr[y] = (keyid, keyring.keyring_id)
 
     # For any keys that used to be in this keyring, disassociate them.
     # We don't change the uid, leaving that for historical info; if
     # the id should change, it'll be set when importing another keyring.
 
     for f,(u,fid,kr) in db_fin_info.iteritems():
-        if kr != keyring_id: continue
-        if f in fpr: continue
+        if kr != keyring.keyring_id:
+            continue
+
+        if f in fpr:
+            continue
+
         changes.append((db_uid_byid.get(u, [None])[0], "Removed key: %s" % (f)))
-        session.execute("UPDATE fingerprint SET keyring = NULL WHERE id = :fprid", {'fprid': fid})
+        session.execute("""UPDATE fingerprint
+                              SET keyring = NULL,
+                                  source_acl_id = NULL,
+                                  binary_acl_id = NULL,
+                                  binary_reject = TRUE
+                            WHERE id = :fprid""", {'fprid': fid})
+
+        session.execute("""DELETE FROM binary_acl_map WHERE fingerprint_id = :fprid""", {'fprid': fid})
 
     # For the keys in this keyring, add/update any fingerprints that've
     # changed.
@@ -276,19 +167,36 @@ def main():
     for f in fpr:
         newuid = fpr[f][0]
         newuiduid = db_uid_byid.get(newuid, [None])[0]
+
         (olduid, oldfid, oldkid) = db_fin_info.get(f, [-1,-1,-1])
-        if olduid == None: olduid = -1
-        if oldkid == None: oldkid = -1
+
+        if olduid == None:
+            olduid = -1
+
+        if oldkid == None:
+            oldkid = -1
+
         if oldfid == -1:
             changes.append((newuiduid, "Added key: %s" % (f)))
+            fp = Fingerprint()
+            fp.fingerprint = f
+            fp.keyring_id = keyring.keyring_id
             if newuid:
-                session.execute("""INSERT INTO fingerprint (fingerprint, uid, keyring)
-                                        VALUES (:fpr, :uid, :keyring)""",
-                                {'fpr': f, 'uid': uid, 'keyring': keyring_id})
-            else:
-                session.execute("""INSERT INTO fingerprint (fingerprint, keyring)
-                                        VALUES (:fpr, :keyring)""",
-                                {'fpr': f, 'keyring': keyring_id})
+                fp.uid_id = newuid
+
+            fp.binary_acl_id = keyring.default_binary_acl_id
+            fp.source_acl_id = keyring.default_source_acl_id
+            fp.default_binary_reject = keyring.default_binary_reject
+            session.add(fp)
+            session.flush()
+
+            for k in keyring.keyring_acl_map:
+                ba = BinaryACLMap()
+                ba.fingerprint_id = fp.fingerprint_id
+                ba.architecture_id = k.architecture_id
+                session.add(ba)
+                session.flush()
+
         else:
             if newuid and olduid != newuid:
                 if olduid != -1:
@@ -297,25 +205,62 @@ def main():
                 else:
                     changes.append((newuiduid, "Linked key: %s" % f))
                     changes.append((newuiduid, "  (formerly unowned)"))
+
                 session.execute("UPDATE fingerprint SET uid = :uid WHERE id = :fpr",
                                 {'uid': newuid, 'fpr': oldfid})
 
-            if oldkid != keyring_id:
+            # Don't move a key from a keyring with a higher priority to a lower one
+            if oldkid != keyring.keyring_id:
+                movekey = False
+                if oldkid == -1:
+                    movekey = True
+                else:
+                    try:
+                        oldkeyring = session.query(Keyring).filter_by(keyring_id=oldkid).one()
+                    except NotFoundError:
+                        print "ERROR: Cannot find old keyring with id %s" % oldkid
+                        sys.exit(1)
+
+                    if oldkeyring.priority < keyring.priority:
+                        movekey = True
+
                 # Only change the keyring if it won't result in a loss of permissions
-                q = session.execute("SELECT debian_maintainer FROM keyrings WHERE id = :keyring",
-                                    {'keyring': keyring_id})
-                if is_dm == "false" and not q.fetchall()[0][0]:
-                    session.execute("UPDATE fingerprint SET keyring = :keyring WHERE id = :fpr",
-                                    {'keyring': keyring_id, 'fpr': oldfid})
+                if movekey:
+                    session.execute("""DELETE FROM binary_acl_map WHERE fingerprint_id = :fprid""", {'fprid': oldfid})
+
+                    session.execute("""UPDATE fingerprint
+                                          SET keyring = :keyring,
+                                              source_acl_id = :source_acl_id,
+                                              binary_acl_id = :binary_acl_id,
+                                              binary_reject = :binary_reject
+                                        WHERE id = :fpr""",
+                                    {'keyring': keyring.keyring_id,
+                                     'source_acl_id': keyring.default_source_acl_id,
+                                     'binary_acl_id': keyring.default_binary_acl_id,
+                                     'binary_reject': keyring.default_binary_reject,
+                                     'fpr': oldfid})
+
+                    session.flush()
+
+                    for k in keyring.keyring_acl_map:
+                        ba = BinaryACLMap()
+                        ba.fingerprint_id = oldfid
+                        ba.architecture_id = k.architecture_id
+                        session.add(ba)
+                        session.flush()
+
                 else:
-                    print "Key %s exists in both DM and DD keyrings. Not demoting." % (f)
+                    print "Key %s exists in both %s and %s keyrings. Not demoting." % (oldkeyring.keyring_name,
+                                                                                       keyring.keyring_name)
 
     # All done!
     session.commit()
 
+    # Print a summary
     changesd = {}
     for (k, v) in changes:
-        if k not in changesd: changesd[k] = ""
+        if k not in changesd:
+            changesd[k] = ""
         changesd[k] += "    %s\n" % (v)
 
     keys = changesd.keys()
index ec69012ea0d1fa2c1141fe0048706d3031750b73..185157ac27290002961ebd4693e9c50fa94ff1de 100755 (executable)
@@ -77,37 +77,7 @@ Sections = None
 ################################################################################
 
 def recheck(upload, session):
-    files = upload.pkg.files
-
-    cnf = Config()
-    for f in files.keys():
-        # The .orig.tar.gz can disappear out from under us is it's a
-        # duplicate of one in the archive.
-        if not files.has_key(f):
-            continue
-        # Check that the source still exists
-        if files[f]["type"] == "deb":
-            source_version = files[f]["source version"]
-            source_package = files[f]["source package"]
-            if not upload.pkg.changes["architecture"].has_key("source") \
-               and not upload.source_exists(source_package, source_version, upload.pkg.changes["distribution"].keys()):
-                source_epochless_version = re_no_epoch.sub('', source_version)
-                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
-                found = 0
-                for q in ["Accepted", "Embargoed", "Unembargoed", "Newstage"]:
-                    if cnf.has_key("Dir::Queue::%s" % (q)):
-                        if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
-                            found = 1
-                if not found:
-                    upload.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
-
-        # Version and file overwrite checks
-        if files[f]["type"] == "deb":
-            upload.check_binary_against_db(f, session)
-        elif files[f]["type"] == "dsc":
-            upload.check_source_against_db(f, session)
-            upload.check_dsc_against_db(f, session)
-
+    upload.recheck()
     if len(upload.rejects) > 0:
         answer = "XXX"
         if Options["No-Action"] or Options["Automatic"] or Options["Trainee"]:
@@ -745,7 +715,6 @@ def usage (exit_code=0):
     print """Usage: dak process-new [OPTION]... [CHANGES]...
   -a, --automatic           automatic run
   -h, --help                show this help and exit.
-  -C, --comments-dir=DIR    use DIR as comments-dir, for [o-]p-u-new
   -m, --manual-reject=MSG   manual reject with `msg'
   -n, --no-action           don't do anything
   -t, --trainee             FTP Trainee mode
@@ -847,39 +816,6 @@ def lock_package(package):
     finally:
         os.unlink(path)
 
-# def move_to_dir (upload, dest, perms=0660, changesperms=0664):
-#     utils.move (upload.pkg.changes_file, dest, perms=changesperms)
-#     file_keys = upload.pkg.files.keys()
-#     for f in file_keys:
-#         utils.move (f, dest, perms=perms)
-
-# def is_source_in_queue_dir(qdir):
-#     entries = [ x for x in os.listdir(qdir) if x.startswith(Upload.pkg.changes["source"])
-#                 and x.endswith(".changes") ]
-#     for entry in entries:
-#         # read the .dak
-#         u = queue.Upload(Cnf)
-#         u.pkg.changes_file = os.path.join(qdir, entry)
-#         u.update_vars()
-#         if not u.pkg.changes["architecture"].has_key("source"):
-#             # another binary upload, ignore
-#             continue
-#         if Upload.pkg.changes["version"] != u.pkg.changes["version"]:
-#             # another version, ignore
-#             continue
-#         # found it!
-#         return True
-#     return False
-
-# def move_to_holding(suite, queue_dir):
-#     print "Moving to %s holding area." % (suite.upper(),)
-#     if Options["No-Action"]:
-#      return
-#     Logger.log(["Moving to %s" % (suite,), Upload.pkg.changes_file])
-#     Upload.dump_vars(queue_dir)
-#     move_to_dir(queue_dir, perms=0664)
-#     os.unlink(Upload.pkg.changes_file[:-8]+".dak")
-
 def _accept(upload):
     if Options["No-Action"]:
         return
@@ -887,87 +823,21 @@ def _accept(upload):
     upload.accept(summary, short_summary, targetdir=Config()["Dir::Queue::Newstage"])
     os.unlink(upload.pkg.changes_file[:-8]+".dak")
 
-# def do_accept_stableupdate(upload,suite, q):
-#     cnf = Config()
-#     queue_dir = cnf["Dir::Queue::%s" % (q,)]
-#     if not upload.pkg.changes["architecture"].has_key("source"):
-#         # It is not a sourceful upload.  So its source may be either in p-u
-#         # holding, in new, in accepted or already installed.
-#         if is_source_in_queue_dir(queue_dir):
-#             # It's in p-u holding, so move it there.
-#             print "Binary-only upload, source in %s." % (q,)
-#             move_to_holding(suite, queue_dir)
-#         elif Upload.source_exists(Upload.pkg.changes["source"],
-#                 Upload.pkg.changes["version"]):
-#             # dak tells us that there is source available.  At time of
-#             # writing this means that it is installed, so put it into
-#             # accepted.
-#             print "Binary-only upload, source installed."
-#             Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
-#             _accept()
-#         elif is_source_in_queue_dir(Cnf["Dir::Queue::Accepted"]):
-#             # The source is in accepted, the binary cleared NEW: accept it.
-#             print "Binary-only upload, source in accepted."
-#             Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
-#             _accept()
-#         elif is_source_in_queue_dir(Cnf["Dir::Queue::New"]):
-#             # It's in NEW.  We expect the source to land in p-u holding
-#             # pretty soon.
-#             print "Binary-only upload, source in new."
-#             move_to_holding(suite, queue_dir)
-#         elif is_source_in_queue_dir(Cnf["Dir::Queue::Newstage"]):
-#             # It's in newstage.  Accept into the holding area
-#             print "Binary-only upload, source in newstage."
-#             Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
-#             _accept()
-#         else:
-#             # No case applicable.  Bail out.  Return will cause the upload
-#             # to be skipped.
-#             print "ERROR"
-#             print "Stable update failed.  Source not found."
-#             return
-#     else:
-#         # We are handling a sourceful upload.  Move to accepted if currently
-#         # in p-u holding and to p-u holding otherwise.
-#         if is_source_in_queue_dir(queue_dir):
-#             print "Sourceful upload in %s, accepting." % (q,)
-#             _accept()
-#         else:
-#             move_to_holding(suite, queue_dir)
-
 def do_accept(upload):
     print "ACCEPT"
     cnf = Config()
     if not Options["No-Action"]:
         (summary, short_summary) = upload.build_summaries()
-#     if cnf.FindB("Dinstall::SecurityQueueHandling"):
-#         upload.dump_vars(cnf["Dir::Queue::Embargoed"])
-#         move_to_dir(cnf["Dir::Queue::Embargoed"])
-#         upload.queue_build("embargoed", cnf["Dir::Queue::Embargoed"])
-#         # Check for override disparities
-#         upload.Subst["__SUMMARY__"] = summary
-#     else:
-        # Stable updates need to be copied to proposed-updates holding
-        # area instead of accepted.  Sourceful uploads need to go
-        # to it directly, binaries only if the source has not yet been
-        # accepted into p-u.
-        for suite, q in [("proposed-updates", "ProposedUpdates"),
-                ("oldstable-proposed-updates", "OldProposedUpdates")]:
-            if not upload.pkg.changes["distribution"].has_key(suite):
-                continue
-            utils.fubar("stable accept not supported yet")
-#            return do_accept_stableupdate(suite, q)
-        # Just a normal upload, accept it...
-        _accept(upload)
-
-def check_status(files):
-    new = byhand = 0
-    for f in files.keys():
-        if files[f]["type"] == "byhand":
-            byhand = 1
-        elif files[f].has_key("new"):
-            new = 1
-    return (new, byhand)
+
+        if cnf.FindB("Dinstall::SecurityQueueHandling"):
+            upload.dump_vars(cnf["Dir::Queue::Embargoed"])
+            upload.move_to_dir(cnf["Dir::Queue::Embargoed"])
+            upload.queue_build("embargoed", cnf["Dir::Queue::Embargoed"])
+            # Check for override disparities
+            upload.Subst["__SUMMARY__"] = summary
+        else:
+            # Just a normal upload, accept it...
+            _accept(upload)
 
 def do_pkg(changes_file, session):
     u = Upload()
@@ -1024,58 +894,6 @@ def end():
 
 ################################################################################
 
-# def do_comments(dir, opref, npref, line, fn):
-#     for comm in [ x for x in os.listdir(dir) if x.startswith(opref) ]:
-#         lines = open("%s/%s" % (dir, comm)).readlines()
-#         if len(lines) == 0 or lines[0] != line + "\n": continue
-#         changes_files = [ x for x in os.listdir(".") if x.startswith(comm[7:]+"_")
-#                                 and x.endswith(".changes") ]
-#         changes_files = sort_changes(changes_files)
-#         for f in changes_files:
-#             f = utils.validate_changes_file_arg(f, 0)
-#             if not f: continue
-#             print "\n" + f
-#             fn(f, "".join(lines[1:]))
-
-#         if opref != npref and not Options["No-Action"]:
-#             newcomm = npref + comm[len(opref):]
-#             os.rename("%s/%s" % (dir, comm), "%s/%s" % (dir, newcomm))
-
-# ################################################################################
-
-# def comment_accept(changes_file, comments):
-#     Upload.pkg.changes_file = changes_file
-#     Upload.init_vars()
-#     Upload.update_vars()
-#     Upload.update_subst()
-#     files = Upload.pkg.files
-
-#     if not recheck():
-#         return # dak wants to REJECT, crap
-
-#     (new, byhand) = check_status(files)
-#     if not new and not byhand:
-#         do_accept()
-
-# ################################################################################
-
-# def comment_reject(changes_file, comments):
-#     Upload.pkg.changes_file = changes_file
-#     Upload.init_vars()
-#     Upload.update_vars()
-#     Upload.update_subst()
-
-#     if not recheck():
-#         pass # dak has its own reasons to reject as well, which is fine
-
-#     reject(comments)
-#     print "REJECT\n" + reject_message,
-#     if not Options["No-Action"]:
-#         Upload.do_reject(0, reject_message)
-#         os.unlink(Upload.pkg.changes_file[:-8]+".dak")
-
-################################################################################
-
 def main():
     global Options, Logger, Sections, Priorities
 
@@ -1084,17 +902,16 @@ def main():
 
     Arguments = [('a',"automatic","Process-New::Options::Automatic"),
                  ('h',"help","Process-New::Options::Help"),
-                 ('C',"comments-dir","Process-New::Options::Comments-Dir", "HasArg"),
                  ('m',"manual-reject","Process-New::Options::Manual-Reject", "HasArg"),
                  ('t',"trainee","Process-New::Options::Trainee"),
                  ('n',"no-action","Process-New::Options::No-Action")]
 
-    for i in ["automatic", "help", "manual-reject", "no-action", "version", "comments-dir", "trainee"]:
+    for i in ["automatic", "help", "manual-reject", "no-action", "version", "trainee"]:
         if not cnf.has_key("Process-New::Options::%s" % (i)):
             cnf["Process-New::Options::%s" % (i)] = ""
 
     changes_files = apt_pkg.ParseCommandLine(cnf.Cnf,Arguments,sys.argv)
-    if len(changes_files) == 0 and not cnf.get("Process-New::Options::Comments-Dir",""):
+    if len(changes_files) == 0:
         changes_files = utils.get_changes_files(cnf["Dir::Queue::New"])
 
     Options = cnf.SubTree("Process-New::Options")
@@ -1119,22 +936,13 @@ def main():
     # Kill me now? **FIXME**
     cnf["Dinstall::Options::No-Mail"] = ""
 
-#     commentsdir = cnf.get("Process-New::Options::Comments-Dir","")
-#     if commentsdir:
-#        if changes_files != []:
-#            sys.stderr.write("Can't specify any changes files if working with comments-dir")
-#            sys.exit(1)
-#        do_comments(commentsdir, "ACCEPT.", "ACCEPTED.", "OK", comment_accept)
-#        do_comments(commentsdir, "REJECT.", "REJECTED.", "NOTOK", comment_reject)
-#     else:
-    if True:
-        for changes_file in changes_files:
-            changes_file = utils.validate_changes_file_arg(changes_file, 0)
-            if not changes_file:
-                continue
-            print "\n" + changes_file
-
-            do_pkg (changes_file, session)
+    for changes_file in changes_files:
+        changes_file = utils.validate_changes_file_arg(changes_file, 0)
+        if not changes_file:
+            continue
+        print "\n" + changes_file
+
+        do_pkg (changes_file, session)
 
     end()
 
index ecf5cd2a80ac56589202f5a1d3bb4ebcbd68cd72..c54971cf902280557814dc60095021e7f8d22c27 100755 (executable)
@@ -44,7 +44,7 @@ from daklib.dak_exceptions import DBUpdateError
 ################################################################################
 
 Cnf = None
-required_database_schema = 15
+required_database_schema = 16
 
 ################################################################################
 
index 6d5497fc2d5f4b096d972631637e284eb2ce00aa..9b37af700f631b08fb7990dd4434d440cd6d8d54 100755 (executable)
@@ -34,6 +34,7 @@
 ################################################################################
 
 import os
+import re
 import psycopg2
 import traceback
 
@@ -377,6 +378,28 @@ __all__.append('get_binary_components')
 
 ################################################################################
 
class BinaryACL(object):
    """ORM mapper class for the binary_acl table (binary upload access
    level for a key); attributes are populated by the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BinaryACL %s>' % self.binary_acl_id
+
+__all__.append('BinaryACL')
+
+################################################################################
+
class BinaryACLMap(object):
    """ORM mapper class for the binary_acl_map table (per-architecture
    binary upload permissions for a fingerprint); attributes are populated
    by the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BinaryACLMap %s>' % self.binary_acl_map_id
+
+__all__.append('BinaryACLMap')
+
+################################################################################
+
 class Component(object):
     def __init__(self, *args, **kwargs):
         pass
@@ -827,6 +850,33 @@ class Fingerprint(object):
 
 __all__.append('Fingerprint')
 
@session_wrapper
def get_fingerprint(fpr, session=None):
    """
    Look up the Fingerprint object for a given fingerprint string.

    @type fpr: string
    @param fpr: The fingerprint to find

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr, or None if unknown
    """

    try:
        return session.query(Fingerprint).filter_by(fingerprint=fpr).one()
    except NoResultFound:
        return None

__all__.append('get_fingerprint')
+
 @session_wrapper
 def get_or_set_fingerprint(fpr, session=None):
     """
@@ -864,20 +914,139 @@ __all__.append('get_or_set_fingerprint')
 
 ################################################################################
 
+# Helper routine for Keyring class
def get_ldap_name(entry):
    """Build a display name from an LDAP entry's cn/mn/sn attributes,
    skipping attributes that are missing, empty or a bare "-"."""
    parts = []
    for attr in ("cn", "mn", "sn"):
        values = entry.get(attr)
        if not values:
            continue
        if values[0] in ("", "-"):
            continue
        parts.append(values[0])
    return " ".join(parts)
+
+################################################################################
+
class Keyring(object):
    """ORM mapper class for the keyrings table, plus helpers to parse a
    GPG keyring on disk and import the key owners as database uids."""

    gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
                     " --with-colons --fingerprint --fingerprint"

    # Class-level defaults only.  load_keys() rebinds per-instance copies:
    # the original shared these dicts across *all* Keyring instances, so
    # loading a second keyring polluted the first one's key data.
    keys = {}
    fpr_lookup = {}

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<Keyring %s>' % self.keyring_name

    def de_escape_gpg_str(self, txt):
        """Decode \\xNN escape sequences as emitted by gpg --with-colons."""
        esclist = re.split(r'(\\x..)', txt)
        for x in range(1, len(esclist), 2):
            esclist[x] = "%c" % (int(esclist[x][2:], 16))
        return "".join(esclist)

    def load_keys(self, keyring):
        """Parse C{keyring} with gpg and populate self.keys (keyed by the
        short key id) and self.fpr_lookup (fingerprint -> key id)."""
        import email.Utils

        if not self.keyring_id:
            raise Exception('Must be initialized with database information')

        # Rebind per-instance so repeated loads or multiple Keyring
        # objects never share state through the class attributes.
        self.keys = {}
        self.fpr_lookup = {}

        k = os.popen(self.gpg_invocation % keyring, "r")
        key = None
        signingkey = False

        for line in k.xreadlines():
            field = line.split(":")
            if field[0] == "pub":
                key = field[4]
                (name, addr) = email.Utils.parseaddr(field[9])
                # Strip any parenthesised comment from the uid.
                name = re.sub(r"\s*[(].*[)]", "", name)
                if name == "" or addr == "" or "@" not in addr:
                    name = field[9]
                    addr = "invalid-uid"
                name = self.de_escape_gpg_str(name)
                self.keys[key] = {"email": addr}
                if name != "":
                    self.keys[key]["name"] = name
                self.keys[key]["aliases"] = [name]
                self.keys[key]["fingerprints"] = []
                signingkey = True
            elif key and field[0] == "sub" and len(field) >= 12:
                # Only record fingerprints of subkeys with signing capability.
                signingkey = ("s" in field[11])
            elif key and field[0] == "uid":
                (name, addr) = email.Utils.parseaddr(field[9])
                if name and name not in self.keys[key]["aliases"]:
                    self.keys[key]["aliases"].append(name)
            elif signingkey and field[0] == "fpr":
                self.keys[key]["fingerprints"].append(field[9])
                self.fpr_lookup[field[9]] = key

    def import_users_from_ldap(self, session):
        """Map the fingerprints loaded by load_keys() onto Debian LDAP
        accounts.  Returns (byname, byuid) lookup dicts."""
        import ldap
        cnf = Config()

        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]

        l = ldap.open(LDAPServer)
        l.simple_bind_s("","")
        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
               ["uid", "keyfingerprint", "cn", "mn", "sn"])

        byuid = {}
        byname = {}

        for i in Attrs:
            entry = i[1]
            uid = entry["uid"][0]
            name = get_ldap_name(entry)
            # NOTE(review): attribute case ("keyFingerPrint") must match what
            # the server actually returns -- confirm against the LDAP schema,
            # the search above requests it in lowercase.
            fingerprints = entry["keyFingerPrint"]
            keyid = None
            for f in fingerprints:
                key = self.fpr_lookup.get(f, None)
                if key not in self.keys:
                    continue
                self.keys[key]["uid"] = uid

                # Resolve the database uid only once per person.
                if keyid != None:
                    continue
                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, name)
                byname[uid] = (keyid, name)

        return (byname, byuid)

    def generate_users_from_keyring(self, format, session):
        """Generate database uids from the keys themselves using the given
        format string (e.g. "%s").  Keys whose uid could not be parsed are
        pooled under a single "invalid-uid" user.  Returns (byname, byuid)."""
        byuid = {}
        byname = {}
        any_invalid = False
        for x in self.keys.keys():
            if self.keys[x]["email"] == "invalid-uid":
                any_invalid = True
                self.keys[x]["uid"] = format % "invalid-uid"
            else:
                uid = format % self.keys[x]["email"]
                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, self.keys[x]["name"])
                byname[uid] = (keyid, self.keys[x]["name"])
                self.keys[x]["uid"] = uid

        if any_invalid:
            uid = format % "invalid-uid"
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, "ungeneratable user id")
            byname[uid] = (keyid, "ungeneratable user id")

        return (byname, byuid)
+
 __all__.append('Keyring')
 
 @session_wrapper
-def get_or_set_keyring(keyring, session=None):
+def get_keyring(keyring, session=None):
     """
-    If C{keyring} does not have an entry in the C{keyrings} table yet, create one
-    and return the new Keyring
+    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
     If C{keyring} already has an entry, simply return the existing Keyring
 
     @type keyring: string
@@ -892,12 +1061,20 @@ def get_or_set_keyring(keyring, session=None):
     try:
         return q.one()
     except NoResultFound:
-        obj = Keyring(keyring_name=keyring)
-        session.add(obj)
-        session.commit_or_flush()
-        return obj
+        return None
 
-__all__.append('get_or_set_keyring')
+__all__.append('get_keyring')
+
+################################################################################
+
class KeyringACLMap(object):
    """ORM mapper class for the keyring_acl_map table (per-architecture
    permissions attached to a whole keyring); attributes are populated by
    the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<KeyringACLMap %s>' % self.keyring_acl_map_id
+
+__all__.append('KeyringACLMap')
 
 ################################################################################
 
@@ -1770,6 +1947,17 @@ __all__.append('get_source_in_suite')
 
 ################################################################################
 
class SourceACL(object):
    """ORM mapper class for the source_acl table (source upload access
    level for a key); attributes are populated by the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SourceACL %s>' % self.source_acl_id
+
+__all__.append('SourceACL')
+
+################################################################################
+
 class SrcAssociation(object):
     def __init__(self, *args, **kwargs):
         pass
@@ -2089,6 +2277,17 @@ __all__.append('get_uid_from_fingerprint')
 
 ################################################################################
 
class UploadBlock(object):
    """ORM mapper class for the upload_blocks table (one-off manual blocks
    on a source package / version / uploader combination); attributes are
    populated by the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
+
+__all__.append('UploadBlock')
+
+################################################################################
+
 class DBConn(Singleton):
     """
     database module init.
@@ -2107,6 +2306,8 @@ class DBConn(Singleton):
         self.tbl_archive = Table('archive', self.db_meta, autoload=True)
         self.tbl_bin_associations = Table('bin_associations', self.db_meta, autoload=True)
         self.tbl_binaries = Table('binaries', self.db_meta, autoload=True)
+        self.tbl_binary_acl = Table('binary_acl', self.db_meta, autoload=True)
+        self.tbl_binary_acl_map = Table('binary_acl_map', self.db_meta, autoload=True)
         self.tbl_component = Table('component', self.db_meta, autoload=True)
         self.tbl_config = Table('config', self.db_meta, autoload=True)
         self.tbl_content_associations = Table('content_associations', self.db_meta, autoload=True)
@@ -2116,6 +2317,7 @@ class DBConn(Singleton):
         self.tbl_files = Table('files', self.db_meta, autoload=True)
         self.tbl_fingerprint = Table('fingerprint', self.db_meta, autoload=True)
         self.tbl_keyrings = Table('keyrings', self.db_meta, autoload=True)
+        self.tbl_keyring_acl_map = Table('keyring_acl_map', self.db_meta, autoload=True)
         self.tbl_location = Table('location', self.db_meta, autoload=True)
         self.tbl_maintainer = Table('maintainer', self.db_meta, autoload=True)
         self.tbl_new_comments = Table('new_comments', self.db_meta, autoload=True)
@@ -2127,6 +2329,7 @@ class DBConn(Singleton):
         self.tbl_queue_build = Table('queue_build', self.db_meta, autoload=True)
         self.tbl_section = Table('section', self.db_meta, autoload=True)
         self.tbl_source = Table('source', self.db_meta, autoload=True)
+        self.tbl_source_acl = Table('source_acl', self.db_meta, autoload=True)
         self.tbl_src_associations = Table('src_associations', self.db_meta, autoload=True)
         self.tbl_src_format = Table('src_format', self.db_meta, autoload=True)
         self.tbl_src_uploaders = Table('src_uploaders', self.db_meta, autoload=True)
@@ -2134,6 +2337,7 @@ class DBConn(Singleton):
         self.tbl_suite_architectures = Table('suite_architectures', self.db_meta, autoload=True)
         self.tbl_suite_src_formats = Table('suite_src_formats', self.db_meta, autoload=True)
         self.tbl_uid = Table('uid', self.db_meta, autoload=True)
+        self.tbl_upload_blocks = Table('upload_blocks', self.db_meta, autoload=True)
 
     def __setupmappers(self):
         mapper(Architecture, self.tbl_architecture,
@@ -2169,6 +2373,14 @@ class DBConn(Singleton):
                                  binassociations = relation(BinAssociation,
                                                             primaryjoin=(self.tbl_binaries.c.id==self.tbl_bin_associations.c.bin))))
 
+        mapper(BinaryACL, self.tbl_binary_acl,
+               properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))
+
+        mapper(BinaryACLMap, self.tbl_binary_acl_map,
+               properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
+                                 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
+                                 architecture = relation(Architecture)))
+
         mapper(Component, self.tbl_component,
                properties = dict(component_id = self.tbl_component.c.id,
                                  component_name = self.tbl_component.c.name))
@@ -2212,12 +2424,19 @@ class DBConn(Singleton):
                                  uid_id = self.tbl_fingerprint.c.uid,
                                  uid = relation(Uid),
                                  keyring_id = self.tbl_fingerprint.c.keyring,
-                                 keyring = relation(Keyring)))
+                                 keyring = relation(Keyring),
+                                 source_acl = relation(SourceACL),
+                                 binary_acl = relation(BinaryACL)))
 
         mapper(Keyring, self.tbl_keyrings,
                properties = dict(keyring_name = self.tbl_keyrings.c.name,
                                  keyring_id = self.tbl_keyrings.c.id))
 
+        mapper(KeyringACLMap, self.tbl_keyring_acl_map,
+               properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
+                                 keyring = relation(Keyring, backref="keyring_acl_map"),
+                                 architecture = relation(Architecture)))
+
         mapper(Location, self.tbl_location,
                properties = dict(location_id = self.tbl_location.c.id,
                                  component_id = self.tbl_location.c.component,
@@ -2285,7 +2504,11 @@ class DBConn(Singleton):
                                  srcfiles = relation(DSCFile,
                                                      primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
                                  srcassociations = relation(SrcAssociation,
-                                                            primaryjoin=(self.tbl_source.c.id==self.tbl_src_associations.c.source))))
+                                                            primaryjoin=(self.tbl_source.c.id==self.tbl_src_associations.c.source)),
+                                 srcuploaders = relation(SrcUploader)))
+
+        mapper(SourceACL, self.tbl_source_acl,
+               properties = dict(source_acl_id = self.tbl_source_acl.c.id))
 
         mapper(SrcAssociation, self.tbl_src_associations,
                properties = dict(sa_id = self.tbl_src_associations.c.id,
@@ -2326,6 +2549,11 @@ class DBConn(Singleton):
                properties = dict(uid_id = self.tbl_uid.c.id,
                                  fingerprint = relation(Fingerprint)))
 
+        mapper(UploadBlock, self.tbl_upload_blocks,
+               properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
+                                 fingerprint = relation(Fingerprint, backref="uploadblocks"),
+                                 uid = relation(Uid, backref="uploadblocks")))
+
     ## Connection functions
     def __createconn(self):
         from config import Config
index 10ae61879480e6a714170674d2d6a9127f06e7d9..eb55b251a73ddaef6f09bbbf6ba532593041a960 100755 (executable)
@@ -213,28 +213,14 @@ def check_valid(new):
 
 ###############################################################################
 
-def lookup_uid_from_fingerprint(fpr, session):
-    uid = None
-    uid_name = ""
-    # This is a stupid default, but see the comments below
-    is_dm = False
-
-    user = get_uid_from_fingerprint(fpr, session)
-
-    if user is not None:
-        uid = user.uid
-        if user.name is None:
-            uid_name = ''
-        else:
-            uid_name = user.name
-
-        # Check the relevant fingerprint (which we have to have)
-        for f in user.fingerprint:
-            if f.fingerprint == fpr:
-                is_dm = f.keyring.debian_maintainer
-                break
-
-    return (uid, uid_name, is_dm)
def check_status(files):
    """Scan an upload's files index and report whether it contains any
    NEW or BYHAND entries.  Returns a (new, byhand) pair of 0/1 flags."""
    new = 0
    byhand = 0
    for entry in files.values():
        if entry["type"] == "byhand":
            byhand = 1
        elif entry.has_key("new"):
            new = 1
    return (new, byhand)
 
 ###############################################################################
 
@@ -1451,7 +1437,201 @@ class Upload(object):
                 except:
                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
 
+    def check_if_upload_is_sponsored(self, uid_email, uid_name):
+        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
+            sponsored = False
+        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
+            sponsored = False
+            if uid_name == "":
+                sponsored = True
+        else:
+            sponsored = True
+            if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
+                sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
+                if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
+                    self.pkg.changes["changedbyemail"] not in sponsor_addresses):
+                        self.pkg.changes["sponsoremail"] = uid_email
+
+        return sponsored
+
+
     ###########################################################################
+    # check_signed_by_key checks
+    ###########################################################################
+
+    def check_signed_by_key(self):
+        """Ensure the .changes is signed by an authorized uploader."""
+        session = DBConn().session()
+
+        # First of all we check that the person has proper upload permissions
+        # and that this upload isn't blocked
+        fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
+
+        if fpr is None:
+            self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
+            return
+
+        # TODO: Check that import-keyring adds UIDs properly
+        if not fpr.uid:
+            self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
+            return
+
+        # Check that the fingerprint which uploaded has permission to do so
+        self.check_upload_permissions(fpr, session)
+
+        # Check that this package is not in a transition
+        self.check_transition(session)
+
+        session.close()
+
+
+    def check_upload_permissions(self, fpr, session):
+        # Check any one-off upload blocks
+        self.check_upload_blocks(fpr, session)
+
+        # Start with DM as a special case
+        # DM is a special case unfortunately, so we check it first
+        # (keys with no source access get more access than DMs in one
+        #  way; DMs can only upload for their packages whether source
+        #  or binary, whereas keys with no access might be able to
+        #  upload some binaries)
+        if fpr.source_acl.access_level == 'dm':
+            self.check_dm_source_upload(fpr, session)
+        else:
+            # Check source-based permissions for other types
+            if self.pkg.changes["architecture"].has_key("source"):
+                if fpr.source_acl.access_level is None:
+                    rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
+                    rej += '\nPlease contact ftpmaster if you think this is incorrect'
+                    self.rejects.append(rej)
+                    return
+            else:
+                # If not a DM, we allow full upload rights
+                uid_email = "%s@debian.org" % (fpr.uid.uid)
+                self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
+
+
+        # Check binary upload permissions
+        # By this point we know that DMs can't have got here unless they
+        # are allowed to deal with the package concerned so just apply
+        # normal checks
+        if fpr.binary_acl.access_level == 'full':
+            return
+
+        # Otherwise we're in the map case
+        tmparches = self.pkg.changes["architecture"].copy()
+        tmparches.pop('source', None)
+
+        for bam in fpr.binary_acl_map:
+            tmparches.pop(bam.architecture.arch_string, None)
+
+        if len(tmparches.keys()) > 0:
+            if fpr.binary_reject:
+                rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
+                rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
+                self.rejects.append(rej)
+            else:
+                # TODO: This is where we'll implement reject vs throw away binaries later
+                rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
+                rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
+                rej += "\nFingerprint: %s", (fpr.fingerprint)
+                self.rejects.append(rej)
+
+
+    def check_upload_blocks(self, fpr, session):
+        """Check whether any upload blocks apply to this source, source
+           version, uid / fpr combination"""
+
+        def block_rej_template(fb):
+            rej = 'Manual upload block in place for package %s' % fb.source
+            if fb.version is not None:
+                rej += ', version %s' % fb.version
+            return rej
+
+        for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
+            # version is None if the block applies to all versions
+            if fb.version is None or fb.version == self.pkg.changes['version']:
+                # Check both fpr and uid - either is enough to cause a reject
+                if fb.fpr is not None:
+                    if fb.fpr.fingerprint == fpr.fingerprint:
+                        self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
+                if fb.uid is not None:
+                    if fb.uid == fpr.uid:
+                        self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
+
+
+    def check_dm_upload(self, fpr, session):
+        # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
+        ## none of the uploaded packages are NEW
+        rej = False
+        for f in self.pkg.files.keys():
+            if self.pkg.files[f].has_key("byhand"):
+                self.rejects.append("%s may not upload BYHAND file %s" % (uid, f))
+                rej = True
+            if self.pkg.files[f].has_key("new"):
+                self.rejects.append("%s may not upload NEW file %s" % (uid, f))
+                rej = True
+
+        if rej:
+            return
+
+        ## the most recent version of the package uploaded to unstable or
+        ## experimental includes the field "DM-Upload-Allowed: yes" in the source
+        ## section of its control file
+        q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
+        q = q.join(SrcAssociation)
+        q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
+        q = q.order_by(desc('source.version')).limit(1)
+
+        r = q.all()
+
+        if len(r) != 1:
+            rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
+            self.rejects.append(rej)
+            return
+
+        r = r[0]
+        if not r.dm_upload_allowed:
+            rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
+            self.rejects.append(rej)
+            return
+
+        ## the Maintainer: field of the uploaded .changes file corresponds with
+        ## the owner of the key used (ie, non-developer maintainers may not sponsor
+        ## uploads)
+        if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
+            self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
+
+        ## the most recent version of the package uploaded to unstable or
+        ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
+        ## non-developer maintainers cannot NMU or hijack packages)
+
+        # srcuploaders includes the maintainer
+        accept = False
+        for sup in r.srcuploaders:
+            (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
+            # Eww - I hope we never have two people with the same name in Debian
+            if email == fpr.uid.uid or name == fpr.uid.name:
+                accept = True
+                break
+
+        if not accept:
+            self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
+            return
+
+        ## none of the packages are being taken over from other source packages
+        for b in self.pkg.changes["binary"].keys():
+            for suite in self.pkg.changes["distribution"].keys():
+                q = session.query(DBSource)
+                q = q.join(DBBinary).filter_by(package=b)
+                q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
+
+                for s in q.all():
+                    if s.source != self.pkg.changes["source"]:
+                        self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
+
+
+
     def check_transition(self, session):
         cnf = Config()
 
@@ -1524,92 +1704,9 @@ transition is done."""
                     return
 
     ###########################################################################
-    def check_signed_by_key(self):
-        """Ensure the .changes is signed by an authorized uploader."""
-        session = DBConn().session()
-
-        self.check_transition(session)
-
-        (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(self.pkg.changes["fingerprint"], session=session)
-
-        # match claimed name with actual name:
-        if uid is None:
-            # This is fundamentally broken but need us to refactor how we get
-            # the UIDs/Fingerprints in order for us to fix it properly
-            uid, uid_email = self.pkg.changes["fingerprint"], uid
-            may_nmu, may_sponsor = 1, 1
-            # XXX by default new dds don't have a fingerprint/uid in the db atm,
-            #     and can't get one in there if we don't allow nmu/sponsorship
-        elif is_dm is False:
-            # If is_dm is False, we allow full upload rights
-            uid_email = "%s@debian.org" % (uid)
-            may_nmu, may_sponsor = 1, 1
-        else:
-            # Assume limited upload rights unless we've discovered otherwise
-            uid_email = uid
-            may_nmu, may_sponsor = 0, 0
-
-        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
-            sponsored = 0
-        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
-            sponsored = 0
-            if uid_name == "": sponsored = 1
-        else:
-            sponsored = 1
-            if ("source" in self.pkg.changes["architecture"] and
-                uid_email and utils.is_email_alias(uid_email)):
-                sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
-                if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
-                    self.pkg.changes["changedbyemail"] not in sponsor_addresses):
-                    self.pkg.changes["sponsoremail"] = uid_email
-
-        if sponsored and not may_sponsor:
-            self.rejects.append("%s is not authorised to sponsor uploads" % (uid))
-
-        if not sponsored and not may_nmu:
-            should_reject = True
-            highest_sid, highest_version = None, None
-
-            # XXX: This reimplements in SQLA what existed before but it's fundamentally fucked
-            #      It ignores higher versions with the dm_upload_allowed flag set to false
-            #      I'm keeping the existing behaviour for now until I've gone back and
-            #      checked exactly what the GR says - mhy
-            for si in get_sources_from_name(source=self.pkg.changes['source'], dm_upload_allowed=True, session=session):
-                if highest_version is None or apt_pkg.VersionCompare(si.version, highest_version) == 1:
-                     highest_sid = si.source_id
-                     highest_version = si.version
-
-            if highest_sid is None:
-                self.rejects.append("Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version" % self.pkg.changes["source"])
-            else:
-                for sup in session.query(SrcUploader).join(DBSource).filter_by(source_id=highest_sid):
-                    (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
-                    if email == uid_email or name == uid_name:
-                        should_reject = False
-                        break
-
-            if should_reject is True:
-                self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (uid, self.pkg.changes["source"]))
-
-            for b in self.pkg.changes["binary"].keys():
-                for suite in self.pkg.changes["distribution"].keys():
-                    q = session.query(DBSource)
-                    q = q.join(DBBinary).filter_by(package=b)
-                    q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
-
-                    for s in q.all():
-                        if s.source != self.pkg.changes["source"]:
-                            self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite))
-
-            for f in self.pkg.files.keys():
-                if self.pkg.files[f].has_key("byhand"):
-                    self.rejects.append("%s may not upload BYHAND file %s" % (uid, f))
-                if self.pkg.files[f].has_key("new"):
-                    self.rejects.append("%s may not upload NEW file %s" % (uid, f))
-
-        session.close()
-
+    # End check_signed_by_key checks
     ###########################################################################
+
     def build_summaries(self):
         """ Build a summary of changes the upload introduces. """
 
@@ -2373,6 +2470,44 @@ distribution."""
             if actual_size != int(dsc_entry["size"]):
                 self.rejects.append("size for %s doesn't match %s." % (found, file))
 
+    ################################################################################
+    # This is used by process-new and process-holding to recheck a changes file
+    # at the time we're running.  It mainly wraps various other internal functions
+    # and is similar to accepted_checks - these should probably be tidied up
+    # and combined
+    def recheck(self, session):
+        cnf = Config()
+        for f in self.pkg.files.keys():
+            # The .orig.tar.gz can disappear out from under us if it's a
+            # duplicate of one in the archive.
+            if not self.pkg.files.has_key(f):
+                continue
+
+            entry = self.pkg.files[f]
+
+            # Check that the source still exists
+            if entry["type"] == "deb":
+                source_version = entry["source version"]
+                source_package = entry["source package"]
+                if not self.pkg.changes["architecture"].has_key("source") \
+                   and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
+                    source_epochless_version = re_no_epoch.sub('', source_version)
+                    dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
+                    found = False
+                    for q in ["Accepted", "Embargoed", "Unembargoed", "Newstage"]:
+                        if cnf.has_key("Dir::Queue::%s" % (q)):
+                            if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
+                                found = True
+                    if not found:
+                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
+
+            # Version and file overwrite checks
+            if entry["type"] == "deb":
+                self.check_binary_against_db(f, session)
+            elif entry["type"] == "dsc":
+                self.check_source_against_db(f, session)
+                self.check_dsc_against_db(f, session)
+
     ################################################################################
     def accepted_checks(self, overwrite_checks, session):
         # Recheck anything that relies on the database; since that's not