]> git.decadent.org.uk Git - dak.git/commitdiff
Merge commit 'ftpmaster/master'
authorMark Hymers <mhy@debian.org>
Thu, 29 Oct 2009 14:14:52 +0000 (14:14 +0000)
committerMark Hymers <mhy@debian.org>
Thu, 29 Oct 2009 14:14:52 +0000 (14:14 +0000)
dak/dakdb/update16.py [new file with mode: 0755]
dak/process_new.py
dak/update_db.py
daklib/dbconn.py
daklib/queue.py

diff --git a/dak/dakdb/update16.py b/dak/dakdb/update16.py
new file mode 100755 (executable)
index 0000000..e5b8699
--- /dev/null
@@ -0,0 +1,173 @@
+#!/usr/bin/env python
+# coding=utf8
+
+"""
+Adding tables for key-based ACLs and blocks
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2009  Mark Hymers <mhy@debian.org>
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+################################################################################
+
+
+################################################################################
+
+import psycopg2
+import time
+from daklib.dak_exceptions import DBUpdateError
+
+################################################################################
+
+def do_update(self):
+    print "Adding tables for handling key-based ACLs and upload blocks"
+
+    try:
+        c = self.db.cursor()
+
+        # Fix up some older table permissions
+        c.execute("GRANT SELECT ON src_format TO public")
+        c.execute("GRANT ALL ON src_format TO ftpmaster")
+        c.execute("GRANT USAGE ON src_format_id_seq TO ftpmaster")
+
+        c.execute("GRANT SELECT ON suite_src_formats TO public")
+        c.execute("GRANT ALL ON suite_src_formats TO ftpmaster")
+
+        # Source ACLs table
+        print "Source ACLs table"
+        c.execute("""
+        CREATE TABLE source_acl (
+              id SERIAL PRIMARY KEY,
+              access_level TEXT UNIQUE NOT NULL
+        )
+        """)
+
+        ## Can upload all packages
+        c.execute("INSERT INTO source_acl (access_level) VALUES ('full')")
+        ## Can upload only packages marked as DM upload allowed
+        c.execute("INSERT INTO source_acl (access_level) VALUES ('dm')")
+
+        c.execute("GRANT SELECT ON source_acl TO public")
+        c.execute("GRANT ALL ON source_acl TO ftpmaster")
+        c.execute("GRANT USAGE ON source_acl_id_seq TO ftpmaster")
+
+        # Binary ACLs table
+        print "Binary ACLs table"
+        c.execute("""
+        CREATE TABLE binary_acl (
+              id SERIAL PRIMARY KEY,
+              access_level TEXT UNIQUE NOT NULL
+        )
+        """)
+
+        ## Can upload any architectures of binary packages
+        c.execute("INSERT INTO binary_acl (access_level) VALUES ('full')")
+        ## Can upload debs where architectures are based on the map table binary_acl_map
+        c.execute("INSERT INTO binary_acl (access_level) VALUES ('map')")
+
+        c.execute("GRANT SELECT ON binary_acl TO public")
+        c.execute("GRANT ALL ON binary_acl TO ftpmaster")
+        c.execute("GRANT USAGE ON binary_acl_id_seq TO ftpmaster")
+
+        # This is only used if binary_acl is 2 for the fingerprint concerned
+        c.execute("""
+        CREATE TABLE binary_acl_map (
+              id SERIAL PRIMARY KEY,
+              fingerprint_id INT4 REFERENCES fingerprint (id) NOT NULL,
+              architecture_id INT4 REFERENCES architecture (id) NOT NULL,
+
+              UNIQUE (fingerprint_id, architecture_id)
+        )""")
+
+        c.execute("GRANT SELECT ON binary_acl_map TO public")
+        c.execute("GRANT ALL ON binary_acl_map TO ftpmaster")
+        c.execute("GRANT USAGE ON binary_acl_map_id_seq TO ftpmaster")
+
+        ## NULL means no source upload access (i.e. any upload containing source
+        ## will be rejected)
+        c.execute("ALTER TABLE fingerprint ADD COLUMN source_acl_id INT4 REFERENCES source_acl(id) DEFAULT NULL")
+
+        ## NULL means no binary upload access
+        c.execute("ALTER TABLE fingerprint ADD COLUMN binary_acl_id INT4 REFERENCES binary_acl(id) DEFAULT NULL")
+
+        ## TRUE here means that if the person doesn't have binary upload permissions for
+        ## an architecture, we'll reject the .changes.  FALSE means that we'll simply
+        ## dispose of those particular binaries
+        c.execute("ALTER TABLE fingerprint ADD COLUMN binary_reject BOOLEAN NOT NULL DEFAULT TRUE")
+
+        # Blockage table (replaces the hard coded stuff we used to have in extensions)
+        print "Adding blockage table"
+        c.execute("""
+        CREATE TABLE upload_blocks (
+              id             SERIAL PRIMARY KEY,
+              source         TEXT NOT NULL,
+              version        TEXT DEFAULT NULL,
+              fingerprint_id INT4 REFERENCES fingerprint (id),
+              uid_id         INT4 REFERENCES uid (id),
+              reason         TEXT NOT NULL,
+
+              CHECK (fingerprint_id IS NOT NULL OR uid_id IS NOT NULL)
+        )""")
+
+        c.execute("GRANT SELECT ON upload_blocks TO public")
+        c.execute("GRANT ALL ON upload_blocks TO ftpmaster")
+        c.execute("GRANT USAGE ON upload_blocks_id_seq TO ftpmaster")
+
+        c.execute("ALTER TABLE keyrings ADD COLUMN default_source_acl_id INT4 REFERENCES source_acl (id) DEFAULT NULL")
+        c.execute("ALTER TABLE keyrings ADD COLUMN default_binary_acl_id INT4 REFERENCES binary_acl (id) DEFAULT NULL")
+        c.execute("ALTER TABLE keyrings ADD COLUMN default_binary_reject BOOLEAN NOT NULL DEFAULT TRUE")
+
+        # Default ACLs for keyrings
+        c.execute("""
+        CREATE TABLE keyring_acl_map (
+              id SERIAL PRIMARY KEY,
+              keyring_id      INT4 REFERENCES keyrings (id) NOT NULL,
+              architecture_id INT4 REFERENCES architecture (id) NOT NULL,
+
+              UNIQUE (keyring_id, architecture_id)
+        )""")
+
+        c.execute("GRANT SELECT ON keyring_acl_map TO public")
+        c.execute("GRANT ALL ON keyring_acl_map TO ftpmaster")
+        c.execute("GRANT USAGE ON keyring_acl_map_id_seq TO ftpmaster")
+
+        # Set up some default stuff; default to old behaviour
+        print "Setting up some defaults"
+
+        c.execute("""UPDATE keyrings SET default_source_acl_id = (SELECT id FROM source_acl WHERE access_level = 'full'),
+                                         default_binary_acl_id = (SELECT id FROM binary_acl WHERE access_level = 'full')""")
+
+        c.execute("""UPDATE keyrings SET default_source_acl_id = (SELECT id FROM source_acl WHERE access_level = 'dm'),
+                                         default_binary_acl_id = (SELECT id FROM binary_acl WHERE access_level = 'full')
+                                     WHERE name = 'debian-maintainers.gpg'""")
+
+
+        # Initialize the existing keys
+        c.execute("""UPDATE fingerprint SET binary_acl_id = (SELECT default_binary_acl_id FROM keyrings
+                                                              WHERE keyrings.id = fingerprint.keyring)""")
+
+        c.execute("""UPDATE fingerprint SET source_acl_id = (SELECT default_source_acl_id FROM keyrings
+                                                              WHERE keyrings.id = fingerprint.keyring)""")
+
+        print "Updating config version"
+        c.execute("UPDATE config SET value = '16' WHERE name = 'db_revision'")
+        self.db.commit()
+
+    except psycopg2.ProgrammingError, msg:
+        self.db.rollback()
+        raise DBUpdateError, "Unable to apply ACLs update (16), rollback issued. Error message : %s" % (str(msg))
index 9a6c8e330f787b7828a5ee14e8db8a21064c4e58..1ab3da45915de5240a956c971e66a65c57c3ea07 100755 (executable)
@@ -77,37 +77,7 @@ Sections = None
 ################################################################################
 
 def recheck(upload, session):
-    files = upload.pkg.files
-
-    cnf = Config()
-    for f in files.keys():
-        # The .orig.tar.gz can disappear out from under us is it's a
-        # duplicate of one in the archive.
-        if not files.has_key(f):
-            continue
-        # Check that the source still exists
-        if files[f]["type"] == "deb":
-            source_version = files[f]["source version"]
-            source_package = files[f]["source package"]
-            if not upload.pkg.changes["architecture"].has_key("source") \
-               and not upload.source_exists(source_package, source_version, upload.pkg.changes["distribution"].keys()):
-                source_epochless_version = re_no_epoch.sub('', source_version)
-                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
-                found = 0
-                for q in ["Accepted", "Embargoed", "Unembargoed", "Newstage"]:
-                    if cnf.has_key("Dir::Queue::%s" % (q)):
-                        if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
-                            found = 1
-                if not found:
-                    upload.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
-
-        # Version and file overwrite checks
-        if files[f]["type"] == "deb":
-            upload.check_binary_against_db(f, session)
-        elif files[f]["type"] == "dsc":
-            upload.check_source_against_db(f, session)
-            upload.check_dsc_against_db(f, session)
-
+    upload.recheck()
     if len(upload.rejects) > 0:
         answer = "XXX"
         if Options["No-Action"] or Options["Automatic"] or Options["Trainee"]:
@@ -745,7 +715,6 @@ def usage (exit_code=0):
     print """Usage: dak process-new [OPTION]... [CHANGES]...
   -a, --automatic           automatic run
   -h, --help                show this help and exit.
-  -C, --comments-dir=DIR    use DIR as comments-dir, for [o-]p-u-new
   -m, --manual-reject=MSG   manual reject with `msg'
   -n, --no-action           don't do anything
   -t, --trainee             FTP Trainee mode
@@ -847,39 +816,6 @@ def lock_package(package):
     finally:
         os.unlink(path)
 
-# def move_to_dir (upload, dest, perms=0660, changesperms=0664):
-#     utils.move (upload.pkg.changes_file, dest, perms=changesperms)
-#     file_keys = upload.pkg.files.keys()
-#     for f in file_keys:
-#         utils.move (f, dest, perms=perms)
-
-# def is_source_in_queue_dir(qdir):
-#     entries = [ x for x in os.listdir(qdir) if x.startswith(Upload.pkg.changes["source"])
-#                 and x.endswith(".changes") ]
-#     for entry in entries:
-#         # read the .dak
-#         u = queue.Upload(Cnf)
-#         u.pkg.changes_file = os.path.join(qdir, entry)
-#         u.update_vars()
-#         if not u.pkg.changes["architecture"].has_key("source"):
-#             # another binary upload, ignore
-#             continue
-#         if Upload.pkg.changes["version"] != u.pkg.changes["version"]:
-#             # another version, ignore
-#             continue
-#         # found it!
-#         return True
-#     return False
-
-# def move_to_holding(suite, queue_dir):
-#     print "Moving to %s holding area." % (suite.upper(),)
-#     if Options["No-Action"]:
-#      return
-#     Logger.log(["Moving to %s" % (suite,), Upload.pkg.changes_file])
-#     Upload.dump_vars(queue_dir)
-#     move_to_dir(queue_dir, perms=0664)
-#     os.unlink(Upload.pkg.changes_file[:-8]+".dak")
-
 def _accept(upload):
     if Options["No-Action"]:
         return
@@ -887,87 +823,21 @@ def _accept(upload):
     upload.accept(summary, short_summary, targetdir=Config()["Dir::Queue::Newstage"])
     os.unlink(upload.pkg.changes_file[:-8]+".dak")
 
-# def do_accept_stableupdate(upload,suite, q):
-#     cnf = Config()
-#     queue_dir = cnf["Dir::Queue::%s" % (q,)]
-#     if not upload.pkg.changes["architecture"].has_key("source"):
-#         # It is not a sourceful upload.  So its source may be either in p-u
-#         # holding, in new, in accepted or already installed.
-#         if is_source_in_queue_dir(queue_dir):
-#             # It's in p-u holding, so move it there.
-#             print "Binary-only upload, source in %s." % (q,)
-#             move_to_holding(suite, queue_dir)
-#         elif Upload.source_exists(Upload.pkg.changes["source"],
-#                 Upload.pkg.changes["version"]):
-#             # dak tells us that there is source available.  At time of
-#             # writing this means that it is installed, so put it into
-#             # accepted.
-#             print "Binary-only upload, source installed."
-#             Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
-#             _accept()
-#         elif is_source_in_queue_dir(Cnf["Dir::Queue::Accepted"]):
-#             # The source is in accepted, the binary cleared NEW: accept it.
-#             print "Binary-only upload, source in accepted."
-#             Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
-#             _accept()
-#         elif is_source_in_queue_dir(Cnf["Dir::Queue::New"]):
-#             # It's in NEW.  We expect the source to land in p-u holding
-#             # pretty soon.
-#             print "Binary-only upload, source in new."
-#             move_to_holding(suite, queue_dir)
-#         elif is_source_in_queue_dir(Cnf["Dir::Queue::Newstage"]):
-#             # It's in newstage.  Accept into the holding area
-#             print "Binary-only upload, source in newstage."
-#             Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
-#             _accept()
-#         else:
-#             # No case applicable.  Bail out.  Return will cause the upload
-#             # to be skipped.
-#             print "ERROR"
-#             print "Stable update failed.  Source not found."
-#             return
-#     else:
-#         # We are handling a sourceful upload.  Move to accepted if currently
-#         # in p-u holding and to p-u holding otherwise.
-#         if is_source_in_queue_dir(queue_dir):
-#             print "Sourceful upload in %s, accepting." % (q,)
-#             _accept()
-#         else:
-#             move_to_holding(suite, queue_dir)
-
 def do_accept(upload):
     print "ACCEPT"
     cnf = Config()
     if not Options["No-Action"]:
         (summary, short_summary) = upload.build_summaries()
-#     if cnf.FindB("Dinstall::SecurityQueueHandling"):
-#         upload.dump_vars(cnf["Dir::Queue::Embargoed"])
-#         move_to_dir(cnf["Dir::Queue::Embargoed"])
-#         upload.queue_build("embargoed", cnf["Dir::Queue::Embargoed"])
-#         # Check for override disparities
-#         upload.Subst["__SUMMARY__"] = summary
-#     else:
-        # Stable updates need to be copied to proposed-updates holding
-        # area instead of accepted.  Sourceful uploads need to go
-        # to it directly, binaries only if the source has not yet been
-        # accepted into p-u.
-        for suite, q in [("proposed-updates", "ProposedUpdates"),
-                ("oldstable-proposed-updates", "OldProposedUpdates")]:
-            if not upload.pkg.changes["distribution"].has_key(suite):
-                continue
-            utils.fubar("stable accept not supported yet")
-#            return do_accept_stableupdate(suite, q)
-        # Just a normal upload, accept it...
-        _accept(upload)
-
-def check_status(files):
-    new = byhand = 0
-    for f in files.keys():
-        if files[f]["type"] == "byhand":
-            byhand = 1
-        elif files[f].has_key("new"):
-            new = 1
-    return (new, byhand)
+
+        if cnf.FindB("Dinstall::SecurityQueueHandling"):
+            upload.dump_vars(cnf["Dir::Queue::Embargoed"])
+            upload.move_to_dir(cnf["Dir::Queue::Embargoed"])
+            upload.queue_build("embargoed", cnf["Dir::Queue::Embargoed"])
+            # Check for override disparities
+            upload.Subst["__SUMMARY__"] = summary
+        else:
+            # Just a normal upload, accept it...
+            _accept(upload)
 
 def do_pkg(changes_file, session):
     u = Upload()
@@ -1024,58 +894,6 @@ def end():
 
 ################################################################################
 
-# def do_comments(dir, opref, npref, line, fn):
-#     for comm in [ x for x in os.listdir(dir) if x.startswith(opref) ]:
-#         lines = open("%s/%s" % (dir, comm)).readlines()
-#         if len(lines) == 0 or lines[0] != line + "\n": continue
-#         changes_files = [ x for x in os.listdir(".") if x.startswith(comm[7:]+"_")
-#                                 and x.endswith(".changes") ]
-#         changes_files = sort_changes(changes_files)
-#         for f in changes_files:
-#             f = utils.validate_changes_file_arg(f, 0)
-#             if not f: continue
-#             print "\n" + f
-#             fn(f, "".join(lines[1:]))
-
-#         if opref != npref and not Options["No-Action"]:
-#             newcomm = npref + comm[len(opref):]
-#             os.rename("%s/%s" % (dir, comm), "%s/%s" % (dir, newcomm))
-
-# ################################################################################
-
-# def comment_accept(changes_file, comments):
-#     Upload.pkg.changes_file = changes_file
-#     Upload.init_vars()
-#     Upload.update_vars()
-#     Upload.update_subst()
-#     files = Upload.pkg.files
-
-#     if not recheck():
-#         return # dak wants to REJECT, crap
-
-#     (new, byhand) = check_status(files)
-#     if not new and not byhand:
-#         do_accept()
-
-# ################################################################################
-
-# def comment_reject(changes_file, comments):
-#     Upload.pkg.changes_file = changes_file
-#     Upload.init_vars()
-#     Upload.update_vars()
-#     Upload.update_subst()
-
-#     if not recheck():
-#         pass # dak has its own reasons to reject as well, which is fine
-
-#     reject(comments)
-#     print "REJECT\n" + reject_message,
-#     if not Options["No-Action"]:
-#         Upload.do_reject(0, reject_message)
-#         os.unlink(Upload.pkg.changes_file[:-8]+".dak")
-
-################################################################################
-
 def main():
     global Options, Logger, Sections, Priorities
 
@@ -1084,17 +902,16 @@ def main():
 
     Arguments = [('a',"automatic","Process-New::Options::Automatic"),
                  ('h',"help","Process-New::Options::Help"),
-                 ('C',"comments-dir","Process-New::Options::Comments-Dir", "HasArg"),
                  ('m',"manual-reject","Process-New::Options::Manual-Reject", "HasArg"),
                  ('t',"trainee","Process-New::Options::Trainee"),
                  ('n',"no-action","Process-New::Options::No-Action")]
 
-    for i in ["automatic", "help", "manual-reject", "no-action", "version", "comments-dir", "trainee"]:
+    for i in ["automatic", "help", "manual-reject", "no-action", "version", "trainee"]:
         if not cnf.has_key("Process-New::Options::%s" % (i)):
             cnf["Process-New::Options::%s" % (i)] = ""
 
     changes_files = apt_pkg.ParseCommandLine(cnf.Cnf,Arguments,sys.argv)
-    if len(changes_files) == 0 and not cnf.get("Process-New::Options::Comments-Dir",""):
+    if len(changes_files) == 0:
         changes_files = utils.get_changes_files(cnf["Dir::Queue::New"])
 
     Options = cnf.SubTree("Process-New::Options")
@@ -1119,22 +936,13 @@ def main():
     # Kill me now? **FIXME**
     cnf["Dinstall::Options::No-Mail"] = ""
 
-#     commentsdir = cnf.get("Process-New::Options::Comments-Dir","")
-#     if commentsdir:
-#        if changes_files != []:
-#            sys.stderr.write("Can't specify any changes files if working with comments-dir")
-#            sys.exit(1)
-#        do_comments(commentsdir, "ACCEPT.", "ACCEPTED.", "OK", comment_accept)
-#        do_comments(commentsdir, "REJECT.", "REJECTED.", "NOTOK", comment_reject)
-#     else:
-    if True:
-        for changes_file in changes_files:
-            changes_file = utils.validate_changes_file_arg(changes_file, 0)
-            if not changes_file:
-                continue
-            print "\n" + changes_file
-
-            do_pkg (changes_file, session)
+    for changes_file in changes_files:
+        changes_file = utils.validate_changes_file_arg(changes_file, 0)
+        if not changes_file:
+            continue
+        print "\n" + changes_file
+
+        do_pkg (changes_file, session)
 
     end()
 
index ecf5cd2a80ac56589202f5a1d3bb4ebcbd68cd72..c54971cf902280557814dc60095021e7f8d22c27 100755 (executable)
@@ -44,7 +44,7 @@ from daklib.dak_exceptions import DBUpdateError
 ################################################################################
 
 Cnf = None
-required_database_schema = 15
+required_database_schema = 16
 
 ################################################################################
 
index 6d5497fc2d5f4b096d972631637e284eb2ce00aa..c16cb2f9921fbe6d0b7cd8b5f72c239441a13b36 100755 (executable)
@@ -377,6 +377,28 @@ __all__.append('get_binary_components')
 
 ################################################################################
 
+class BinaryACL(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<BinaryACL %s>' % self.binary_acl_id
+
+__all__.append('BinaryACL')
+
+################################################################################
+
+class BinaryACLMap(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<BinaryACLMap %s>' % self.binary_acl_map_id
+
+__all__.append('BinaryACLMap')
+
+################################################################################
+
 class Component(object):
     def __init__(self, *args, **kwargs):
         pass
@@ -827,6 +849,33 @@ class Fingerprint(object):
 
 __all__.append('Fingerprint')
 
+@session_wrapper
+def get_fingerprint(fpr, session=None):
+    """
+    Returns Fingerprint object for given fpr.
+
+    @type fpr: string
+    @param fpr: The fpr to find / add
+
+    @type session: SQLAlchemy
+    @param session: Optional SQL session object (a temporary one will be
+    generated if not supplied).
+
+    @rtype: Fingerprint
+    @return: the Fingerprint object for the given fpr or None
+    """
+
+    q = session.query(Fingerprint).filter_by(fingerprint=fpr)
+
+    try:
+        ret = q.one()
+    except NoResultFound:
+        ret = None
+
+    return ret
+
+__all__.append('get_fingerprint')
+
 @session_wrapper
 def get_or_set_fingerprint(fpr, session=None):
     """
@@ -901,6 +950,17 @@ __all__.append('get_or_set_keyring')
 
 ################################################################################
 
+class KeyringACLMap(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<KeyringACLMap %s>' % self.keyring_acl_map_id
+
+__all__.append('KeyringACLMap')
+
+################################################################################
+
 class Location(object):
     def __init__(self, *args, **kwargs):
         pass
@@ -1770,6 +1830,17 @@ __all__.append('get_source_in_suite')
 
 ################################################################################
 
+class SourceACL(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<SourceACL %s>' % self.source_acl_id
+
+__all__.append('SourceACL')
+
+################################################################################
+
 class SrcAssociation(object):
     def __init__(self, *args, **kwargs):
         pass
@@ -2089,6 +2160,17 @@ __all__.append('get_uid_from_fingerprint')
 
 ################################################################################
 
+class UploadBlock(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
+
+__all__.append('UploadBlock')
+
+################################################################################
+
 class DBConn(Singleton):
     """
     database module init.
@@ -2107,6 +2189,8 @@ class DBConn(Singleton):
         self.tbl_archive = Table('archive', self.db_meta, autoload=True)
         self.tbl_bin_associations = Table('bin_associations', self.db_meta, autoload=True)
         self.tbl_binaries = Table('binaries', self.db_meta, autoload=True)
+        self.tbl_binary_acl = Table('binary_acl', self.db_meta, autoload=True)
+        self.tbl_binary_acl_map = Table('binary_acl_map', self.db_meta, autoload=True)
         self.tbl_component = Table('component', self.db_meta, autoload=True)
         self.tbl_config = Table('config', self.db_meta, autoload=True)
         self.tbl_content_associations = Table('content_associations', self.db_meta, autoload=True)
@@ -2116,6 +2200,7 @@ class DBConn(Singleton):
         self.tbl_files = Table('files', self.db_meta, autoload=True)
         self.tbl_fingerprint = Table('fingerprint', self.db_meta, autoload=True)
         self.tbl_keyrings = Table('keyrings', self.db_meta, autoload=True)
+        self.tbl_keyring_acl_map = Table('keyring_acl_map', self.db_meta, autoload=True)
         self.tbl_location = Table('location', self.db_meta, autoload=True)
         self.tbl_maintainer = Table('maintainer', self.db_meta, autoload=True)
         self.tbl_new_comments = Table('new_comments', self.db_meta, autoload=True)
@@ -2127,6 +2212,7 @@ class DBConn(Singleton):
         self.tbl_queue_build = Table('queue_build', self.db_meta, autoload=True)
         self.tbl_section = Table('section', self.db_meta, autoload=True)
         self.tbl_source = Table('source', self.db_meta, autoload=True)
+        self.tbl_source_acl = Table('source_acl', self.db_meta, autoload=True)
         self.tbl_src_associations = Table('src_associations', self.db_meta, autoload=True)
         self.tbl_src_format = Table('src_format', self.db_meta, autoload=True)
         self.tbl_src_uploaders = Table('src_uploaders', self.db_meta, autoload=True)
@@ -2134,6 +2220,7 @@ class DBConn(Singleton):
         self.tbl_suite_architectures = Table('suite_architectures', self.db_meta, autoload=True)
         self.tbl_suite_src_formats = Table('suite_src_formats', self.db_meta, autoload=True)
         self.tbl_uid = Table('uid', self.db_meta, autoload=True)
+        self.tbl_upload_blocks = Table('upload_blocks', self.db_meta, autoload=True)
 
     def __setupmappers(self):
         mapper(Architecture, self.tbl_architecture,
@@ -2169,6 +2256,14 @@ class DBConn(Singleton):
                                  binassociations = relation(BinAssociation,
                                                             primaryjoin=(self.tbl_binaries.c.id==self.tbl_bin_associations.c.bin))))
 
+        mapper(BinaryACL, self.tbl_binary_acl,
+               properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))
+
+        mapper(BinaryACLMap, self.tbl_binary_acl_map,
+               properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
+                                 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
+                                 architecture = relation(Architecture)))
+
         mapper(Component, self.tbl_component,
                properties = dict(component_id = self.tbl_component.c.id,
                                  component_name = self.tbl_component.c.name))
@@ -2212,12 +2307,19 @@ class DBConn(Singleton):
                                  uid_id = self.tbl_fingerprint.c.uid,
                                  uid = relation(Uid),
                                  keyring_id = self.tbl_fingerprint.c.keyring,
-                                 keyring = relation(Keyring)))
+                                 keyring = relation(Keyring),
+                                 source_acl = relation(SourceACL),
+                                 binary_acl = relation(BinaryACL)))
 
         mapper(Keyring, self.tbl_keyrings,
                properties = dict(keyring_name = self.tbl_keyrings.c.name,
                                  keyring_id = self.tbl_keyrings.c.id))
 
+        mapper(KeyringACLMap, self.tbl_keyring_acl_map,
+               properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
+                                 keyring = relation(Keyring, backref="keyring_acl_map"),
+                                 architecture = relation(Architecture)))
+
         mapper(Location, self.tbl_location,
                properties = dict(location_id = self.tbl_location.c.id,
                                  component_id = self.tbl_location.c.component,
@@ -2285,7 +2387,11 @@ class DBConn(Singleton):
                                  srcfiles = relation(DSCFile,
                                                      primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
                                  srcassociations = relation(SrcAssociation,
-                                                            primaryjoin=(self.tbl_source.c.id==self.tbl_src_associations.c.source))))
+                                                            primaryjoin=(self.tbl_source.c.id==self.tbl_src_associations.c.source)),
+                                 srcuploaders = relation(SrcUploader)))
+
+        mapper(SourceACL, self.tbl_source_acl,
+               properties = dict(source_acl_id = self.tbl_source_acl.c.id))
 
         mapper(SrcAssociation, self.tbl_src_associations,
                properties = dict(sa_id = self.tbl_src_associations.c.id,
@@ -2326,6 +2432,11 @@ class DBConn(Singleton):
                properties = dict(uid_id = self.tbl_uid.c.id,
                                  fingerprint = relation(Fingerprint)))
 
+        mapper(UploadBlock, self.tbl_upload_blocks,
+               properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
+                                 fingerprint = relation(Fingerprint, backref="uploadblocks"),
+                                 uid = relation(Uid, backref="uploadblocks")))
+
     ## Connection functions
     def __createconn(self):
         from config import Config
index 61e2df05d5bef723b0b7b713b0f9a56d65599c95..e6547f7eab951c9e056e68c847f5701768c65557 100755 (executable)
@@ -213,28 +213,14 @@ def check_valid(new):
 
 ###############################################################################
 
-def lookup_uid_from_fingerprint(fpr, session):
-    uid = None
-    uid_name = ""
-    # This is a stupid default, but see the comments below
-    is_dm = False
-
-    user = get_uid_from_fingerprint(fpr, session)
-
-    if user is not None:
-        uid = user.uid
-        if user.name is None:
-            uid_name = ''
-        else:
-            uid_name = user.name
-
-        # Check the relevant fingerprint (which we have to have)
-        for f in user.fingerprint:
-            if f.fingerprint == fpr:
-                is_dm = f.keyring.debian_maintainer
-                break
-
-    return (uid, uid_name, is_dm)
+def check_status(files):
+    new = byhand = 0
+    for f in files.keys():
+        if files[f]["type"] == "byhand":
+            byhand = 1
+        elif files[f].has_key("new"):
+            new = 1
+    return (new, byhand)
 
 ###############################################################################
 
@@ -1423,7 +1409,201 @@ class Upload(object):
                 except:
                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
 
+    def check_if_upload_is_sponsored(self, uid_email, uid_name):
+        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
+            sponsored = False
+        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
+            sponsored = False
+            if uid_name == "":
+                sponsored = True
+        else:
+            sponsored = True
+            if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
+                sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
+                if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
+                    self.pkg.changes["changedbyemail"] not in sponsor_addresses):
+                        self.pkg.changes["sponsoremail"] = uid_email
+
+        return sponsored
+
+
     ###########################################################################
+    # check_signed_by_key checks
+    ###########################################################################
+
+    def check_signed_by_key(self):
+        """Ensure the .changes is signed by an authorized uploader."""
+        session = DBConn().session()
+
+        # First of all we check that the person has proper upload permissions
+        # and that this upload isn't blocked
+        fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
+
+        if fpr is None:
+            self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
+            return
+
+        # TODO: Check that import-keyring adds UIDs properly
+        if not fpr.uid:
+            self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
+            return
+
+        # Check that the fingerprint which uploaded has permission to do so
+        self.check_upload_permissions(fpr, session)
+
+        # Check that this package is not in a transition
+        self.check_transition(session)
+
+        session.close()
+
+
+    def check_upload_permissions(self, fpr, session):
+        # Check any one-off upload blocks
+        self.check_upload_blocks(fpr, session)
+
+        # Start with DM as a special case
+        # DM is a special case unfortunately, so we check it first
+        # (keys with no source access get more access than DMs in one
+        #  way; DMs can only upload for their packages whether source
+        #  or binary, whereas keys with no access might be able to
+        #  upload some binaries)
+        if fpr.source_acl.access_level == 'dm':
+            self.check_dm_source_upload(fpr, session)
+        else:
+            # Check source-based permissions for other types
+            if self.pkg.changes["architecture"].has_key("source"):
+                if fpr.source_acl.access_level is None:
+                    rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
+                    rej += '\nPlease contact ftpmaster if you think this is incorrect'
+                    self.rejects.append(rej)
+                    return
+            else:
+                # If not a DM, we allow full upload rights
+                uid_email = "%s@debian.org" % (fpr.uid.uid)
+                self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
+
+
+        # Check binary upload permissions
+        # By this point we know that DMs can't have got here unless they
+        # are allowed to deal with the package concerned so just apply
+        # normal checks
+        if fpr.binary_acl.access_level == 'full':
+            return
+
+        # Otherwise we're in the map case
+        tmparches = self.pkg.changes["architecture"].copy()
+        tmparches.pop('source', None)
+
+        for bam in fpr.binary_acl_map:
+            tmparches.pop(bam.architecture.arch_string, None)
+
+        if len(tmparches.keys()) > 0:
+            if fpr.binary_reject:
+                rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
+                rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
+                self.rejects.append(rej)
+            else:
+                # TODO: This is where we'll implement reject vs throw away binaries later
+                rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
+                rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
+                rej += "\nFingerprint: %s", (fpr.fingerprint)
+                self.rejects.append(rej)
+
+
+    def check_upload_blocks(self, fpr, session):
+        """Check whether any upload blocks apply to this source, source
+           version, uid / fpr combination"""
+
+        def block_rej_template(fb):
+            rej = 'Manual upload block in place for package %s' % fb.source
+            if fb.version is not None:
+                rej += ', version %s' % fb.version
+            return rej
+
+        for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
+            # version is None if the block applies to all versions
+            if fb.version is None or fb.version == self.pkg.changes['version']:
+                # Check both fpr and uid - either is enough to cause a reject
+                if fb.fpr is not None:
+                    if fb.fpr.fingerprint == fpr.fingerprint:
+                        self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
+                if fb.uid is not None:
+                    if fb.uid == fpr.uid:
+                        self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
+
+
+    def check_dm_upload(self, fpr, session):
+        # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
+        ## none of the uploaded packages are NEW
+        rej = False
+        for f in self.pkg.files.keys():
+            if self.pkg.files[f].has_key("byhand"):
+                self.rejects.append("%s may not upload BYHAND file %s" % (uid, f))
+                rej = True
+            if self.pkg.files[f].has_key("new"):
+                self.rejects.append("%s may not upload NEW file %s" % (uid, f))
+                rej = True
+
+        if rej:
+            return
+
+        ## the most recent version of the package uploaded to unstable or
+        ## experimental includes the field "DM-Upload-Allowed: yes" in the source
+        ## section of its control file
+        q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
+        q = q.join(SrcAssociation)
+        q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
+        q = q.order_by(desc('source.version')).limit(1)
+
+        r = q.all()
+
+        if len(r) != 1:
+            rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
+            self.rejects.append(rej)
+            return
+
+        r = r[0]
+        if not r.dm_upload_allowed:
+            rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
+            self.rejects.append(rej)
+            return
+
+        ## the Maintainer: field of the uploaded .changes file corresponds with
+        ## the owner of the key used (ie, non-developer maintainers may not sponsor
+        ## uploads)
+        if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
+            self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
+
+        ## the most recent version of the package uploaded to unstable or
+        ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
+        ## non-developer maintainers cannot NMU or hijack packages)
+
+        # srcuploaders includes the maintainer
+        accept = False
+        for sup in r.srcuploaders:
+            (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
+            # Eww - I hope we never have two people with the same name in Debian
+            if email == fpr.uid.uid or name == fpr.uid.name:
+                accept = True
+                break
+
+        if not accept:
+            self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
+            return
+
+        ## none of the packages are being taken over from other source packages
+        for b in self.pkg.changes["binary"].keys():
+            for suite in self.pkg.changes["distribution"].keys():
+                q = session.query(DBSource)
+                q = q.join(DBBinary).filter_by(package=b)
+                q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
+
+                for s in q.all():
+                    if s.source != self.pkg.changes["source"]:
+                        self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
+
+
+
     def check_transition(self, session):
         cnf = Config()
 
@@ -1496,92 +1676,9 @@ transition is done."""
                     return
 
     ###########################################################################
-    def check_signed_by_key(self):
-        """Ensure the .changes is signed by an authorized uploader."""
-        session = DBConn().session()
-
-        self.check_transition(session)
-
-        (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(self.pkg.changes["fingerprint"], session=session)
-
-        # match claimed name with actual name:
-        if uid is None:
-            # This is fundamentally broken but need us to refactor how we get
-            # the UIDs/Fingerprints in order for us to fix it properly
-            uid, uid_email = self.pkg.changes["fingerprint"], uid
-            may_nmu, may_sponsor = 1, 1
-            # XXX by default new dds don't have a fingerprint/uid in the db atm,
-            #     and can't get one in there if we don't allow nmu/sponsorship
-        elif is_dm is False:
-            # If is_dm is False, we allow full upload rights
-            uid_email = "%s@debian.org" % (uid)
-            may_nmu, may_sponsor = 1, 1
-        else:
-            # Assume limited upload rights unless we've discovered otherwise
-            uid_email = uid
-            may_nmu, may_sponsor = 0, 0
-
-        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
-            sponsored = 0
-        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
-            sponsored = 0
-            if uid_name == "": sponsored = 1
-        else:
-            sponsored = 1
-            if ("source" in self.pkg.changes["architecture"] and
-                uid_email and utils.is_email_alias(uid_email)):
-                sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
-                if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
-                    self.pkg.changes["changedbyemail"] not in sponsor_addresses):
-                    self.pkg.changes["sponsoremail"] = uid_email
-
-        if sponsored and not may_sponsor:
-            self.rejects.append("%s is not authorised to sponsor uploads" % (uid))
-
-        if not sponsored and not may_nmu:
-            should_reject = True
-            highest_sid, highest_version = None, None
-
-            # XXX: This reimplements in SQLA what existed before but it's fundamentally fucked
-            #      It ignores higher versions with the dm_upload_allowed flag set to false
-            #      I'm keeping the existing behaviour for now until I've gone back and
-            #      checked exactly what the GR says - mhy
-            for si in get_sources_from_name(source=self.pkg.changes['source'], dm_upload_allowed=True, session=session):
-                if highest_version is None or apt_pkg.VersionCompare(si.version, highest_version) == 1:
-                     highest_sid = si.source_id
-                     highest_version = si.version
-
-            if highest_sid is None:
-                self.rejects.append("Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version" % self.pkg.changes["source"])
-            else:
-                for sup in session.query(SrcUploader).join(DBSource).filter_by(source_id=highest_sid):
-                    (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
-                    if email == uid_email or name == uid_name:
-                        should_reject = False
-                        break
-
-            if should_reject is True:
-                self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (uid, self.pkg.changes["source"]))
-
-            for b in self.pkg.changes["binary"].keys():
-                for suite in self.pkg.changes["distribution"].keys():
-                    q = session.query(DBSource)
-                    q = q.join(DBBinary).filter_by(package=b)
-                    q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
-
-                    for s in q.all():
-                        if s.source != self.pkg.changes["source"]:
-                            self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite))
-
-            for f in self.pkg.files.keys():
-                if self.pkg.files[f].has_key("byhand"):
-                    self.rejects.append("%s may not upload BYHAND file %s" % (uid, f))
-                if self.pkg.files[f].has_key("new"):
-                    self.rejects.append("%s may not upload NEW file %s" % (uid, f))
-
-        session.close()
-
+    # End check_signed_by_key checks
     ###########################################################################
+
     def build_summaries(self):
         """ Build a summary of changes the upload introduces. """
 
@@ -2345,6 +2442,44 @@ distribution."""
             if actual_size != int(dsc_entry["size"]):
                 self.rejects.append("size for %s doesn't match %s." % (found, file))
 
+    ################################################################################
+    # This is used by process-new and process-holding to recheck a changes file
+    # at the time we're running.  It mainly wraps various other internal functions
+    # and is similar to accepted_checks - these should probably be tidied up
+    # and combined
+    def recheck(self, session):
+        """Re-run database-dependent checks on this upload.
+
+        Used by process-new and process-holding to recheck a changes file
+        at the time they run; appends any problems found to self.rejects.
+        Similar to accepted_checks — the two should probably be tidied up
+        and combined.
+        """
+        cnf = Config()
+        for f in self.pkg.files.keys():
+            # The .orig.tar.gz can disappear out from under us if it's a
+            # duplicate of one in the archive.
+            if not self.pkg.files.has_key(f):
+                continue
+
+            entry = self.pkg.files[f]
+
+            # Check that the source still exists
+            if entry["type"] == "deb":
+                source_version = entry["source version"]
+                source_package = entry["source package"]
+                if not self.pkg.changes["architecture"].has_key("source") \
+                   and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
+                    # Source is not in the archive; look for the .dsc in the
+                    # holding queues before rejecting.
+                    source_epochless_version = re_no_epoch.sub('', source_version)
+                    dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
+                    found = False
+                    for q in ["Accepted", "Embargoed", "Unembargoed", "Newstage"]:
+                        if cnf.has_key("Dir::Queue::%s" % (q)):
+                            if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
+                                found = True
+                    if not found:
+                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
+
+            # Version and file overwrite checks
+            if entry["type"] == "deb":
+                self.check_binary_against_db(f, session)
+            elif entry["type"] == "dsc":
+                self.check_source_against_db(f, session)
+                self.check_dsc_against_db(f, session)
     ################################################################################
     def accepted_checks(self, overwrite_checks, session):
         # Recheck anything that relies on the database; since that's not