git.decadent.org.uk Git - dak.git/commitdiff
Merge remote-tracking branch 'origin/dakweb'
authorMark Hymers <mhy@debian.org>
Fri, 7 Nov 2014 14:07:58 +0000 (14:07 +0000)
committerMark Hymers <mhy@debian.org>
Fri, 7 Nov 2014 14:07:58 +0000 (14:07 +0000)
51 files changed:
config/debian/apache.conf-incoming [deleted file]
config/debian/common
config/debian/cron.daily
config/debian/cron.unchecked
config/debian/dak.conf
config/debian/vars
dak/admin.py
dak/dakdb/update103.py [new file with mode: 0644]
dak/dakdb/update104.py [new file with mode: 0644]
dak/export_suite.py
dak/generate_packages_sources2.py
dak/init_dirs.py
dak/make_pkg_file_mapping.py
dak/process_new.py
dak/process_policy.py
dak/process_upload.py
dak/show_deferred.py
daklib/architecture.py
daklib/archive.py
daklib/checks.py
daklib/command.py
daklib/dbconn.py
daklib/gpg.py
daklib/packagelist.py
daklib/policy.py
daklib/regexes.py
daklib/upload.py
docs/README.stable-point-release
docs/debian-specific
scripts/debian/ftpstats.R
scripts/debian/moveftp.sh
scripts/debian/sync-dd
scripts/debian/update-ftpstats
scripts/nfu/get-w-b-db
setup/README
tests/fixtures/gpg/expired-subkey.asc [new file with mode: 0644]
tests/fixtures/gpg/expired.asc [new file with mode: 0644]
tests/fixtures/gpg/gnupghome/pubring.gpg [new file with mode: 0644]
tests/fixtures/gpg/gnupghome/secring.gpg [new file with mode: 0644]
tests/fixtures/gpg/message.asc [new file with mode: 0644]
tests/fixtures/gpg/plaintext.txt [new file with mode: 0644]
tests/fixtures/gpg/valid.asc [new file with mode: 0644]
tests/test_architecture.py
tests/test_gpg.py [new file with mode: 0755]
tests/test_packagelist.py
tools/debianqueued-0.9/config
tools/debianqueued-0.9/config-backports
tools/debianqueued-0.9/config-security
tools/debianqueued-0.9/config-security-disembargo
tools/debianqueued-0.9/config-security-embargoed
tools/debianqueued-0.9/config-upload

diff --git a/config/debian/apache.conf-incoming b/config/debian/apache.conf-incoming
deleted file mode 100644 (file)
index 37c417c..0000000
+++ /dev/null
@@ -1,146 +0,0 @@
-<VirtualHost *:80>
-  ServerName incoming.debian.org
-  DocumentRoot /srv/incoming.debian.org/public
-
-  # deny by default
-  <Directory />
-    Order allow,deny
-  </Directory>
-
-  # incoming dir, no subdir.  allow everyone except abusers.
-  <Directory /srv/incoming.debian.org/public>
-    IndexIgnore robots.txt
-    Order deny,allow
-  </Directory>
-
-  <Directory /srv/incoming.debian.org>
-    <Files robots.txt>
-      Order deny,allow
-    </Files>
-  </Directory>
-  Alias /robots.txt /srv/incoming.debian.org/robots.txt
-
-  # buildd direct access -- buildds only
-  Alias /debian /srv/ftp.debian.org/mirror
-  Alias /debian-backports /srv/backports-master.debian.org/mirror
-  Alias /debian-buildd /srv/incoming.debian.org/debian-buildd
-
-  <DirectoryMatch ~ "^/srv/(incoming\.debian\.org/(builddweb|debian-buildd/)|ftp\.debian\.org/mirror|backports-master\.debian\.org/mirror)">
-    Order allow,deny
-
-    Use DebianBuilddHostList
-
-    # buildd.d.o, cimarosa
-    allow from 206.12.19.8
-
-    # franck.d.o
-    allow from 138.16.160.12
-
-    # test access to check functionality, ganneff
-    allow from 213.146.108.162
-
-    # for testing sbuild-db; rleigh@codelibre.net
-    # nagini.codelibre.net
-    allow from 80.68.93.164
-    allow from 2001:41c8:1:5750::2
-
-    # Should be in DSA list
-    # amd64
-    # vitry (archive rebuild)
-    allow from 194.177.211.206
-    allow from 2001:648:2ffc:deb:214:22ff:feb2:122c
-    # krenek (archive rebuild)
-    allow from 194.177.211.207
-    allow from 2001:648:2ffc:deb:214:22ff:feb1:ff56
-
-    # Known Extras
-
-    # No idea about
-    # arm
-    ## netwinder
-    allow from 192.133.104.24
-    ##
-    allow from 217.147.81.26
-    ## toffee
-    allow from 78.32.9.218
-    ##
-    allow from 86.3.74.169
-    ## nw1.xandros
-    allow from 67.210.160.89
-    ## nw2.xandros
-    allow from 67.210.160.90
-    ## hdges.billgatliff
-    allow from 209.251.101.204
-
-    # armel
-    ## allegri
-    allow from 157.193.39.233
-
-    # hppa
-    ## bld3.mmjgroup
-    allow from 192.25.206.243
-    ## paer
-    allow from 192.25.206.11
-
-    # hurd-i386
-    ## rossini (NOT .debian.org)
-    allow from 192.33.98.55
-    ## back / mozart (xen domains; NOT .debian.org)
-    allow from 80.87.129.151
-    # ironforge.sceen.net.
-    allow from 46.105.42.111
-
-    # ia64
-    ## mundi
-    allow from 192.25.206.62
-
-    # mips
-    ##
-    allow from 217.147.81.21
-    ## sigrun, aba
-    allow from 82.195.75.68
-    allow from 2001:41b8:202:deb:a00:69ff:fe08:30c6
-
-    # mipsel
-    ## monteverdi
-    allow from 78.47.2.111
-    ## kritias, aba
-    allow from 78.46.213.163
-
-    # s390
-    ## debian01.zseries
-    allow from 195.243.109.161
-    ## l003092.zseriespenguins.ihost.com
-    allow from 32.97.40.46
-    ##
-    allow from 148.100.96.45
-    ##
-    allow from 148.100.96.52
-    ## lxdebian.bfinv
-    allow from 80.245.147.60
-
-    # sparc
-    ## titan.ayous.org ('non-standard' buildd; contact HE)
-    allow from 82.195.75.33
-
-    # kfreebsd
-    ## i386
-    # himalai1, ganymede1
-    allow from 129.175.22.65
-    ## luchesi
-    ## Password based due to being KVM instance
-    ##   allow from 137.82.84.78
-
-    # Dynamics use password auth
-
-    AuthType Basic
-    AuthName "incoming.debian.org"
-    AuthUserFile /srv/incoming.debian.org/htpasswd
-    require valid-user
-    satisfy any
-  </DirectoryMatch>
-
-  CustomLog               /var/log/apache2/incoming-access.log privacy
-  ErrorLog                /var/log/apache2/incoming-error.log
-
-</VirtualHost>
index 606f614a400afba90e29e7e4a87fbdfeb214b334..e44f2bb0e468a86dbd0fec8b7227d1b306c82814 100644 (file)
@@ -33,16 +33,16 @@ function wbtrigger() {
 
 # used by cron.dinstall *and* cron.unchecked.
 function make_buildd_dir () {
-    local overridesuite
-    local suite
-
+    # We generate straight into the static mirror location for incoming
     dak manage-build-queues -a
     dak generate-packages-sources2 -a build-queues
     dak generate-releases -a build-queues >/dev/null
-    ${scriptsdir}/update-buildd-archive ${base}/build-queues ${incoming}/debian-buildd
 
-    rm -f ${incoming}/public/*
-    dak export-suite -s "accepted" -d "$incoming/public"
+    # Stick a last modified date in the page footer
+    echo "<p>Last updated: `date -u`</p>" > ${incoming}/web/README.html
+
+    # Tell the mirrors that we've updated
+    chronic /usr/local/bin/static-update-component incoming.debian.org
 }
 
 # Process (oldstable)-proposed-updates "NEW" queue
index 7eda55867f342c4c58bc7a372142766f50c2f07d..85bd99e1281b4ea7e8b5cf00cabd5e679a8e36a9 100755 (executable)
@@ -36,9 +36,8 @@ mv ${TMPFILE} /srv/ftp-master.debian.org/scripts/masterfiles/wnpp_rm
 
 # Push files over to security
 # The key over there should have the following set for the ssh key:
-#  command="/usr/bin/xzcat | /usr/bin/psql -f - -1 obscurity"
-pg_dump -a -F p -t files | \
-            sed -e "s,^COPY files (,DELETE FROM external_files; COPY external_files (," | \
+#  command="/usr/bin/xzcat | /usr/bin/psql -1 -c 'DELETE FROM external_files; COPY external_files (id, filename, size, md5sum, last_used, sha1sum, sha256sum, created, modified) FROM STDIN' obscurity"
+psql -c 'COPY files (id, filename, size, md5sum, last_used, sha1sum, sha256sum, created, modified) TO STDOUT' projectb | \
             xz -3 | \
             ssh -o BatchMode=yes -o ConnectTimeout=30 -o SetupTimeout=30 -2 \
                 -i ${base}/s3kr1t/push_external_files dak@security-master.debian.org sync
index 34571185fa69f0fb2d85ef6b303872c9166202b8..74eb58b2c2a9c4a7a1a0b00ae308090ca8afb5f8 100755 (executable)
@@ -60,28 +60,11 @@ cleanup() {
 function do_buildd () {
     if lockfile -r3 $NOTICE; then
         LOCKDAILY="YES"
-        cd $overridedir
-        dak make-overrides &>/dev/null
-        rm -f override.sid.all3 override.sid.all3.src override.squeeze-backports.all3 override.squeeze-backports.all3.src
-        for i in main contrib non-free main.debian-installer; do
-            cat override.sid.$i >> override.sid.all3
-            cat override.squeeze-backports.$i >> override.squeeze-backports.all3
-            if [ "$i" != "main.debian-installer" ]; then
-                cat override.sid.$i.src >> override.sid.all3.src
-                cat override.squeeze-backports.$i.src >> override.squeeze-backports.all3.src
-            fi
-        done
         make_buildd_dir
         wbtrigger
     fi
 }
 
-function do_dists () {
-    #cd $configdir
-    #dak generate-filelist -s unstable,experimental -i
-    dak generate-packages-sources2 -s unstable experimental >/dev/null
-}
-
 ########################################################################
 # the actual unchecked functions follow                                #
 ########################################################################
@@ -112,7 +95,6 @@ do_unchecked
 if [ ! -z "$changes" ]; then
     sync_debbugs
     do_buildd
-    #do_dists
 fi
 
 dak contents -l 10000 scan-binary
index 8c5c1fb54af164905e97fbac0f97c635e471399b..92b77813d6fae1fd8a6b1733722ee0a57da12ade 100644 (file)
@@ -19,6 +19,7 @@ Dinstall
    DefaultSuite "unstable";
    LintianTags "/srv/ftp-master.debian.org/dak/config/debian/lintian.tags";
    ReleaseTransitions "/srv/ftp.debian.org/web/transitions.yaml";
+   AllowSourceOnlyUploads true;
    // if you setup an own dak repository and want to upload Debian packages you most possibly want
    // to set the following option to a real path/filename and then enter those mail addresses that
    // you want to be able to receive mails generated by your dak installation. This avoids spamming
index 03a86a78fc2c052157f0fa5c486a66d649dfb311..e14e5158c5bae1d27feb86f8e4767568d9509492 100644 (file)
@@ -23,7 +23,7 @@ queuedir=$base/queue/
 unchecked=$queuedir/unchecked/
 accepted=$queuedir/accepted/
 mirrordir=$base/mirror/
-incoming=$base/incoming
+incoming=$base/public/incoming.debian.org/
 newstage=$queuedir/newstage/
 exportdir=$base/export/
 exportpublic=$public/rsync/export/
index da8669895f345044ef5430f27dccfaa192c7bf13..43e20ecfe9209b6f87c2237e8a620f650740618a 100755 (executable)
@@ -24,6 +24,7 @@ import sys
 import apt_pkg
 
 import daklib.archive
+import daklib.gpg
 
 from daklib import utils
 from daklib.dbconn import *
@@ -143,6 +144,8 @@ Perform administrative work on the dak database.
      change-component SUITE COMPONENT binary BINARY...
          Move source or binary packages to a different component by copying
          associated files and changing the overrides.
+
+  forget-signature FILE:    forget that we saw FILE
 """
     sys.exit(exit_code)
 
@@ -927,6 +930,26 @@ dispatch['change-component'] = change_component
 
 ################################################################################
 
+def forget_signature(args):
+    filename = args[1]
+    with open(filename, 'r') as fh:
+        data = fh.read()
+
+    session = DBConn().session()
+    keyrings = [ k.keyring_name for k in session.query(Keyring).filter_by(active=True).order_by(Keyring.priority) ]
+    signed_file = daklib.gpg.SignedFile(data, keyrings)
+    history = SignatureHistory.from_signed_file(signed_file).query(session)
+    if history is not None:
+        session.delete(history)
+        session.commit()
+    else:
+        print "Signature was not known to dak."
+    session.rollback()
+
+dispatch['forget-signature'] = forget_signature
+
+################################################################################
+
 def main():
     """Perform administrative work on the dak database"""
     global dryrun
diff --git a/dak/dakdb/update103.py b/dak/dakdb/update103.py
new file mode 100644 (file)
index 0000000..4a54f63
--- /dev/null
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+# coding=utf8
+
+"""
+Drop unique .changes name requirement and allow ftpteam to forget seen files
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2014, Ansgar Burchardt <ansgar@debian.org>
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+################################################################################
+
+import psycopg2
+from daklib.dak_exceptions import DBUpdateError
+from daklib.config import Config
+
+statements = [
+"ALTER TABLE changes DROP CONSTRAINT IF EXISTS known_changes_changesname_key",
+"DROP INDEX IF EXISTS changesurgency_ind",
+"GRANT DELETE ON signature_history TO ftpteam",
+]
+
+################################################################################
+def do_update(self):
+    print __doc__
+    try:
+        cnf = Config()
+
+        c = self.db.cursor()
+
+        for stmt in statements:
+            c.execute(stmt)
+
+        c.execute("UPDATE config SET value = '103' WHERE name = 'db_revision'")
+        self.db.commit()
+
+    except psycopg2.ProgrammingError as msg:
+        self.db.rollback()
+        raise DBUpdateError('Unable to apply sick update 103, rollback issued. Error message: {0}'.format(msg))
diff --git a/dak/dakdb/update104.py b/dak/dakdb/update104.py
new file mode 100644 (file)
index 0000000..0847992
--- /dev/null
@@ -0,0 +1,55 @@
+#!/usr/bin/env python
+# coding=utf8
+
+"""
+Drop created and modified columns from bin_contents and src_contents
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2014, Ansgar Burchardt <ansgar@debian.org>
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+################################################################################
+
+import psycopg2
+from daklib.dak_exceptions import DBUpdateError
+from daklib.config import Config
+
+statements = [
+"DROP TRIGGER IF EXISTS modified_bin_contents ON bin_contents",
+"ALTER TABLE bin_contents DROP COLUMN created, DROP COLUMN modified",
+"DROP TRIGGER IF EXISTS modified_src_contents ON src_contents",
+"ALTER TABLE src_contents DROP COLUMN created, DROP COLUMN modified",
+]
+
+################################################################################
+def do_update(self):
+    print __doc__
+    try:
+        cnf = Config()
+
+        c = self.db.cursor()
+
+        for stmt in statements:
+            c.execute(stmt)
+
+        c.execute("UPDATE config SET value = '104' WHERE name = 'db_revision'")
+        self.db.commit()
+
+    except psycopg2.ProgrammingError as msg:
+        self.db.rollback()
+        raise DBUpdateError('Unable to apply sick update 104, rollback issued. Error message: {0}'.format(msg))
index 23775583b67030b78da306e0b3e2440cc5c577e8..02608ee4c0ef548a4cb2e16110cbd3f1b89a36c7 100644 (file)
@@ -32,6 +32,7 @@ Export binaries and sources from a suite to a flat directory structure.
  -c --copy         copy files instead of symlinking them
  -d <directory>    target directory to export packages to
                    default: current directory
+ -r --relative     use symlinks relative to target directory
  -s <suite>        suite to grab uploads from
 """
 
@@ -42,6 +43,7 @@ def main(argv=None):
     arguments = [('h', 'help', 'Export::Options::Help'),
                  ('c', 'copy', 'Export::Options::Copy'),
                  ('d', 'directory', 'Export::Options::Directory', 'HasArg'),
+                 ('r', 'relative', 'Export::Options::Relative'),
                  ('s', 'suite', 'Export::Options::Suite', 'HasArg')]
 
     cnf = Config()
@@ -65,6 +67,11 @@ def main(argv=None):
         sys.exit(1)
 
     symlink = 'Copy' not in options
+    relative = 'Relative' in options
+
+    if relative and not symlink:
+        print "E: --relative and --copy cannot be used together."
+        sys.exit(1)
 
     binaries = suite.binaries
     sources = suite.sources
@@ -80,17 +87,12 @@ def main(argv=None):
                         .join(ArchiveFile.component).join(ArchiveFile.file) \
                         .filter(ArchiveFile.archive == suite.archive) \
                         .filter(ArchiveFile.file == f).first()
-            # XXX: Remove later. There was a bug that caused only the *.dsc to
-            # be installed in build queues and we do not want to break them.
-            # The bug was fixed in 55d2c7e6e2418518704623246021021e05b90e58
-            # on 2012-11-04
-            if af is None:
-                af = session.query(ArchiveFile) \
-                            .join(ArchiveFile.component).join(ArchiveFile.file) \
-                            .filter(ArchiveFile.file == f).first()
+            src = af.path
+            if relative:
+                src = os.path.relpath(src, directory)
             dst = os.path.join(directory, f.basename)
             if not os.path.exists(dst):
-                fs.copy(af.path, dst, symlink=symlink)
+                fs.copy(src, dst, symlink=symlink)
         fs.commit()
 
 if __name__ == '__main__':
index 7eca3c8cacc0460b858e9f2678fcc52f2a68b75c..f3181fd45e1c5200a77598542f36bec3b94919cc 100755 (executable)
@@ -397,6 +397,10 @@ def main():
         else:
             logger.log(['E: ', msg])
 
+    # Lock tables so that nobody can change things underneath us
+    session.execute("LOCK TABLE src_associations IN SHARE MODE")
+    session.execute("LOCK TABLE bin_associations IN SHARE MODE")
+
     for s in suites:
         component_ids = [ c.component_id for c in s.components ]
         if s.untouchable and not force:
index 0e0d33a94ea85f8d3b573ada62a57145887c726a..347945a3e8c037ac6a0d428b5a798592e8e69a7b 100755 (executable)
@@ -110,9 +110,16 @@ def create_directories():
     # Process directories from dak.conf
     process_tree(Cnf, "Dir")
 
+    # Hardcode creation of the unchecked directory
+    if Cnf.has_key("Dir::Base"):
+        do_dir(os.path.join(Cnf["Dir::Base"], "queue", "unchecked"), 'unchecked directory')
+
     # Process queue directories
     for queue in session.query(PolicyQueue):
         do_dir(queue.path, '%s queue' % queue.queue_name)
+        # If we're doing the NEW queue, make sure it has a COMMENTS directory
+        if queue.queue_name == 'new':
+            do_dir(os.path.join(queue.path, "COMMENTS"), '%s queue comments' % queue.queue_name)
 
     for config_name in [ "Rm::LogFile",
                          "Import-Archive::ExportDir" ]:
index 175376709a14405dcafb737eb8f375fd389141ed..f52836f8b2c32881cc0f894a79411115d717ee01 100755 (executable)
@@ -52,8 +52,9 @@ def build_mapping(archive, session):
       JOIN files ON files.id=dsc_files.file
       JOIN files_archive_map ON files.id = files_archive_map.file_id
       JOIN component ON files_archive_map.component_id = component.id
+      JOIN files_archive_map fam_dsc ON fam_dsc.file_id=source.file AND fam_dsc.component_id=component.id AND fam_dsc.archive_id=files_archive_map.archive_id
     WHERE files_archive_map.archive_id = :archive_id
-    ORDER BY source, version
+    ORDER BY source, version, component.id, files.filename
     """
 
     query_binaries = """
index 9f9a39cf2154e4465dddb688798778721c7f235e..55397c7dc79a4e2df577397d9262a6addb047df1 100755 (executable)
@@ -125,7 +125,7 @@ def takenover_binaries(upload, missing, session):
     binaries = set([x.package for x in upload.binaries])
     for m in missing:
         if m['type'] != 'dsc':
-            binaries.remove(m['package'])
+            binaries.discard(m['package'])
     if binaries:
         source = upload.binaries[0].source.source
         suite = upload.target_suite.overridesuite or \
@@ -155,10 +155,11 @@ def print_new (upload, missing, indexed, session, file=sys.stdout):
             package = m['package']
         section = m['section']
         priority = m['priority']
+        included = "" if m['included'] else "NOT UPLOADED"
         if indexed:
-            line = "(%s): %-20s %-20s %-20s" % (index, package, priority, section)
+            line = "(%s): %-20s %-20s %-20s %s" % (index, package, priority, section, included)
         else:
-            line = "%-20s %-20s %-20s" % (package, priority, section)
+            line = "%-20s %-20s %-20s %s" % (package, priority, section, included)
         line = line.strip()
         if not m['valid']:
             line = line + ' [!]'
@@ -218,7 +219,8 @@ def edit_new (overrides, upload, session):
             type, pkg = pkg.split(':', 1)
         else:
             type = 'deb'
-        if (type, pkg) not in overrides_map:
+        o = overrides_map.get((type, pkg), None)
+        if o is None:
             utils.warn("Ignoring unknown package '%s'" % (pkg))
         else:
             if section.find('/') != -1:
@@ -231,6 +233,7 @@ def edit_new (overrides, upload, session):
                     section=section,
                     component=component,
                     priority=priority,
+                    included=o['included'],
                     ))
     return new_overrides
 
index 2890c8615403b4017710e711194de46f7cda7f7b..c8c9f2d01070fd8785b05f2bd870a8b6254f8815 100755 (executable)
@@ -43,8 +43,9 @@ from daklib import daklog
 from daklib import utils
 from daklib.dak_exceptions import CantOpenError, AlreadyLockedError, CantGetLockError
 from daklib.config import Config
-from daklib.archive import ArchiveTransaction
+from daklib.archive import ArchiveTransaction, source_component_from_package_list
 from daklib.urgencylog import UrgencyLog
+from daklib.packagelist import PackageList
 
 import daklib.announce
 
@@ -131,16 +132,23 @@ def comment_accept(upload, srcqueue, comments, transaction):
         overridesuite = session.query(Suite).filter_by(suite_name=overridesuite.overridesuite).one()
 
     def binary_component_func(db_binary):
-        override = session.query(Override).filter_by(suite=overridesuite, package=db_binary.package) \
-            .join(OverrideType).filter(OverrideType.overridetype == db_binary.binarytype) \
-            .join(Component).one()
-        return override.component
+        section = db_binary.proxy['Section']
+        component_name = 'main'
+        if section.find('/') != -1:
+            component_name = section.split('/', 1)[0]
+        return session.query(Component).filter_by(component_name=component_name).one()
 
     def source_component_func(db_source):
-        override = session.query(Override).filter_by(suite=overridesuite, package=db_source.source) \
+        package_list = PackageList(db_source.proxy)
+        component = source_component_from_package_list(package_list, upload.target_suite)
+        if component is not None:
+            return component
+
+        # Fallback for packages without Package-List field
+        query = session.query(Override).filter_by(suite=overridesuite, package=db_source.source) \
             .join(OverrideType).filter(OverrideType.overridetype == 'dsc') \
-            .join(Component).one()
-        return override.component
+            .join(Component)
+        return query.one().component
 
     all_target_suites = [upload.target_suite]
     all_target_suites.extend([q.suite for q in upload.target_suite.copy_queues])
index 1518d262ad78bc42f221a1630f90804f5578ee1a..9eb23f00309c1d5d30c74bc074ef676b7239e774 100755 (executable)
@@ -463,6 +463,13 @@ def process_it(directory, changes, keyrings, session):
     with daklib.archive.ArchiveUpload(directory, changes, keyrings) as upload:
         processed = action(directory, upload)
         if processed and not Options['No-Action']:
+            session = DBConn().session()
+            history = SignatureHistory.from_signed_file(upload.changes)
+            if history.query(session) is None:
+                session.add(history)
+                session.commit()
+            session.close()
+
             unlink_if_exists(os.path.join(directory, changes.filename))
             for fn in changes.files:
                 unlink_if_exists(os.path.join(directory, fn))
index 13ce3de615ad3c99ce6757f16f8b3a2bf2575804..57e9c8a3780d443cf49b7ecd235487ffcefc36be 100755 (executable)
@@ -185,7 +185,7 @@ def get_upload_data(changesfn):
     uploader = achanges.get('changed-by')
     uploader = re.sub(r'^\s*(\S.*)\s+<.*>',r'\1',uploader)
     with utils.open_file(changesfn) as f:
-        fingerprint = SignedFile(f.read(), keyrings=get_active_keyring_paths()).fingerprint
+        fingerprint = SignedFile(f.read(), keyrings=get_active_keyring_paths(), require_signature=False).fingerprint
     if Cnf.has_key("Show-Deferred::LinkPath"):
         isnew = 0
         suites = get_suites_source_in(achanges['source'])
index 110d522f1a63de13912e89a2b87398306f22ddb5..8354de02d2618387c414a0d0d4c85238bdc97575 100644 (file)
@@ -70,6 +70,9 @@ def Debian_arch_to_Debian_triplet(arch):
         else:
             return 'any', 'any', 'any'
 
+    if len(parts) == 2 and parts[0] == 'linux':
+        arch = parts[1]
+
     triplet = _triplettable()[1].get(arch, None)
     if triplet is None:
         return None
@@ -85,9 +88,9 @@ def match_architecture(arch, wildcard):
     triplet_arch = Debian_arch_to_Debian_triplet(arch)
     triplet_wildcard = Debian_arch_to_Debian_triplet(wildcard)
 
-    if len(triplet_arch) != 3:
+    if triplet_arch is None or len(triplet_arch) != 3:
         raise InvalidArchitecture('{0} is not a valid architecture name'.format(arch))
-    if len(triplet_wildcard) != 3:
+    if triplet_wildcard is None or len(triplet_wildcard) != 3:
         raise InvalidArchitecture('{0} is not a valid architecture name or wildcard'.format(wildcard))
 
     for i in range(0,3):
index 372ab8a947b67c3cb0f8eefad470ae31fafd6866..04c55c0ae8f45ba0e7b4f63870b24a627fe191e5 100644 (file)
@@ -33,6 +33,7 @@ from datetime import datetime
 import os
 import shutil
 from sqlalchemy.orm.exc import NoResultFound
+from sqlalchemy.orm import object_session
 import sqlalchemy.exc
 import tempfile
 import traceback
@@ -540,6 +541,34 @@ class ArchiveTransaction(object):
             self.rollback()
         return None
 
+def source_component_from_package_list(package_list, suite):
+    """Get component for a source package
+
+    This function will look at the Package-List field to determine the
+    component the source package belongs to. This is the first component
+    the source package provides binaries for (first with respect to the
+    ordering of components).
+
+    It the source package has no Package-List field, None is returned.
+
+    @type  package_list: L{daklib.packagelist.PackageList}
+    @param package_list: package list of the source to get the override for
+
+    @type  suite: L{daklib.dbconn.Suite}
+    @param suite: suite to consider for binaries produced
+
+    @rtype:  L{daklib.dbconn.Component} or C{None}
+    @return: component for the given source or C{None}
+    """
+    if package_list.fallback:
+        return None
+    session = object_session(suite)
+    packages = package_list.packages_for_suite(suite)
+    components = set(p.component for p in packages)
+    query = session.query(Component).order_by(Component.ordering) \
+            .filter(Component.component_name.in_(components))
+    return query.first()
+
 class ArchiveUpload(object):
     """handle an upload
 
@@ -739,6 +768,23 @@ class ArchiveUpload(object):
         suites = session.query(Suite).filter(Suite.suite_name.in_(suite_names))
         return suites
 
+    def _check_new_binary_overrides(self, suite):
+        new = False
+
+        binaries = self.changes.binaries
+        source = self.changes.source
+        if source is not None and not source.package_list.fallback:
+            packages = source.package_list.packages_for_suite(suite)
+            binaries = [ entry for entry in packages ]
+
+        for b in binaries:
+            override = self._binary_override(suite, b)
+            if override is None:
+                self.warnings.append('binary:{0} is NEW.'.format(b.name))
+                new = True
+
+        return new
+
     def _check_new(self, suite):
         """Check if upload is NEW
 
@@ -753,12 +799,8 @@ class ArchiveUpload(object):
         new = False
 
         # Check for missing overrides
-        for b in self.changes.binaries:
-            override = self._binary_override(suite, b)
-            if override is None:
-                self.warnings.append('binary:{0} is NEW.'.format(b.control['Package']))
-                new = True
-
+        if self._check_new_binary_overrides(suite):
+            new = True
         if self.changes.source is not None:
             override = self._source_override(suite, self.changes.source)
             if override is None:
@@ -804,7 +846,7 @@ class ArchiveUpload(object):
         @type  suite: L{daklib.dbconn.Suite}
         @param suite: suite to get override for
 
-        @type  binary: L{daklib.upload.Binary}
+        @type  binary: L{daklib.upload.Binary} or L{daklib.packagelist.PackageListEntry}
         @param binary: binary to get override for
 
         @rtype:  L{daklib.dbconn.Override} or C{None}
@@ -817,7 +859,7 @@ class ArchiveUpload(object):
         if mapped_component is None:
             return None
 
-        query = self.session.query(Override).filter_by(suite=suite, package=binary.control['Package']) \
+        query = self.session.query(Override).filter_by(suite=suite, package=binary.name) \
                 .join(Component).filter(Component.component_name == mapped_component.component_name) \
                 .join(OverrideType).filter(OverrideType.overridetype == binary.type)
 
@@ -841,10 +883,13 @@ class ArchiveUpload(object):
         if suite.overridesuite is not None:
             suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
 
-        # XXX: component for source?
         query = self.session.query(Override).filter_by(suite=suite, package=source.dsc['Source']) \
                 .join(OverrideType).filter(OverrideType.overridetype == 'dsc')
 
+        component = source_component_from_package_list(source.package_list, suite)
+        if component is not None:
+            query = query.filter(Override.component == component)
+
         try:
             return query.one()
         except NoResultFound:
@@ -889,6 +934,7 @@ class ArchiveUpload(object):
             # Validate signatures and hashes before we do any real work:
             for chk in (
                     checks.SignatureAndHashesCheck,
+                    checks.SignatureTimestampCheck,
                     checks.ChangesCheck,
                     checks.ExternalHashesCheck,
                     checks.SourceCheck,
index ccf7feb4e9e61b4510a32d0d8c17b07f487dc251..26439aaa798e2726b5ca5cb21d6f5f8a42258c61 100644 (file)
@@ -31,11 +31,12 @@ from daklib.regexes import *
 from daklib.textutils import fix_maintainer, ParseMaintError
 import daklib.lintian as lintian
 import daklib.utils as utils
-from daklib.upload import InvalidHashException
+import daklib.upload
 
 import apt_inst
 import apt_pkg
 from apt_pkg import version_compare
+import datetime
 import errno
 import os
 import subprocess
@@ -109,6 +110,16 @@ class Check(object):
         return False
 
 class SignatureAndHashesCheck(Check):
+    def check_replay(self, upload):
+        # Use private session as we want to remember having seen the .changes
+        # in all cases.
+        session = upload.session
+        history = SignatureHistory.from_signed_file(upload.changes)
+        r = history.query(session)
+        if r is not None:
+            raise Reject('Signature for changes file was already seen at {0}.\nPlease refresh the signature of the changes file if you want to upload it again.'.format(r.seen))
+        return True
+
     """Check signature of changes and dsc file (if included in upload)
 
     Make sure the signature is valid and done by a known user.
@@ -117,6 +128,7 @@ class SignatureAndHashesCheck(Check):
         changes = upload.changes
         if not changes.valid_signature:
             raise Reject("Signature for .changes not valid.")
+        self.check_replay(upload)
         self._check_hashes(upload, changes.filename, changes.files.itervalues())
 
         source = None
@@ -149,15 +161,32 @@ class SignatureAndHashesCheck(Check):
         try:
             for f in files:
                 f.check(upload.directory)
-        except IOError as e:
-            if e.errno == errno.ENOENT:
-                raise Reject('{0} refers to non-existing file: {1}\n'
-                             'Perhaps you need to include it in your upload?'
-                             .format(filename, os.path.basename(e.filename)))
-            raise
-        except InvalidHashException as e:
+        except daklib.upload.FileDoesNotExist as e:
+            raise Reject('{0}: {1}\n'
+                         'Perhaps you need to include the file in your upload?'
+                         .format(filename, unicode(e)))
+        except daklib.upload.UploadException as e:
             raise Reject('{0}: {1}'.format(filename, unicode(e)))
 
+class SignatureTimestampCheck(Check):
+    """Check timestamp of .changes signature"""
+    def check(self, upload):
+        changes = upload.changes
+
+        now = datetime.datetime.utcnow()
+        timestamp = changes.signature_timestamp
+        age = now - timestamp
+
+        age_max = datetime.timedelta(days=365)
+        age_min = datetime.timedelta(days=-7)
+
+        if age > age_max:
+            raise Reject('{0}: Signature from {1} is too old (maximum age is {2} days)'.format(changes.filename, timestamp, age_max.days))
+        if age < age_min:
+            raise Reject('{0}: Signature from {1} is too far in the future (tolerance is {2} days)'.format(changes.filename, timestamp, abs(age_min.days)))
+
+        return True
+
 class ChangesCheck(Check):
     """Check changes file for syntax errors."""
     def check(self, upload):
@@ -271,7 +300,7 @@ class BinaryCheck(Check):
         fn = binary.hashed_file.filename
         control = binary.control
 
-        for field in ('Package', 'Architecture', 'Version', 'Description'):
+        for field in ('Package', 'Architecture', 'Version', 'Description', 'Section'):
             if field not in control:
                 raise Reject('{0}: Missing mandatory field {0}.'.format(fn, field))
 
@@ -332,6 +361,11 @@ class BinaryCheck(Check):
                 except:
                     raise Reject('{0}: APT could not parse {1} field'.format(fn, field))
 
+        # "Multi-Arch: no" breaks wanna-build, #768353
+        multi_arch = control.get("Multi-Arch")
+        if multi_arch == 'no':
+            raise Reject('{0}: Multi-Arch: no support in Debian is broken (#768353)'.format(fn))
+
 class BinaryTimestampCheck(Check):
     """check timestamps of files in binary packages
 
@@ -612,17 +646,53 @@ transition is done.""".format(transition_source, currentlymsg, expected,t["rm"])
         return None
 
 class NoSourceOnlyCheck(Check):
+    def is_source_only_upload(self, upload):
+        changes = upload.changes
+        if changes.source is not None and len(changes.binaries) == 0:
+            return True
+        return False
+
     """Check for source-only upload
 
     Source-only uploads are only allowed if Dinstall::AllowSourceOnlyUploads is
     set. Otherwise they are rejected.
+
+    Source-only uploads are only accepted for source packages having a
+    Package-List field that also lists architectures per package. This
+    check can be disabled via
+    Dinstall::AllowSourceOnlyUploadsWithoutPackageList.
+
+    Source-only uploads to NEW are only allowed if
+    Dinstall::AllowSourceOnlyNew is set.
+
+    Uploads not including architecture-independent packages are only
+    allowed if Dinstall::AllowNoArchIndepUploads is set.
+
     """
     def check(self, upload):
-        if Config().find_b("Dinstall::AllowSourceOnlyUploads"):
+        if not self.is_source_only_upload(upload):
             return True
+
+        allow_source_only_uploads = Config().find_b('Dinstall::AllowSourceOnlyUploads')
+        allow_source_only_uploads_without_package_list = Config().find_b('Dinstall::AllowSourceOnlyUploadsWithoutPackageList')
+        allow_source_only_new = Config().find_b('Dinstall::AllowSourceOnlyNew')
+        allow_no_arch_indep_uploads = Config().find_b('Dinstall::AllowNoArchIndepUploads')
         changes = upload.changes
-        if changes.source is not None and len(changes.binaries) == 0:
+
+        if not allow_source_only_uploads:
             raise Reject('Source-only uploads are not allowed.')
+        if not allow_source_only_uploads_without_package_list \
+           and changes.source.package_list.fallback:
+            raise Reject('Source-only uploads are only allowed if a Package-List field that also list architectures is included in the source package. dpkg (>= 1.17.7) includes this information.')
+        if not allow_source_only_new and upload.new:
+            raise Reject('Source-only uploads to NEW are not allowed.')
+
+        if not allow_no_arch_indep_uploads \
+           and 'all' not in changes.architectures \
+           and 'experimental' not in changes.distributions \
+           and changes.source.package_list.has_arch_indep_packages():
+            raise Reject('Uploads not including architecture-independent packages are not allowed.')
+
         return True
 
 class LintianCheck(Check):
index c1f9c706e02a443145c2065f5cc66850d9e5952b..867f7e3692589a5c26d1f1bc059cb40d7a62298f 100644 (file)
@@ -158,6 +158,8 @@ class CommandFile(object):
             section = sections.section
             if 'Uploader' in section:
                 self.uploader = section['Uploader']
+            if 'Cc' in section:
+                self.cc.append(section['Cc'])
             # TODO: Verify first section has valid Archive field
             if 'Archive' not in section:
                 raise CommandError('No Archive field in first section.')
@@ -306,13 +308,13 @@ class CommandFile(object):
 
         self.log.log(['dm-migrate', 'from={0}'.format(fpr_hash_from), 'to={0}'.format(fpr_hash_to)])
 
-        count = 0
+        sources = []
         for entry in session.query(ACLPerSource).filter_by(acl=acl, fingerprint=fpr_from):
             self.log.log(['dm-migrate', 'from={0}'.format(fpr_hash_from), 'to={0}'.format(fpr_hash_to), 'source={0}'.format(entry.source)])
             entry.fingerprint = fpr_to
-            count += 1
+            sources.append(entry.source)
 
-        self.result.append('Migrated {0} to {1}.\n{2} acl entries changed.'.format(fpr_hash_from, fpr_hash_to, count))
+        self.result.append('Migrated {0} to {1}.\n{2} acl entries changed: {3}'.format(fpr_hash_from, fpr_hash_to, len(sources), ", ".join(sources)))
 
         session.commit()
 
index 1bf82931577f0fb5c1918371821cec945dca9c33..c2621e36d150a38b042fd25182e15ff7374cbb87 100644 (file)
@@ -572,6 +572,12 @@ class DBBinary(ORMObject):
         stanza = self.read_control()
         return apt_pkg.TagSection(stanza)
 
+    @property
+    def proxy(self):
+        session = object_session(self)
+        query = session.query(BinaryMetadata).filter_by(binary=self)
+        return MetadataProxy(session, query)
+
 __all__.append('DBBinary')
 
 @session_wrapper
@@ -1853,6 +1859,9 @@ class SignatureHistory(ORMObject):
         self.contents_sha1 = signed_file.contents_sha1()
         return self
 
+    def query(self, session):
+        return session.query(SignatureHistory).filter_by(fingerprint=self.fingerprint, signature_timestamp=self.signature_timestamp, contents_sha1=self.contents_sha1).first()
+
 __all__.append('SignatureHistory')
 
 ################################################################################
@@ -1979,6 +1988,12 @@ class DBSource(ORMObject):
             fileset.add(name)
         return fileset
 
+    @property
+    def proxy(self):
+        session = object_session(self)
+        query = session.query(SourceMetadata).filter_by(source=self)
+        return MetadataProxy(session, query)
+
 __all__.append('DBSource')
 
 @session_wrapper
@@ -2473,6 +2488,37 @@ __all__.append('SourceMetadata')
 
 ################################################################################
 
+class MetadataProxy(object):
+    def __init__(self, session, query):
+        self.session = session
+        self.query = query
+
+    def _get(self, key):
+        metadata_key = self.session.query(MetadataKey).filter_by(key=key).first()
+        if metadata_key is None:
+            return None
+        metadata = self.query.filter_by(key=metadata_key).first()
+        return metadata
+
+    def __contains__(self, key):
+        if self._get(key) is not None:
+            return True
+        return False
+
+    def __getitem__(self, key):
+        metadata = self._get(key)
+        if metadata is None:
+            raise KeyError
+        return metadata.value
+
+    def get(self, key, default=None):
+        try:
+            return self[key]
+        except KeyError:
+            return default
+
+################################################################################
+
 class VersionCheck(ORMObject):
     def __init__(self, *args, **kwargs):
        pass
index 9935c6d554a6eedc0658ed4cc14be1e5c83687ad..654f69a53cf0f0d973eb97aa9a9233dc99f541ae 100644 (file)
@@ -78,8 +78,11 @@ class SignedFile(object):
         self.keyrings = keyrings
 
         self.valid = False
+        self.expired = False
+        self.invalid = False
         self.fingerprint = None
         self.primary_fingerprint = None
+        self.signature_id = None
 
         self._verify(data, require_signature)
 
@@ -112,6 +115,9 @@ class SignedFile(object):
                 for line in self.status.splitlines():
                     self._parse_status(line)
 
+                if self.invalid:
+                    self.valid = False
+
                 if require_signature and not self.valid:
                     raise GpgException("No valid signature found. (GPG exited with status code %s)\n%s" % (exit_code, self.stderr))
 
@@ -143,16 +149,25 @@ class SignedFile(object):
 
         return dict( (fd, "".join(read_lines[fd])) for fd in read_lines.keys() )
 
-    def _parse_date(self, value):
-        """parse date string in YYYY-MM-DD format
+    def _parse_timestamp(self, timestamp, datestring=None):
+        """parse timestamp in GnuPG's format
 
         @rtype:   L{datetime.datetime}
-        @returns: datetime objects for 0:00 on the given day
+        @returns: datetime object for the given timestamp
         """
-        year, month, day = value.split('-')
-        date = datetime.date(int(year), int(month), int(day))
-        time = datetime.time(0, 0)
-        return datetime.datetime.combine(date, time)
+        # The old implementation did only return the date. As we already
+        # used this for replay production, return the legacy value for
+        # old signatures.
+        if datestring is not None:
+            year, month, day = datestring.split('-')
+            date = datetime.date(int(year), int(month), int(day))
+            time = datetime.time(0, 0)
+            if date < datetime.date(2014, 8, 4):
+                return datetime.datetime.combine(date, time)
+
+        if 'T' in timestamp:
+            raise Exception('No support for ISO 8601 timestamps.')
+        return datetime.datetime.utcfromtimestamp(long(timestamp))
 
     def _parse_status(self, line):
         fields = line.split()
@@ -163,23 +178,43 @@ class SignedFile(object):
         #             <expire-timestamp> <sig-version> <reserved> <pubkey-algo>
         #             <hash-algo> <sig-class> <primary-key-fpr>
         if fields[1] == "VALIDSIG":
+            if self.fingerprint is not None:
+                raise GpgException("More than one signature is not (yet) supported.")
             self.valid = True
             self.fingerprint = fields[2]
             self.primary_fingerprint = fields[11]
-            self.signature_timestamp = self._parse_date(fields[3])
+            self.signature_timestamp = self._parse_timestamp(fields[4], fields[3])
 
-        if fields[1] == "BADARMOR":
+        elif fields[1] == "BADARMOR":
             raise GpgException("Bad armor.")
 
-        if fields[1] == "NODATA":
+        elif fields[1] == "NODATA":
             raise GpgException("No data.")
 
-        if fields[1] == "DECRYPTION_FAILED":
+        elif fields[1] == "DECRYPTION_FAILED":
             raise GpgException("Decryption failed.")
 
-        if fields[1] == "ERROR":
+        elif fields[1] == "ERROR":
             raise GpgException("Other error: %s %s" % (fields[2], fields[3]))
 
+        elif fields[1] == "SIG_ID":
+            if self.signature_id is not None:
+                raise GpgException("More than one signature id.")
+            self.signature_id = fields[2]
+
+        elif fields[1] in ('PLAINTEXT', 'GOODSIG', 'NOTATION_NAME', 'NOTATION_DATA', 'SIGEXPIRED', 'KEYEXPIRED', 'POLICY_URL'):
+            pass
+
+        elif fields[1] in ('EXPSIG', 'EXPKEYSIG'):
+            self.expired = True
+            self.invalid = True
+
+        elif fields[1] in ('REVKEYSIG', 'BADSIG', 'ERRSIG', 'KEYREVOKED', 'NO_PUBKEY'):
+            self.invalid = True
+
+        else:
+            raise GpgException("Keyword '{0}' from GnuPG was not expected.".format(fields[1]))
+
     def _exec_gpg(self, stdin, stdout, stderr, statusfd):
         try:
             if stdin != 0:
@@ -200,7 +235,8 @@ class SignedFile(object):
                     "--no-default-keyring",
                     "--batch",
                     "--no-tty",
-                    "--trust-model", "always"]
+                    "--trust-model", "always",
+                    "--fixed-list-mode"]
             for k in self.keyrings:
                 args.append("--keyring=%s" % k)
             args.extend(["--decrypt", "-"])
index 82653a0b2978125df6d2457ded6d7e4aa0d3ae88..4a671839e11733b6085c298a2651c1d99d09174b 100644 (file)
@@ -27,17 +27,20 @@ class InvalidSource(Exception):
 class PackageListEntry(object):
     def __init__(self, name, package_type, section, component, priority, **other):
         self.name = name
-        self.package_type = package_type
+        self.type = package_type
         self.section = section
         self.component = component
         self.priority = priority
         self.other = other
-    @property
-    def architectures(self):
+
+        self.architectures = self._architectures()
+
+    def _architectures(self):
         archs = self.other.get("arch", None)
         if archs is None:
             return None
         return archs.split(',')
+
     def built_on_architecture(self, architecture):
         archs = self.architectures
         if archs is None:
@@ -46,9 +49,12 @@ class PackageListEntry(object):
             if match_architecture(architecture, arch):
                 return True
         return False
+
     def built_in_suite(self, suite):
         built = False
         for arch in suite.architectures:
+            if arch.arch_string == 'source':
+                continue
             built_on_arch = self.built_on_architecture(arch.arch_string)
             if built_on_arch:
                 return True
@@ -58,17 +64,25 @@ class PackageListEntry(object):
 
 class PackageList(object):
     def __init__(self, source):
-        self._source = source
-        if 'Package-List' in self._source:
-            self._parse()
-        elif 'Binary' in self._source:
-            self._parse_fallback()
+        if 'Package-List' in source:
+            self._parse(source)
+        elif 'Binary' in source:
+            self._parse_fallback(source)
         else:
             raise InvalidSource('Source package has neither Package-List nor Binary field.')
-    def _parse(self):
-        self.package_list = {}
 
-        for line in self._source['Package-List'].split("\n"):
+        self.fallback = any(entry.architectures is None for entry in self.package_list)
+
+    def _binaries(self, source):
+        return set(name.strip() for name in source['Binary'].split(","))
+
+    def _parse(self, source):
+        self.package_list = []
+
+        binaries_binary = self._binaries(source)
+        binaries_package_list = set()
+
+        for line in source['Package-List'].split("\n"):
             if not line:
                 continue
             fields = line.split()
@@ -78,17 +92,26 @@ class PackageList(object):
             # <name> <type> <component/section> <priority> [arch=<arch>[,<arch>]...]
             name = fields[0]
             package_type = fields[1]
-            component, section = extract_component_from_section(fields[2])
+            section, component = extract_component_from_section(fields[2])
             priority = fields[3]
             other = dict(kv.split('=', 1) for kv in fields[4:])
 
+            if name in binaries_package_list:
+                raise InvalidSource("Package-List has two entries for '{0}'.".format(name))
+            if name not in binaries_binary:
+                raise InvalidSource("Package-List lists {0} which is not listed in Binary.".format(name))
+            binaries_package_list.add(name)
+
             entry = PackageListEntry(name, package_type, section, component, priority, **other)
-            self.package_list[name] = entry
+            self.package_list.append(entry)
+
+        if len(binaries_binary) != len(binaries_package_list):
+            raise InvalidSource("Package-List and Binaries fields have a different number of entries.")
 
-    def _parse_fallback(self):
-        self.package_list = {}
+    def _parse_fallback(self, source):
+        self.package_list = []
 
-        for binary in self._source['Binary'].split():
+        for binary in self._binaries(source):
             name = binary
             package_type = None
             component = None
@@ -97,11 +120,11 @@ class PackageList(object):
             other = dict()
 
             entry = PackageListEntry(name, package_type, section, component, priority, **other)
-            self.package_list[name] = entry
+            self.package_list.append(entry)
 
     def packages_for_suite(self, suite):
         packages = []
-        for entry in self.package_list.values():
+        for entry in self.package_list:
             built = entry.built_in_suite(suite)
             if built or built is None:
                 packages.append(entry)
@@ -109,7 +132,7 @@ class PackageList(object):
 
     def has_arch_indep_packages(self):
         has_arch_indep = False
-        for entry in self.package_list.values():
+        for entry in self.package_list:
             built = entry.built_on_architecture('all')
             if built:
                 return True
@@ -119,7 +142,7 @@ class PackageList(object):
 
     def has_arch_dep_packages(self):
         has_arch_dep = False
-        for entry in self.package_list.values():
+        for entry in self.package_list:
             built_on_all = entry.built_on_architecture('all')
             if built_on_all == False:
                 return True
index aeed9a2c19cbfb679274520e3418a73f42dc3722..dad9e9d1ec20c685848dac377ae23ecf87e70942 100644 (file)
@@ -20,6 +20,7 @@ from .config import Config
 from .dbconn import BinaryMetadata, Component, MetadataKey, Override, OverrideType, Suite, get_mapped_component
 from .fstransactions import FilesystemTransaction
 from .regexes import re_file_changes, re_file_safe
+from .packagelist import PackageList
 import daklib.utils as utils
 
 import errno
@@ -150,25 +151,14 @@ class PolicyQueueUploadHandler(object):
             .filter(Override.component == component)
         return query.first()
 
-    def _binary_override(self, binary, component_name):
-        package = binary.package
+    def _binary_override(self, name, binarytype, component_name):
         suite = self._overridesuite
-        overridetype = binary.binarytype
         component = get_mapped_component(component_name, self.session)
-        query = self.session.query(Override).filter_by(package=package, suite=suite) \
-            .join(OverrideType).filter(OverrideType.overridetype == overridetype) \
+        query = self.session.query(Override).filter_by(package=name, suite=suite) \
+            .join(OverrideType).filter(OverrideType.overridetype == binarytype) \
             .filter(Override.component == component)
         return query.first()
 
-    def _binary_metadata(self, binary, key):
-        metadata_key = self.session.query(MetadataKey).filter_by(key=key).first()
-        if metadata_key is None:
-            return None
-        metadata = self.session.query(BinaryMetadata).filter_by(binary=binary, key=metadata_key).first()
-        if metadata is None:
-            return None
-        return metadata.value
-
     @property
     def _changes_prefix(self):
         changesname = self.upload.changes.changesname
@@ -253,45 +243,54 @@ class PolicyQueueUploadHandler(object):
         missing = []
         components = set()
 
+        source = self.upload.source
+
         if hints is None:
             hints = []
         hints_map = dict([ ((o['type'], o['package']), o) for o in hints ])
 
-        for binary in self.upload.binaries:
-            priority = self._binary_metadata(binary, 'Priority')
-            section = self._binary_metadata(binary, 'Section')
+        def check_override(name, type, priority, section, included):
             component = 'main'
             if section.find('/') != -1:
                 component = section.split('/', 1)[0]
-            override = self._binary_override(binary, component)
-            if override is None and not any(o['package'] == binary.package and o['type'] == binary.binarytype for o in missing):
-                hint = hints_map.get((binary.binarytype, binary.package))
+            override = self._binary_override(name, type, component)
+            if override is None and not any(o['package'] == name and o['type'] == type for o in missing):
+                hint = hints_map.get((type, name))
                 if hint is not None:
                     missing.append(hint)
                     component = hint['component']
                 else:
                     missing.append(dict(
-                            package = binary.package,
+                            package = name,
                             priority = priority,
                             section = section,
                             component = component,
-                            type = binary.binarytype,
+                            type = type,
+                            included = included
                             ))
             components.add(component)
 
-        source = self.upload.source
-        source_component = '(unknown)'
-        for component, in self.session.query(Component.component_name).order_by(Component.ordering):
-            if component in components:
-                source_component = component
-                break
-            else:
-                if source is not None:
-                    if self._source_override(component) is not None:
-                        source_component = component
-                        break
+        for binary in self.upload.binaries:
+            binary_proxy = binary.proxy
+            priority = binary_proxy['Priority']
+            section = binary_proxy['Section']
+            check_override(binary.package, binary.binarytype, priority, section, included=True)
 
         if source is not None:
+            source_proxy = source.proxy
+            package_list = PackageList(source_proxy)
+            if not package_list.fallback:
+                packages = package_list.packages_for_suite(self.upload.target_suite)
+                for p in packages:
+                    check_override(p.name, p.type, p.priority, p.section, included=False)
+
+            # see daklib.archive.source_component_from_package_list
+            # which we cannot use here as we might not have a Package-List
+            # field for old packages
+            query = self.session.query(Component).order_by(Component.ordering) \
+                    .filter(Component.component_name.in_(components))
+            source_component = query.first().component_name
+
             override = self._source_override(source_component)
             if override is None:
                 hint = hints_map.get(('dsc', source.source))
@@ -299,14 +298,15 @@ class PolicyQueueUploadHandler(object):
                     missing.append(hint)
                 else:
                     section = 'misc'
-                    if component != 'main':
-                        section = "{0}/{1}".format(component, section)
+                    if source_component != 'main':
+                        section = "{0}/{1}".format(source_component, section)
                     missing.append(dict(
                             package = source.source,
                             priority = 'extra',
                             section = section,
                             component = source_component,
                             type = 'dsc',
+                            included = True,
                             ))
 
         return missing
index 06cb173604689b3d1b42d2ac7180f80b56c49ffa..dd1c3dd8475da393e8188dd0490e368ae57bb4d2 100644 (file)
@@ -133,10 +133,10 @@ re_includeinpdiff = re.compile(r"(Translation-[a-zA-Z_]+\.(?:bz2|xz))")
 ######################################################################
 
 # Match safe filenames
-re_file_safe = re.compile(r'^[a-zA-Z0-9][a-zA-Z0-9_.:~+-]*$')
+re_file_safe = re.compile(r'^[a-zA-Z0-9][a-zA-Z0-9_.~+-]*$')
 
 # Prefix of binary and source filenames
-_re_file_prefix = r'^(?P<package>[a-z0-9][a-z0-9.+-]+)_(?P<version>[A-Za-z0-9.:~+-]+?)'
+_re_file_prefix = r'^(?P<package>[a-z0-9][a-z0-9.+-]+)_(?P<version>[A-Za-z0-9.~+-]+?)'
 
 # Match binary packages
 # Groups: package, version, architecture, type
index f6a2a815259cfebe54a71ad99fc6f0bc42e364e3..7095b679c05aa991484bcb7393a16a5161ef5552 100644 (file)
@@ -23,22 +23,27 @@ It provides methods to access the included binary and source packages.
 
 import apt_inst
 import apt_pkg
+import errno
 import os
 import re
 
 from daklib.gpg import SignedFile
 from daklib.regexes import *
+import daklib.packagelist
 
-class InvalidChangesException(Exception):
+class UploadException(Exception):
     pass
 
-class InvalidBinaryException(Exception):
+class InvalidChangesException(UploadException):
     pass
 
-class InvalidSourceException(Exception):
+class InvalidBinaryException(UploadException):
     pass
 
-class InvalidHashException(Exception):
+class InvalidSourceException(UploadException):
+    pass
+
+class InvalidHashException(UploadException):
     def __init__(self, filename, hash_name, expected, actual):
         self.filename = filename
         self.hash_name = hash_name
@@ -53,12 +58,18 @@ class InvalidHashException(Exception):
                 "might already be known to the archive software.") \
                 .format(self.hash_name, self.filename, self.expected, self.actual)
 
-class InvalidFilenameException(Exception):
+class InvalidFilenameException(UploadException):
     def __init__(self, filename):
         self.filename = filename
     def __str__(self):
         return "Invalid filename '{0}'.".format(self.filename)
 
+class FileDoesNotExist(UploadException):
+    def __init__(self, filename):
+        self.filename = filename
+    def __str__(self):
+        return "Refers to non-existing file '{0}'".format(self.filename)
+
 class HashedFile(object):
     """file with checksums
     """
@@ -120,8 +131,8 @@ class HashedFile(object):
         @return: C{HashedFile} object for the given file
         """
         path = os.path.join(directory, filename)
-        size = os.stat(path).st_size
         with open(path, 'r') as fh:
+            size = os.fstat(fh.fileno()).st_size
             hashes = apt_pkg.Hashes(fh)
         return cls(filename, size, hashes.md5, hashes.sha1, hashes.sha256, section, priority)
 
@@ -137,13 +148,18 @@ class HashedFile(object):
         """
         path = os.path.join(directory, self.filename)
 
-        size = os.stat(path).st_size
+        try:
+            with open(path) as fh:
+                size = os.fstat(fh.fileno()).st_size
+                hashes = apt_pkg.Hashes(fh)
+        except IOError as e:
+            if e.errno == errno.ENOENT:
+                raise FileDoesNotExist(self.filename)
+            raise
+
         if size != self.size:
             raise InvalidHashException(self.filename, 'size', self.size, size)
 
-        with open(path) as fh:
-            hashes = apt_pkg.Hashes(fh)
-
         if hashes.md5 != self.md5sum:
             raise InvalidHashException(self.filename, 'md5sum', self.md5sum, hashes.md5)
 
@@ -273,6 +289,14 @@ class Changes(object):
         """
         return self._signed_file.valid
 
+    @property
+    def signature_timestamp(self):
+        return self._signed_file.signature_timestamp
+
+    @property
+    def contents_sha1(self):
+        return self._signed_file.contents_sha1
+
     @property
     def architectures(self):
         """list of architectures included in the upload
@@ -447,6 +471,10 @@ class Binary(object):
             version = self.control['Version']
         return (match.group('package'), version)
 
+    @property
+    def name(self):
+        return self.control['Package']
+
     @property
     def type(self):
         """package type ('deb' or 'udeb')
@@ -495,6 +523,11 @@ class Source(object):
         @type: dict-like
         """
 
+        self.package_list = daklib.packagelist.PackageList(self.dsc)
+        """Information about packages built by the source.
+        @type: daklib.packagelist.PackageList
+        """
+
         self._files = None
 
     @classmethod
index 1d26a519dcffadfb3ce471814b5d56be26a1737e..e395adf9ccb028373936743b890e0fabfde599fd 100644 (file)
@@ -6,8 +6,8 @@ Rough Guide to doing Stable Point Releases in Debian
 suite=stable
 suitename=wheezy
 pusuite=proposed-updates
-oldrev=7.5
-newrev=7.6
+oldrev=7.6
+newrev=7.7
 export SCRIPTVARS=/srv/ftp-master.debian.org/dak/config/debian/vars
 . $SCRIPTVARS
 . "${configdir}/common"
@@ -43,14 +43,17 @@ cp /srv/release.debian.org/www/${suitename}/${newrev%%.*}/${newrev}/${suitename}
 dak control-suite --add ${suitename}-r0 < ${suitename}-r0-additions.cs
 
 - sync with stable RM if there is any propup needed. do it, if so:
-like, cat /srv/release.debian.org/www/squeeze/6.0/6.0.4/propup.unstable | dak control-suite --add unstable
-Note: unstable first, then testing
+cp /srv/release.debian.org/www/${suitename}/${newrev%%.*}/${newrev}/propups.unstable .
+dak control-suite --force --add unstable <propups.unstable
+cp /srv/release.debian.org/www/${suitename}/${newrev%%.*}/${newrev}/propups.testing .
+dak control-suite --force --add testing <propups.testing
+
 - ask rms if they have RMs to do.
 - and then check if they have a d-i update. if so, bash:
 # set dioldver to "empty" if there is no old to remove
-diver=20130613+deb7u2+b1
+diver=20130613+deb7u2+b3
 dak copy-installer -s ${pusuite} -d ${suite} ${diver}
-dioldver=20130613+deb7u1+b2
+dioldver=20130613+deb7u2+b1
 cd $ftpdir/dists/${suite}/main
 if [ "${dioldver}" != "empty" ]; then
     for iarch in $(dak admin s-a list-arch ${suite}); do
index b5955731a068bdf6c426f9b33305b6ca8317f16d..49723985f44bf226dd3d771fb0fd29ea48cbcbf8 100644 (file)
@@ -8,6 +8,7 @@ DEBIAN-SPECIFIC NOTES                                  -*- mode: Org -*-
 - Change first line to NOTOK, add comment "Moving back to unchecked."
 - dak process-policy new; dak clean-suites
 - cd /srv/security-master.debian.org/queue/reject
+- dak admin forget-signature bla.changes
 - dcmd mv -n bla.changes ../unchecked
 - /srv/security-master.debian.org/dak/config/debian-security/cron.unchecked
 - cronon
index c6325233256b011c71fdd766d097c841c7f1590e..684dbed154f45236e6244ecc8ec97bf3ae6be583 100644 (file)
@@ -1,6 +1,6 @@
-arch <- c("source", "all", "amd64", "i386", "alpha", "arm", "armel", "armhf", "hppa", "hurd-i386", "ia64",
-       "kfreebsd-amd64", "kfreebsd-i386", "mips", "mipsel", "powerpc", "s390", "s390x", "sparc")
-palette(c("midnightblue", "gold", "turquoise", "cyan", "black", "red", "OrangeRed", "green3", "blue", "magenta",
+arch <- c("source", "all", "amd64", "arm64", "i386", "alpha", "arm", "armel", "armhf", "hppa", "hurd-i386", "ia64",
+       "kfreebsd-amd64", "kfreebsd-i386", "mips", "mipsel", "powerpc", "ppc64el", "s390", "s390x", "sparc")
+palette(c("midnightblue", "gold", "turquoise", "pink", "cyan", "black", "red", "OrangeRed", "green3", "blue", "magenta",
        "cornsilk3", "darkolivegreen3", "tomato4", "violetred2","thistle4", "steelblue2", "springgreen4",
        "salmon","gray"))
 cname <- c("date",arch)
index 816febc6188329a3a2051917b327664b30e7a266..7af638a05d1bdcb19e448b7b0d32b203208fdf61 100755 (executable)
@@ -6,4 +6,4 @@ set -u
 FTPDIR="/srv/upload.debian.org/ftp/pub/UploadQueue/"
 SSHDIR="/srv/upload.debian.org/UploadQueue/"
 
-yes n | find ${FTPDIR} -type f -mmin +15 -print0 -exec mv -i --target-directory=${SSHDIR} "{}" +
+find ${FTPDIR} -type f -mmin +15 -print0 -exec mv --no-clobber --target-directory=${SSHDIR} -- "{}" +
index 30fb1d9c9db571b05a68057494f8a662adaea68b..748c28c43bd6e60c62509bb475b2cc46c9bfdc64 100755 (executable)
@@ -86,6 +86,7 @@ if lockfile -r3 "${lockfile}"; then
        --exclude "/s3kr1t" \
        --exclude "/scripts/s3kr1t" \
        --exclude "/tmp/" \
+    --exclude "/public/incoming.debian.org" \
        --delete --delete-after \
        --timeout 3600 \
        -e 'ssh -o ConnectTimeout=30 -o SetupTimeout=30' \
index 26379bcc42fc30c94ea0d9ec2704a18eb8e1144a..f135d02e84b25f6fbd22c64195f0f93bbfeef4ea 100755 (executable)
@@ -7,9 +7,9 @@ use IO::Uncompress::AnyUncompress ":all";
 
 my %data;
 my %data2;
-my @archs = ("source", "all", "amd64", "i386", "alpha", "arm", "armel", "armhf", "hppa",
-             "hurd-i386", "ia64", "kfreebsd-amd64", "kfreebsd-i386", "mips", "mipsel", "powerpc", "s390",
-             "s390x", "sparc");
+my @archs = ("source", "all", "amd64", "arm64", "i386", "alpha", "arm", "armel", "armhf", "hppa",
+             "hurd-i386", "ia64", "kfreebsd-amd64", "kfreebsd-i386", "mips", "mipsel", "powerpc",
+             "ppc64el", "s390", "s390x", "sparc");
 
 for my $file (@ARGV) {
   my $FH = new IO::Uncompress::AnyUncompress $file, Transparent => 1
index 57afc92cfbe0de7da171c79aeb3c6a47ac3851a8..42e2c6ad48ba99af6b79a19534b177b2bb36299c 100755 (executable)
@@ -34,5 +34,5 @@ export SCRIPTVARS=/srv/ftp-master.debian.org/dak/config/debian/vars
 WB_DB_DIR=${WB_DB_DIR:-${scriptdir}/nfu}
 
 for arch in $(dak admin s-a list-arch unstable); do
-    wget -q http://buildd.debian.org/stats/$arch-all.txt -O ${WB_DB_DIR}/${arch}-all.txt
+    wget -q http://buildd.debian.org/stats/$arch-all.txt -O ${WB_DB_DIR}/${arch}-all.txt || echo "No w-b dump for ${arch}"
 done
index 140ead9192b88a07faa830c632f86f98b9755e9f..32d007a7cd6262cc91e96d9355485351c2498109 100644 (file)
@@ -100,6 +100,9 @@ Add a suite (origin=, label= and codename= are optional)
 signingkey= will ensure that Release files are signed
 # dak admin suite add-all-arches unstable x.y.z origin=MyDistro label=Master codename=sid signingkey=DDDDDDDD
 
+Add the components to the suite
+# dak admin s-c add unstable main contrib non-free
+
 Re-run dak init-dirs to add new suite directories to /srv/dak
 # dak init-dirs
 
diff --git a/tests/fixtures/gpg/expired-subkey.asc b/tests/fixtures/gpg/expired-subkey.asc
new file mode 100644 (file)
index 0000000..d105f6b
--- /dev/null
@@ -0,0 +1,13 @@
+-----BEGIN PGP SIGNED MESSAGE-----
+Hash: SHA512
+
+Valid: expired-subkey
+-----BEGIN PGP SIGNATURE-----
+Version: GnuPG v1
+
+iJwEAQEKAAYFAlLsQwMACgkQ+DxQkDe7kaCctQQAuzx+L6Bo+8oI9zTb04Cg2EAW
+ul4rN7XIqj3Q9/Cy2/+6+ET7GE414cA3KEElrimgAHHNdr6xPOJnEYAHSlMRG0wk
+gP9zk0nAt1ZJRgmWKb2zgbV6DYz7gAcUVaZMd+fixBdn39E3SkMnDHsUhWZNecsG
+BpSvYQJ7pQDnqo9gWbY=
+=AKH9
+-----END PGP SIGNATURE-----
diff --git a/tests/fixtures/gpg/expired.asc b/tests/fixtures/gpg/expired.asc
new file mode 100644 (file)
index 0000000..42b5217
--- /dev/null
@@ -0,0 +1,13 @@
+-----BEGIN PGP SIGNED MESSAGE-----
+Hash: SHA512
+
+Valid: expired
+-----BEGIN PGP SIGNATURE-----
+Version: GnuPG v1
+
+iJwEAQEKAAYFAjp4sQMACgkQVDsrrtoETwtbowQAxZ+imlX8u44TCRaJmgSDx6dV
+G+BiNiM7RXbfoYD3jmKWX8ILdxBXyMP2YaPmtRij03h1q8idjol6mxGl2xBrOdbB
+hE7N+67MuvpGB1yBOb6JJQuqLALyoK+efzcqPBEJv3s0eeYbWkB4ZxWRhso1DDnm
+RYieWYyoR9/oNVhsBmE=
+=WR84
+-----END PGP SIGNATURE-----
diff --git a/tests/fixtures/gpg/gnupghome/pubring.gpg b/tests/fixtures/gpg/gnupghome/pubring.gpg
new file mode 100644 (file)
index 0000000..88d7e4d
Binary files /dev/null and b/tests/fixtures/gpg/gnupghome/pubring.gpg differ
diff --git a/tests/fixtures/gpg/gnupghome/secring.gpg b/tests/fixtures/gpg/gnupghome/secring.gpg
new file mode 100644 (file)
index 0000000..a524519
Binary files /dev/null and b/tests/fixtures/gpg/gnupghome/secring.gpg differ
diff --git a/tests/fixtures/gpg/message.asc b/tests/fixtures/gpg/message.asc
new file mode 100644 (file)
index 0000000..5962eba
--- /dev/null
@@ -0,0 +1,6 @@
+-----BEGIN PGP MESSAGE-----
+Version: GnuPG v1
+
+owE7LZXEEMIWKBWWmJOZYqWQl69bnJmel1hSWpTKBQA=
+=JwM5
+-----END PGP MESSAGE-----
diff --git a/tests/fixtures/gpg/plaintext.txt b/tests/fixtures/gpg/plaintext.txt
new file mode 100644 (file)
index 0000000..c3e7f82
--- /dev/null
@@ -0,0 +1 @@
+Valid: text/plain
diff --git a/tests/fixtures/gpg/valid.asc b/tests/fixtures/gpg/valid.asc
new file mode 100644 (file)
index 0000000..2daae47
--- /dev/null
@@ -0,0 +1,13 @@
+-----BEGIN PGP SIGNED MESSAGE-----
+Hash: SHA512
+
+Valid: yes
+-----BEGIN PGP SIGNATURE-----
+Version: GnuPG v1
+
+iJwEAQEKAAYFAlQGNXoACgkQy51cWChgboRrDAP9E/cwAQgF5BpzIEN5Wnus4mf0
+L4QdVPXCVjU4f8YS4FKali0++shPRFxgqBhWaOT9XFR9y0+ZzHjfcXffY0loYMQ6
+JCZdIK0lQ4aPDFqX6892+Aka0ZaijL+20yd9IE+9E7M7rCCW+PgVFRIIKnB7Eyc2
+MkCGwQ91CAOjErXnZPw=
+=UsYp
+-----END PGP SIGNATURE-----
index 8fd2d3a5396565d48c24ff16ce30f0a466758724..3a63c07e93c37ceb94f4d8e08b6fe8cd17b46575 100755 (executable)
@@ -25,6 +25,9 @@ from daklib.architecture import match_architecture
 class MatchArchitecture(DakTestCase):
     def testEqual(self):
         self.assert_(match_architecture('amd64', 'amd64'))
+        self.assert_(match_architecture('linux-amd64', 'linux-amd64'))
+        self.assert_(match_architecture('linux-amd64', 'amd64'))
+        self.assert_(match_architecture('amd64', 'linux-amd64'))
         self.assert_(not match_architecture('amd64', 'i386'))
         self.assert_(match_architecture('kfreebsd-amd64', 'kfreebsd-amd64'))
         self.assert_(not match_architecture('kfreebsd-amd64', 'amd64'))
diff --git a/tests/test_gpg.py b/tests/test_gpg.py
new file mode 100755 (executable)
index 0000000..7c05196
--- /dev/null
@@ -0,0 +1,73 @@
+#! /usr/bin/env python
+#
+# Copyright (C) 2014, Ansgar Burchardt <ansgar@debian.org>
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import datetime
+import unittest
+from base_test import DakTestCase, fixture
+from daklib.gpg import GpgException, SignedFile
+
+keyring = fixture('gpg/gnupghome/pubring.gpg')
+fpr_valid = '0ABB89079CB58F8F94F6F310CB9D5C5828606E84'
+fpr_expired = '05A558AE65B77B559BBE0C4D543B2BAEDA044F0B'
+fpr_expired_subkey = '8865D9EC71713394ADBD8F729F7A24B7F6388CE1'
+
+def verify(filename, require_signature=True):
+    with open(fixture(filename)) as fh:
+        data = fh.read()
+    return SignedFile(data, [keyring], require_signature)
+
+class GpgTest(DakTestCase):
+    def test_valid(self):
+        result = verify('gpg/valid.asc')
+        self.assertTrue(result.valid)
+        self.assertEqual(result.primary_fingerprint, fpr_valid)
+        self.assertEqual(result.contents, "Valid: yes\n")
+        self.assertEqual(result.signature_timestamp, datetime.datetime(2014, 9, 2, 21, 24, 10))
+
+    def test_expired(self):
+        result = verify('gpg/expired.asc', False)
+        self.assertFalse(result.valid)
+        self.assertEqual(result.primary_fingerprint, fpr_expired)
+        self.assertEqual(result.contents, "Valid: expired\n")
+        self.assertEqual(result.signature_timestamp, datetime.datetime(2001, 2, 1, 0, 0, 0))
+
+    def test_expired_assertion(self):
+        with self.assertRaises(GpgException):
+            verify('gpg/expired.asc')
+
+    def test_expired_subkey(self):
+        result = verify('gpg/expired-subkey.asc', False)
+        self.assertFalse(result.valid)
+        self.assertEqual(result.primary_fingerprint, fpr_expired_subkey)
+        self.assertEqual(result.contents, "Valid: expired-subkey\n")
+        self.assertEqual(result.signature_timestamp, datetime.datetime(2014, 2, 1, 0, 0, 0))
+
+    def test_expires_subkey_assertion(self):
+        with self.assertRaises(GpgException):
+            verify('gpg/expired-subkey.asc')
+
+    def test_message_assertion(self):
+        with self.assertRaises(GpgException):
+            verify('gpg/message.asc')
+
+    def test_plain_assertion(self):
+        with self.assertRaises(GpgException):
+            verify('gpg/plaintext.txt')
+
+if __name__ == '__main__':
+    unittest.main()
index 7f3629ab48352737da8352f4c46c3af08a87d0a7..4c2fe4d416b58d1a758d3922fcf4dd7d87f16aed 100644 (file)
@@ -40,7 +40,7 @@ source_any = {
 
 source_all_any = {
     'Package-List': '\n libdune-common-dev deb libdevel optional arch=any\nlibdune-common-doc deb doc optional arch=all\n',
-    'Binary': 'libdune-common-dev libdune-common-doc\n',
+    'Binary': 'libdune-common-dev, libdune-common-doc\n',
     }
 
 source_amd64 = {
@@ -81,6 +81,14 @@ class TestPackageList(DakTestCase):
         p_all_amd64 = pl.packages_for_suite(suite_all_amd64)
         self.assertEqual(len(p_all_amd64), 1)
 
+        p = p_all[0]
+        self.assertEqual(p.name, 'libdune-common-doc')
+        self.assertEqual(p.type, 'deb')
+        self.assertEqual(p.section, 'doc')
+        self.assertEqual(p.component, 'main')
+        self.assertEqual(p.priority, 'optional')
+        self.assertEqual(p.architectures, ['all'])
+
     def testArchAny(self):
         pl = PackageList(source_any)
 
@@ -153,6 +161,10 @@ class TestPackageList(DakTestCase):
         p_kfreebsdi386 = pl.packages_for_suite(suite_kfreebsdi386)
         self.assertEqual(len(p_kfreebsdi386), 0)
 
+        suite_source = FakeSuite('source')
+        p_source = pl.packages_for_suite(suite_source)
+        self.assertEqual(len(p_source), 0)
+
     def testNoArch(self):
         pl = PackageList(source_noarch)
 
index da8a1d6d23b1b44029c3fb2407fd71d21ff721cd..342131f428e8524553c8b8274056193812ed49c7 100644 (file)
@@ -67,6 +67,7 @@ $statusdelay = 30;
               "/srv/keyring.debian.org/keyrings/debian-maintainers.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-alpha-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-amd64-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-arm64-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armhf-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armel-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hurd-i386-keyring.gpg",
@@ -77,6 +78,7 @@ $statusdelay = 30;
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mipsel-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mips-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-powerpc-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-ppc64el-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390x-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-sparc-keyring.gpg");
index 32ee8fe9f27059edc6927a510706208d4c5292a6..82039880991aaf90e195005898e8d3f0e51c457e 100644 (file)
@@ -67,6 +67,7 @@ $statusdelay = 30;
               "/srv/keyring.debian.org/keyrings/debian-maintainers.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-alpha-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-amd64-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-arm64-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armel-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hurd-i386-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-i386-keyring.gpg",
@@ -76,6 +77,7 @@ $statusdelay = 30;
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mipsel-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mips-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-powerpc-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-ppc64el-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-sparc-keyring.gpg");
 
index fc0ce0e224b0372e32083df5ae4a182f7e96058d..1bc4e2c22197b508353fcaaea601938020daac1c 100644 (file)
@@ -66,6 +66,7 @@ $statusdelay = 30;
 @keyrings = ( "/srv/keyring.debian.org/keyrings/debian-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-alpha-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-amd64-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-arm64-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armhf-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armel-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hurd-i386-keyring.gpg",
@@ -76,6 +77,7 @@ $statusdelay = 30;
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mipsel-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mips-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-powerpc-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-ppc64el-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390x-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-sparc-keyring.gpg");
index c449198c1aa328714cc7a0337b9ed06f6493ff2e..ce2119f9d45bb4faabdcef9cb58fd2c7fadc637f 100644 (file)
@@ -66,6 +66,7 @@ $statusdelay = 30;
 @keyrings = ( "/srv/keyring.debian.org/keyrings/debian-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-alpha-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-amd64-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-arm64-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armhf-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armel-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hurd-i386-keyring.gpg",
@@ -76,6 +77,7 @@ $statusdelay = 30;
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mipsel-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mips-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-powerpc-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-ppc64el-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390x-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-sparc-keyring.gpg");
index f9808fe659c6d40ae95e2b8ce56ec5632cad5476..611f8c3afa1206979541a1c24aff5ccf2f43da50 100644 (file)
@@ -66,6 +66,7 @@ $statusdelay = 30;
 @keyrings = ( "/srv/keyring.debian.org/keyrings/debian-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-alpha-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-amd64-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-arm64-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armhf-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armel-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hurd-i386-keyring.gpg",
@@ -76,7 +77,8 @@ $statusdelay = 30;
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mipsel-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mips-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-powerpc-keyring.gpg",
-             "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-ppc64el-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390x-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-sparc-keyring.gpg");
 
index e1f997255f4959e2c22adc7b53e21485e93aef23..d6789910c362bab2e27196f4fffd9211d00ee14a 100644 (file)
@@ -67,6 +67,7 @@ $statusdelay = 30;
               "/srv/keyring.debian.org/keyrings/debian-maintainers.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-alpha-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-amd64-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-arm64-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armel-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armhf-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hurd-i386-keyring.gpg",
@@ -77,6 +78,7 @@ $statusdelay = 30;
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mipsel-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mips-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-powerpc-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-ppc64el-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390x-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-sparc-keyring.gpg");