git.decadent.org.uk Git - dak.git/commitdiff
Merge remote-tracking branch 'refs/remotes/ansgar/pu/multiarchive-1' into merge
author Joerg Jaspert <joerg@debian.org>
Sat, 2 Jun 2012 13:50:28 +0000 (15:50 +0200)
committer Joerg Jaspert <joerg@debian.org>
Sat, 2 Jun 2012 13:50:28 +0000 (15:50 +0200)
* refs/remotes/ansgar/pu/multiarchive-1:
  add module for filesystem transactions
  Add nicer and stricter regular expressions.
  override, override-disparity: rename suite variable to suite_name
  daklib/dbconn.py: Don't list install_date twice in not-null constraint.
  fix typo in daklib/gpg.py: seqeuence -> sequence
  Allow to set fingerprint in __init__ for DBBinary and DBSource.
  Remove split-done.
  Remove import-known-changes and import-new-files.
  process-upload: document -d, --directory option
  We now allow non-gzip data in udebs

Signed-off-by: Joerg Jaspert <joerg@debian.org>
15 files changed:
config/backports/cron.weekly
config/debian/cron.weekly
config/debian/lintian.tags
dak/dak.py
dak/import_known_changes.py [deleted file]
dak/import_new_files.py [deleted file]
dak/override.py
dak/override_disparity.py
dak/process_upload.py
dak/split_done.py [deleted file]
daklib/dbconn.py
daklib/fstransactions.py [new file with mode: 0644]
daklib/gpg.py
daklib/regexes.py
tests/test_daklib_fstransactions.py [new file with mode: 0755]

index 097aa63cf7fcc3672e9efc3091d946857ebb1c5d..1929e10faa617e64fd26e26adce60fb3dbb35710 100755 (executable)
@@ -29,11 +29,6 @@ if [ ! -z "$(find $ftpdir/pool/ -type d -empty)" ]; then
    find $ftpdir/pool/ -type d -empty | xargs rmdir;
 fi
 
-# Clean up apt-ftparchive's databases
-
-echo "Splitting queue/done"
-dak split-done
-
 # Clean up apt-ftparchive's databases
 cd $configdir
 #echo "Cleanup apt-ftparchive's database"
index 06d3f90a93d6e3a54e81a2afd6f571b93127237a..b75539ed055a46d646a9466fdd90f0fc9cffd12b 100755 (executable)
@@ -31,10 +31,6 @@ if [ ! -z "$(find $ftpdir/pool/ -type d -empty)" ]; then
    find $ftpdir/pool/ -type d -empty | xargs rmdir;
 fi
 
-# Split queue/done
-echo "Splitting queue/done"
-dak split-done > /dev/null
-
 # Do git cleanup stuff
 echo "Doing git stuff"
 cd /srv/ftp.debian.org/git/dak.git
index 62126cb5f08aed81274efdb78f2badf0ce1809a5..c2c53419817630a830dfb51022281d5726958317 100644 (file)
@@ -83,4 +83,3 @@ lintian:
     - dir-or-file-in-mnt
     - dir-or-file-in-opt
     - dir-or-file-in-srv
-    - udeb-uses-non-gzip-data-tarball
index 579548c9470050bc8117f0db94074f4d0451cfbd..b6e77ad493689773f96b929c90a1dc679bc58b8c 100755 (executable)
@@ -131,14 +131,10 @@ def init():
          "Generates override files"),
         ("new-security-install",
          "New way to install a security upload into the archive"),
-        ("split-done",
-         "Split queue/done into a date-based hierarchy"),
         ("stats",
          "Generate statistics"),
         ("bts-categorize",
          "Categorize uncategorized bugs filed against ftp.debian.org"),
-        ("import-known-changes",
-         "import old changes files into known_changes table"),
         ("add-user",
          "Add a user to the archive"),
         ("make-changelog",
diff --git a/dak/import_known_changes.py b/dak/import_known_changes.py
deleted file mode 100755 (executable)
index 4e8068f..0000000
+++ /dev/null
@@ -1,355 +0,0 @@
-#!/usr/bin/env python
-# coding=utf8
-
-"""
-Import known_changes files
-
-@contact: Debian FTP Master <ftpmaster@debian.org>
-@copyright: 2009  Mike O'Connor <stew@debian.org>
-@license: GNU General Public License version 2 or later
-"""
-
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
-
-################################################################################
-
-
-################################################################################
-
-import sys
-import os
-import logging
-import threading
-from daklib.dbconn import DBConn, get_dbchange, get_policy_queue
-from daklib.config import Config
-import apt_pkg
-from daklib.dak_exceptions import DBUpdateError, InvalidDscError, ChangesUnicodeError
-from daklib.changes import Changes
-from daklib.utils import parse_changes, warn, gpgv_get_status_output, process_gpgv_output
-import traceback
-
-# where in dak.conf all of our configuration will be stowed
-options_prefix = "KnownChanges"
-options_prefix = "%s::Options" % options_prefix
-
-log = logging.getLogger()
-
-################################################################################
-
-
-def usage (exit_code=0):
-    print """Usage: dak import-known-changes [options]
-
-OPTIONS
-     -j n
-        run with n threads concurrently
-
-     -v, --verbose
-        show verbose information messages
-
-     -q, --quiet
-        supress all output but errors
-
-"""
-    sys.exit(exit_code)
-
-def check_signature (sig_filename, data_filename=""):
-    fingerprint = None
-
-    keyrings = [
-        "/home/joerg/keyring/keyrings/debian-keyring.gpg",
-        "/home/joerg/keyring/keyrings/debian-maintainers.gpg",
-        "/home/joerg/keyring/keyrings/debian-role-keys.gpg",
-        "/home/joerg/keyring/keyrings/emeritus-keyring.pgp",
-        "/home/joerg/keyring/keyrings/emeritus-keyring.gpg",
-        "/home/joerg/keyring/keyrings/removed-keys.gpg",
-        "/home/joerg/keyring/keyrings/removed-keys.pgp"
-        ]
-
-    keyringargs = " ".join(["--keyring %s" % x for x in keyrings ])
-
-    # Build the command line
-    status_read, status_write = os.pipe()
-    cmd = "gpgv --status-fd %s %s %s" % (status_write, keyringargs, sig_filename)
-
-    # Invoke gpgv on the file
-    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
-
-    # Process the status-fd output
-    (keywords, internal_error) = process_gpgv_output(status)
-
-    # If we failed to parse the status-fd output, let's just whine and bail now
-    if internal_error:
-        warn("Couldn't parse signature")
-        return None
-
-    # usually one would check for bad things here. We, however, do not care.
-
-    # Next check gpgv exited with a zero return code
-    if exit_status:
-        warn("Couldn't parse signature")
-        return None
-
-    # Sanity check the good stuff we expect
-    if not keywords.has_key("VALIDSIG"):
-        warn("Couldn't parse signature")
-    else:
-        args = keywords["VALIDSIG"]
-        if len(args) < 1:
-            warn("Couldn't parse signature")
-        else:
-            fingerprint = args[0]
-
-    return fingerprint
-
-
-class EndOfChanges(object):
-    """something enqueued to signify the last change"""
-    pass
-
-
-class OneAtATime(object):
-    """
-    a one space queue which sits between multiple possible producers
-    and multiple possible consumers
-    """
-    def __init__(self):
-        self.next_in_line = None
-        self.read_lock = threading.Condition()
-        self.write_lock = threading.Condition()
-        self.die = False
-
-    def plsDie(self):
-        self.die = True
-        self.write_lock.acquire()
-        self.write_lock.notifyAll()
-        self.write_lock.release()
-
-        self.read_lock.acquire()
-        self.read_lock.notifyAll()
-        self.read_lock.release()
-
-    def enqueue(self, next):
-        self.write_lock.acquire()
-        while self.next_in_line:
-            if self.die:
-                return
-            self.write_lock.wait()
-
-        assert( not self.next_in_line )
-        self.next_in_line = next
-        self.write_lock.release()
-        self.read_lock.acquire()
-        self.read_lock.notify()
-        self.read_lock.release()
-
-    def dequeue(self):
-        self.read_lock.acquire()
-        while not self.next_in_line:
-            if self.die:
-                return
-            self.read_lock.wait()
-
-        result = self.next_in_line
-
-        self.next_in_line = None
-        self.read_lock.release()
-        self.write_lock.acquire()
-        self.write_lock.notify()
-        self.write_lock.release()
-
-        if isinstance(result, EndOfChanges):
-            return None
-
-        return result
-
-class ChangesToImport(object):
-    """A changes file to be enqueued to be processed"""
-    def __init__(self, checkdir, changesfile, count):
-        self.dirpath = checkdir
-        self.changesfile = changesfile
-        self.count = count
-
-    def __str__(self):
-        return "#%d: %s in %s" % (self.count, self.changesfile, self.dirpath)
-
-class ChangesGenerator(threading.Thread):
-    """enqueues changes files to be imported"""
-    def __init__(self, parent, queue):
-        threading.Thread.__init__(self)
-        self.queue = queue
-        self.session = DBConn().session()
-        self.parent = parent
-        self.die = False
-
-    def plsDie(self):
-        self.die = True
-
-    def run(self):
-        cnf = Config()
-        count = 1
-
-        dirs = []
-        dirs.append(cnf['Dir::Done'])
-
-        for queue_name in [ "byhand", "new", "proposedupdates", "oldproposedupdates" ]:
-            queue = get_policy_queue(queue_name)
-            if queue:
-                dirs.append(os.path.abspath(queue.path))
-            else:
-                warn("Could not find queue %s in database" % queue_name)
-
-        for checkdir in dirs:
-            if os.path.exists(checkdir):
-                print "Looking into %s" % (checkdir)
-
-                for dirpath, dirnames, filenames in os.walk(checkdir, topdown=True):
-                    if not filenames:
-                        # Empty directory (or only subdirectories), next
-                        continue
-
-                    for changesfile in filenames:
-                        try:
-                            if not changesfile.endswith(".changes"):
-                                # Only interested in changes files.
-                                continue
-                            count += 1
-
-                            if not get_dbchange(changesfile, self.session):
-                                to_import = ChangesToImport(dirpath, changesfile, count)
-                                if self.die:
-                                    return
-                                self.queue.enqueue(to_import)
-                        except KeyboardInterrupt:
-                            print("got Ctrl-c in enqueue thread.  terminating")
-                            self.parent.plsDie()
-                            sys.exit(1)
-
-        self.queue.enqueue(EndOfChanges())
-
-class ImportThread(threading.Thread):
-    def __init__(self, parent, queue):
-        threading.Thread.__init__(self)
-        self.queue = queue
-        self.session = DBConn().session()
-        self.parent = parent
-        self.die = False
-
-    def plsDie(self):
-        self.die = True
-
-    def run(self):
-        while True:
-            try:
-                if self.die:
-                    return
-                to_import = self.queue.dequeue()
-                if not to_import:
-                    return
-
-                print( "Directory %s, file %7d, (%s)" % (to_import.dirpath[-10:], to_import.count, to_import.changesfile) )
-
-                changes = Changes()
-                changes.changes_file = to_import.changesfile
-                changesfile = os.path.join(to_import.dirpath, to_import.changesfile)
-                changes.changes = parse_changes(changesfile, signing_rules=-1)
-                changes.changes["fingerprint"] = check_signature(changesfile)
-                changes.add_known_changes(to_import.dirpath, session=self.session)
-                self.session.commit()
-
-            except InvalidDscError as line:
-                warn("syntax error in .dsc file '%s', line %s." % (f, line))
-
-            except ChangesUnicodeError:
-                warn("found invalid changes file, not properly utf-8 encoded")
-
-            except KeyboardInterrupt:
-                print("Caught C-c; on ImportThread. terminating.")
-                self.parent.plsDie()
-                sys.exit(1)
-
-            except:
-                self.parent.plsDie()
-                sys.exit(1)
-
-class ImportKnownChanges(object):
-    def __init__(self,num_threads):
-        self.queue = OneAtATime()
-        self.threads = [ ChangesGenerator(self,self.queue) ]
-
-        for i in range(num_threads):
-            self.threads.append( ImportThread(self,self.queue) )
-
-        try:
-            for thread in self.threads:
-                thread.start()
-
-        except KeyboardInterrupt:
-            print("Caught C-c; terminating.")
-            warn("Caught C-c; terminating.")
-            self.plsDie()
-
-    def plsDie(self):
-        traceback.print_stack90
-        for thread in self.threads:
-            print( "STU: before ask %s to die" % thread )
-            thread.plsDie()
-            print( "STU: after ask %s to die" % thread )
-
-        self.threads=[]
-        sys.exit(1)
-
-
-def main():
-    cnf = Config()
-
-    arguments = [('h',"help", "%s::%s" % (options_prefix,"Help")),
-                 ('j',"concurrency", "%s::%s" % (options_prefix,"Concurrency"),"HasArg"),
-                 ('q',"quiet", "%s::%s" % (options_prefix,"Quiet")),
-                 ('v',"verbose", "%s::%s" % (options_prefix,"Verbose")),
-                ]
-
-    args = apt_pkg.parse_commandline(cnf.Cnf, arguments,sys.argv)
-
-    num_threads = 1
-
-    if len(args) > 0:
-        usage()
-
-    if cnf.has_key("%s::%s" % (options_prefix,"Help")):
-        usage()
-
-    level=logging.INFO
-    if cnf.has_key("%s::%s" % (options_prefix,"Quiet")):
-        level=logging.ERROR
-
-    elif cnf.has_key("%s::%s" % (options_prefix,"Verbose")):
-        level=logging.DEBUG
-
-
-    logging.basicConfig( level=level,
-                         format='%(asctime)s %(levelname)s %(message)s',
-                         stream = sys.stderr )
-
-    if Config().has_key( "%s::%s" %(options_prefix,"Concurrency")):
-        num_threads = int(Config()[ "%s::%s" %(options_prefix,"Concurrency")])
-
-    ImportKnownChanges(num_threads)
-
-
-
-
-if __name__ == '__main__':
-    main()
diff --git a/dak/import_new_files.py b/dak/import_new_files.py
deleted file mode 100755 (executable)
index 7a29467..0000000
+++ /dev/null
@@ -1,187 +0,0 @@
-#!/usr/bin/env python
-# coding=utf8
-
-"""
-Import known_changes files
-
-@contact: Debian FTP Master <ftpmaster@debian.org>
-@copyright: 2009  Mike O'Connor <stew@debian.org>
-@license: GNU General Public License version 2 or later
-"""
-
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
-
-################################################################################
-
-
-################################################################################
-
-import sys
-import os
-import logging
-import threading
-import glob
-import apt_pkg
-from daklib.dbconn import DBConn, get_dbchange, get_policy_queue, session_wrapper, ChangePendingFile, get_location, check_poolfile
-from daklib.config import Config
-from daklib.queue import Upload
-from daklib.utils import poolify
-
-# where in dak.conf all of our configuration will be stowed
-options_prefix = "NewFiles"
-options_prefix = "%s::Options" % options_prefix
-
-log = logging.getLogger()
-
-################################################################################
-
-
-def usage (exit_code=0):
-    print """Usage: dak import-new-files [options]
-
-OPTIONS
-     -v, --verbose
-        show verbose information messages
-
-     -q, --quiet
-        supress all output but errors
-
-"""
-    sys.exit(exit_code)
-
-class ImportNewFiles(object):
-    @session_wrapper
-    def __init__(self, session=None):
-        cnf = Config()
-        try:
-            newq = get_policy_queue('new', session)
-            for changes_fn in glob.glob(newq.path + "/*.changes"):
-                changes_bn = os.path.basename(changes_fn)
-                chg = get_dbchange(changes_bn, session)
-
-                u = Upload()
-                success = u.load_changes(changes_fn)
-                u.pkg.changes_file = changes_bn
-                u.check_hashes()
-
-                if not chg:
-                    chg = u.pkg.add_known_changes(newq.path, newq.policy_queue_id, session)
-                    session.add(chg)
-
-                if not success:
-                    log.critical("failed to load %s" % changes_fn)
-                    sys.exit(1)
-                else:
-                    log.critical("ACCLAIM: %s" % changes_fn)
-
-                files=[]
-                for chg_fn in u.pkg.files.keys():
-                    try:
-                        f = open(os.path.join(newq.path, chg_fn))
-                        cpf = ChangePendingFile()
-                        cpf.filename = chg_fn
-                        cpf.size = u.pkg.files[chg_fn]['size']
-                        cpf.md5sum = u.pkg.files[chg_fn]['md5sum']
-
-                        if u.pkg.files[chg_fn].has_key('sha1sum'):
-                            cpf.sha1sum = u.pkg.files[chg_fn]['sha1sum']
-                        else:
-                            log.warning("Having to generate sha1sum for %s" % chg_fn)
-                            f.seek(0)
-                            cpf.sha1sum = apt_pkg.sha1sum(f)
-
-                        if u.pkg.files[chg_fn].has_key('sha256sum'):
-                            cpf.sha256sum = u.pkg.files[chg_fn]['sha256sum']
-                        else:
-                            log.warning("Having to generate sha256sum for %s" % chg_fn)
-                            f.seek(0)
-                            cpf.sha256sum = apt_pkg.sha256sum(f)
-
-                        session.add(cpf)
-                        files.append(cpf)
-                        f.close()
-                    except IOError:
-                        # Can't find the file, try to look it up in the pool
-                        poolname = poolify(u.pkg.changes["source"], u.pkg.files[chg_fn]["component"])
-                        l = get_location(cnf["Dir::Pool"], u.pkg.files[chg_fn]["component"], session=session)
-                        if not l:
-                            log.critical("ERROR: Can't find location for %s (component %s)" % (chg_fn, u.pkg.files[chg_fn]["component"]))
-
-                        found, poolfile = check_poolfile(os.path.join(poolname, chg_fn),
-                                                         u.pkg.files[chg_fn]['size'],
-                                                         u.pkg.files[chg_fn]["md5sum"],
-                                                         l.location_id,
-                                                         session=session)
-
-                        if found is None:
-                            log.critical("ERROR: Found multiple files for %s in pool" % chg_fn)
-                            sys.exit(1)
-                        elif found is False and poolfile is not None:
-                            log.critical("ERROR: md5sum / size mismatch for %s in pool" % chg_fn)
-                            sys.exit(1)
-                        else:
-                            if poolfile is None:
-                                log.critical("ERROR: Could not find %s in pool" % chg_fn)
-                                sys.exit(1)
-                            else:
-                                chg.poolfiles.append(poolfile)
-
-
-                chg.files = files
-
-
-            session.commit()
-
-        except KeyboardInterrupt:
-            print("Caught C-c; terminating.")
-            utils.warn("Caught C-c; terminating.")
-            self.plsDie()
-
-
-def main():
-    cnf = Config()
-
-    arguments = [('h',"help", "%s::%s" % (options_prefix,"Help")),
-                 ('q',"quiet", "%s::%s" % (options_prefix,"Quiet")),
-                 ('v',"verbose", "%s::%s" % (options_prefix,"Verbose")),
-                ]
-
-    args = apt_pkg.parse_commandline(cnf.Cnf, arguments,sys.argv)
-
-    num_threads = 1
-
-    if len(args) > 0:
-        usage(1)
-
-    if cnf.has_key("%s::%s" % (options_prefix,"Help")):
-        usage(0)
-
-    level=logging.INFO
-    if cnf.has_key("%s::%s" % (options_prefix,"Quiet")):
-        level=logging.ERROR
-
-    elif cnf.has_key("%s::%s" % (options_prefix,"Verbose")):
-        level=logging.DEBUG
-
-
-    logging.basicConfig( level=level,
-                         format='%(asctime)s %(levelname)s %(message)s',
-                         stream = sys.stderr )
-
-    ImportNewFiles()
-
-
-if __name__ == '__main__':
-    main()
index ce5d12ff41348c1f4c929fd7f5374554eafd13e3..a9f28322f1edfd7f74d323e9c1ee8a3ea63fe273 100755 (executable)
@@ -56,17 +56,17 @@ Make microchanges or microqueries of the binary overrides
 """
     sys.exit(exit_code)
 
-def check_override_compliance(package, priority, suite, cnf, session):
+def check_override_compliance(package, priority, suite_name, cnf, session):
     print "Checking compliance with related overrides..."
 
     depends = set()
     rdepends = set()
     components = get_component_names(session)
-    arches = set([x.arch_string for x in get_suite_architectures(suite)])
+    arches = set([x.arch_string for x in get_suite_architectures(suite_name)])
     arches -= set(["source", "all"])
     for arch in arches:
         for component in components:
-            Packages = utils.get_packages_from_ftp(cnf['Dir::Root'], suite, component, arch)
+            Packages = utils.get_packages_from_ftp(cnf['Dir::Root'], suite_name, component, arch)
             while Packages.Step():
                 package_name = Packages.Section.Find("Package")
                 dep_list = Packages.Section.Find("Depends")
@@ -87,7 +87,7 @@ def check_override_compliance(package, priority, suite, cnf, session):
                JOIN priority p ON p.id = o.priority
                WHERE s.suite_name = '%s'
                AND o.package in ('%s')""" \
-               % (suite, "', '".join(depends.union(rdepends)))
+               % (suite_name, "', '".join(depends.union(rdepends)))
     packages = session.execute(query)
 
     excuses = []
@@ -136,7 +136,7 @@ def main ():
         utils.fubar("package name is a required argument.")
 
     package = arguments.pop(0)
-    suite = Options["Suite"]
+    suite_name = Options["Suite"]
     if arguments and len(arguments) > 2:
         utils.fubar("Too many arguments")
 
@@ -170,8 +170,8 @@ def main ():
        AND override.section = section.id
        AND override.package = :package
        AND override.suite = suite.id
-       AND suite.suite_name = :suite
-        """ % (eqdsc), {'package': package, 'suite': suite})
+       AND suite.suite_name = :suite_name
+        """ % (eqdsc), {'package': package, 'suite_name': suite_name})
 
         if q.rowcount == 0:
             continue
@@ -227,7 +227,7 @@ def main ():
         utils.fubar("Trying to change priority of a source-only package")
 
     if Options["Check"] and newpriority != oldpriority:
-        check_override_compliance(package, p, suite, cnf, session)
+        check_override_compliance(package, p, suite_name, cnf, session)
 
     # If we're in no-action mode
     if Options["No-Action"]:
@@ -266,9 +266,9 @@ def main ():
            SET priority = :newprioid
          WHERE package = :package
            AND override.type != :otypedsc
-           AND suite = (SELECT id FROM suite WHERE suite_name = :suite)""",
+           AND suite = (SELECT id FROM suite WHERE suite_name = :suite_name)""",
            {'newprioid': newprioid, 'package': package,
-            'otypedsc':  dsc_otype_id, 'suite': suite})
+            'otypedsc':  dsc_otype_id, 'suite_name': suite_name})
 
         Logger.log(["changed priority", package, oldpriority, newpriority])
 
@@ -277,9 +277,9 @@ def main ():
         UPDATE override
            SET section = :newsecid
          WHERE package = :package
-           AND suite = (SELECT id FROM suite WHERE suite_name = :suite)""",
+           AND suite = (SELECT id FROM suite WHERE suite_name = :suite_name)""",
            {'newsecid': newsecid, 'package': package,
-            'suite': suite})
+            'suite_name': suite_name})
 
         Logger.log(["changed section", package, oldsection, newsection])
 
@@ -311,7 +311,7 @@ def main ():
         Subst["__SOURCE__"] = package
 
         summary = "Concerning package %s...\n" % (package)
-        summary += "Operating on the %s suite\n" % (suite)
+        summary += "Operating on the %s suite\n" % (suite_name)
         if newpriority != oldpriority:
             summary += "Changed priority from %s to %s\n" % (oldpriority,newpriority)
         if newsection != oldsection:
index 6cb392df7f3ff673c1f9d89d0e0a1001e8d341dd..e48bd7de23a9c826f46ed73027f2a0e8907ff66e 100755 (executable)
@@ -81,13 +81,13 @@ def main():
 
     depends = {}
     session = DBConn().session()
-    suite = Options['suite']
+    suite_name = Options['suite']
     components = get_component_names(session)
-    arches = set([x.arch_string for x in get_suite_architectures(suite)])
+    arches = set([x.arch_string for x in get_suite_architectures(suite_name)])
     arches -= set(['source', 'all'])
     for arch in arches:
         for component in components:
-            Packages = utils.get_packages_from_ftp(cnf['Dir::Root'], suite, component, arch)
+            Packages = utils.get_packages_from_ftp(cnf['Dir::Root'], suite_name, component, arch)
             while Packages.Step():
                 package = Packages.Section.Find('Package')
                 dep_list = Packages.Section.Find('Depends')
@@ -110,7 +110,7 @@ def main():
                JOIN bin_associations ba ON ba.bin = b.id
                WHERE s.suite_name = '%s'
                AND ba.suite = s.id
-               AND p.level <> 0""" % suite
+               AND p.level <> 0""" % suite_name
     packages = session.execute(query)
 
     out = {}
index 53ab7cc98f7be1553e050449c181e238c97a3275..1908ecb5ed2e2310c257555be45a6cf1059d8cc5 100755 (executable)
@@ -188,6 +188,7 @@ Logger = None
 def usage (exit_code=0):
     print """Usage: dak process-upload [OPTION]... [CHANGES]...
   -a, --automatic           automatic run
+  -d, --directory <DIR>     process uploads in <DIR>
   -h, --help                show this help and exit.
   -n, --no-action           don't do anything
   -p, --no-lock             don't check lockfile !! for cron.daily only !!
diff --git a/dak/split_done.py b/dak/split_done.py
deleted file mode 100755 (executable)
index 3d07287..0000000
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/usr/bin/env python
-
-""" Split queue/done into date based subdirectories """
-# Copyright (C) 2004, 2005, 2006  James Troup <james@nocrew.org>
-# Copyright (C) 2008  Joerg Jaspert <joerg@debian.org>
-
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
-
-################################################################################
-
-import glob, os, stat, time
-from daklib import utils
-
-################################################################################
-
-def main():
-    Cnf = utils.get_conf()
-    count = 0
-    move_date = int(time.time())
-    os.chdir(Cnf["Dir::Done"])
-    files = glob.glob("%s/*" % (Cnf["Dir::Done"]))
-    for filename in files:
-        if os.path.isfile(filename):
-            filemtime = os.stat(filename)[stat.ST_MTIME]
-            if filemtime > move_date:
-                continue
-            mtime = time.gmtime(filemtime)
-            dirname = time.strftime("%Y/%m/%d", mtime)
-            if not os.path.exists(dirname):
-                print "Creating: %s" % (dirname)
-                os.makedirs(dirname)
-            dest = dirname + '/' + os.path.basename(filename)
-            if os.path.exists(dest):
-                utils.warn("%s already exists." % (dest))
-                continue
-            print "Move: %s -> %s" % (filename, dest)
-            os.rename(filename, dest)
-            count = count + 1
-    print "Moved %d files." % (count)
-
-############################################################
-
-if __name__ == '__main__':
-    main()
index c0801b42511cbb2eac973e7b53bf3a82653cd3e8..a31e9167c6cd4671a55274be67188bc39ff0c521 100755 (executable)
@@ -500,7 +500,7 @@ def subprocess_setup():
 class DBBinary(ORMObject):
     def __init__(self, package = None, source = None, version = None, \
         maintainer = None, architecture = None, poolfile = None, \
-        binarytype = 'deb'):
+        binarytype = 'deb', fingerprint=None):
         self.package = package
         self.source = source
         self.version = version
@@ -508,6 +508,7 @@ class DBBinary(ORMObject):
         self.architecture = architecture
         self.poolfile = poolfile
         self.binarytype = binarytype
+        self.fingerprint = fingerprint
 
     @property
     def pkid(self):
@@ -2459,13 +2460,14 @@ class Dak822(Deb822):
 
 class DBSource(ORMObject):
     def __init__(self, source = None, version = None, maintainer = None, \
-        changedby = None, poolfile = None, install_date = None):
+        changedby = None, poolfile = None, install_date = None, fingerprint = None):
         self.source = source
         self.version = version
         self.maintainer = maintainer
         self.changedby = changedby
         self.poolfile = poolfile
         self.install_date = install_date
+        self.fingerprint = fingerprint
 
     @property
     def pkid(self):
@@ -2478,7 +2480,7 @@ class DBSource(ORMObject):
 
     def not_null_constraints(self):
         return ['source', 'version', 'install_date', 'maintainer', \
-            'changedby', 'poolfile', 'install_date']
+            'changedby', 'poolfile']
 
     def read_control_fields(self):
         '''
diff --git a/daklib/fstransactions.py b/daklib/fstransactions.py
new file mode 100644 (file)
index 0000000..8fb7376
--- /dev/null
@@ -0,0 +1,201 @@
+# Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+"""Transactions for filesystem actions
+"""
+
+import errno
+import os
+import shutil
+
+class _FilesystemAction(object):
+    @property
+    def temporary_name(self):
+        raise NotImplementedError()
+
+    def check_for_temporary(self):
+        try:
+            if os.path.exists(self.temporary_name):
+                raise IOError("Temporary file '{0}' already exists.".format(self.temporary_name))
+        except NotImplementedError:
+            pass
+
+class _FilesystemCopyAction(_FilesystemAction):
+    def __init__(self, source, destination, link=True, symlink=False, mode=None):
+        self.destination = destination
+        self.need_cleanup = False
+
+        self.check_for_temporary()
+        destdir = os.path.dirname(self.destination)
+        if not os.path.exists(destdir):
+            os.makedirs(destdir, 0o2775)
+        if symlink:
+            os.symlink(source, self.destination)
+        elif link:
+            try:
+                os.link(source, self.destination)
+            except OSError:
+                shutil.copy2(source, self.destination)
+        else:
+            shutil.copy2(source, self.destination)
+
+        self.need_cleanup = True
+        if mode is not None:
+            os.chmod(self.destination, mode)
+
+    @property
+    def temporary_name(self):
+        return self.destination
+
+    def commit(self):
+        pass
+
+    def rollback(self):
+        if self.need_cleanup:
+            os.unlink(self.destination)
+            self.need_cleanup = False
+
+class _FilesystemUnlinkAction(_FilesystemAction):
+    def __init__(self, path):
+        self.path = path
+        self.need_cleanup = False
+
+        self.check_for_temporary()
+        os.rename(self.path, self.temporary_name)
+        self.need_cleanup = True
+
+    @property
+    def temporary_name(self):
+        return "{0}.dak-rm".format(self.path)
+
+    def commit(self):
+        if self.need_cleanup:
+            os.unlink(self.temporary_name)
+            self.need_cleanup = False
+
+    def rollback(self):
+        if self.need_cleanup:
+            os.rename(self.temporary_name, self.path)
+            self.need_cleanup = False
+
+class _FilesystemCreateAction(_FilesystemAction):
+    def __init__(self, path):
+        self.path = path
+        self.need_cleanup = True
+
+    @property
+    def temporary_name(self):
+        return self.path
+
+    def commit(self):
+        pass
+
+    def rollback(self):
+        if self.need_cleanup:
+            os.unlink(self.path)
+            self.need_cleanup = False
+
+class FilesystemTransaction(object):
+    """transactions for filesystem actions"""
+    def __init__(self):
+        self.actions = []
+
+    def copy(self, source, destination, link=True, symlink=False, mode=None):
+        """copy `source` to `destination`
+
+        Args:
+           source (str): source file
+           destination (str): destination file
+
+        Kwargs:
+           link (bool): Try hardlinking, falling back to copying.
+           symlink (bool): Create a symlink instead
+           mode (int): Permissions to change `destination` to.
+        """
+        self.actions.append(_FilesystemCopyAction(source, destination, link=link, symlink=symlink, mode=mode))
+
+    def move(self, source, destination, mode=None):
+        """move `source` to `destination`
+
+        Args:
+           source (str): source file
+           destination (str): destination file
+
+        Kwargs:
+           mode (int): Permissions to change `destination` to.
+        """
+        self.copy(source, destination, link=True, mode=mode)
+        self.unlink(source)
+
+    def unlink(self, path):
+        """unlink `path`
+
+        Args:
+           path (str): file to unlink
+        """
+        self.actions.append(_FilesystemUnlinkAction(path))
+
+    def create(self, path, mode=None):
+        """create `path` and return file handle
+
+        Args:
+           path (str): file to create
+
+        Kwargs:
+           mode (int): Permissions for the new file
+
+        Returns:
+           file handle of the new file
+        """
+        destdir = os.path.dirname(path)
+        if not os.path.exists(destdir):
+            os.makedirs(destdir, 0o2775)
+        if os.path.exists(path):
+            raise IOError("File '{0}' already exists.".format(path))
+        fh = open(path, 'w')
+        self.actions.append(_FilesystemCreateAction(path))
+        if mode is not None:
+            os.chmod(path, mode)
+        return fh
+
+    def commit(self):
+        """Commit all recorded actions."""
+        try:
+            for action in self.actions:
+                action.commit()
+        except:
+            self.rollback()
+            raise
+        finally:
+            self.actions = []
+
+    def rollback(self):
+        """Undo all recorded actions."""
+        try:
+            for action in self.actions:
+                action.rollback()
+        finally:
+            self.actions = []
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, type, value, traceback):
+        if type is None:
+            self.commit()
+        else:
+            self.rollback()
+        return None
index 62bfe096510453c180acb4fda1f5b80af6581ffa..865e9bd6dccc13b9b47354c510fa9c5503f2ab8c 100644 (file)
@@ -68,7 +68,7 @@ class SignedFile(object):
     def __init__(self, data, keyrings, require_signature=True, gpg="/usr/bin/gpg"):
         """
         @param data: string containing the message
-        @param keyrings: seqeuence of keyrings
+        @param keyrings: sequence of keyrings
         @param require_signature: if True (the default), will raise an exception if no valid signature was found
         @param gpg: location of the gpg binary
         """
index 8c9a7fae72545e9159973c60a2962c9c08c8b8d3..15b79a6368170e988b7fa8428f917359b4395907 100755 (executable)
@@ -131,3 +131,49 @@ re_includeinrelease = re.compile (r"(Translation-[a-zA-Z_]+\.(?:bz2|xz)|Contents
 
 # in generate_index_diffs
 re_includeinpdiff = re.compile(r"(Translation-[a-zA-Z_]+\.(?:bz2|xz))")
+
+
+######################################################################
+# Patterns matching filenames                                        #
+######################################################################
+
+# Match safe filenames
+re_file_safe = re.compile(r'^[a-zA-Z0-9][a-zA-Z0-9_.:~+-]*$')
+
+# Prefix of binary and source filenames
+_re_file_prefix = r'^(?P<package>[a-z0-9][a-z0-9.+-]+)_(?P<version>[A-Za-z0-9.:~+-]+)'
+
+# Match binary packages
+# Groups: package, version, architecture, type
+re_file_binary = re.compile(_re_file_prefix + r'_(?P<architecture>[a-z0-9]+)\.(?P<type>u?deb)$')
+
+# Match dsc files
+# Groups: package, version
+re_file_dsc = re.compile(_re_file_prefix + r'\.dsc$')
+
+# Match other source files
+# Groups: package, version
+re_file_source = re.compile(_re_file_prefix + r'(?:(?:\.orig(?:-[a-zA-Z0-9-]+)?)?\.tar\.(?:bz2|gz|xz)|\.diff\.gz)$')
+
+# Match upstream tarball
+# Groups: package, version
+re_file_orig = re.compile(_re_file_prefix + r'\.orig(?:-[a-zA-Z0-9-]+)?\.tar\.(?:bz2|gz|xz)$')
+
+######################################################################
+# Patterns matching fields                                           #
+######################################################################
+
+# Match package name
+re_field_package = re.compile(r'^[a-z0-9][a-z0-9.+-]+$')
+
+# Match version
+# Groups: without_epoch
+re_field_version = re.compile(r'^(?:[0-9]+:)?(?P<without_epoch>[A-Za-z0-9.:~+-]+)$')
+
+# Extract upstream version
+# Groups: upstream
+re_field_version_upstream = re.compile(r'^(?:[0-9]+:)?(?P<upstream>.*)-[^-]*$')
+
+# Match source field
+# Groups: package, version
+re_field_source = re.compile(r'^(?P<package>[a-z0-9][a-z0-9.+-]+)(?:\s*\((?P<version>[A-Za-z0-9.:~+-]+)\))?')
diff --git a/tests/test_daklib_fstransactions.py b/tests/test_daklib_fstransactions.py
new file mode 100755 (executable)
index 0000000..41a109b
--- /dev/null
@@ -0,0 +1,119 @@
+#! /usr/bin/env python
+#
+# Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+from base_test import DakTestCase
+from daklib.fstransactions import FilesystemTransaction
+
+from unittest import main
+
+import os
+import shutil
+import tempfile
+
+
+class TemporaryDirectory:
+    def __init__(self):
+        self.directory = None
+    def __str__(self):
+        return self.directory
+    def filename(self, suffix):
+        return os.path.join(self.directory, suffix)
+    def __enter__(self):
+        self.directory = tempfile.mkdtemp()
+        return self
+    def __exit__(self, *args):
+        if self.directory is not None:
+            shutil.rmtree(self.directory)
+            self.directory = None
+        return None
+
+class FilesystemTransactionTestCase(DakTestCase):
+    def _copy_a_b(self, tmp, fs, **kwargs):
+        fs.copy(tmp.filename('a'), tmp.filename('b'), **kwargs)
+
+    def _write_to_a(self, tmp):
+        with open(tmp.filename('a'), 'w') as fh:
+            print >>fh, 'a'
+
+    def test_copy_non_existing(self):
+        def copy():
+            with TemporaryDirectory() as t:
+                with FilesystemTransaction() as fs:
+                    self._copy_a_b(t, fs)
+
+        self.assertRaises(IOError, copy)
+
+    def test_copy_existing_and_commit(self):
+        with TemporaryDirectory() as t:
+            self._write_to_a(t)
+
+            with FilesystemTransaction() as fs:
+                self._copy_a_b(t, fs)
+                self.assertTrue(os.path.exists(t.filename('a')))
+                self.assertTrue(os.path.exists(t.filename('b')))
+
+            self.assertTrue(os.path.exists(t.filename('a')))
+            self.assertTrue(os.path.exists(t.filename('b')))
+
+    def test_copy_existing_and_rollback(self):
+        with TemporaryDirectory() as t:
+            self._write_to_a(t)
+
+            class TestException(Exception):
+                pass
+            try:
+                with FilesystemTransaction() as fs:
+                    self._copy_a_b(t, fs)
+                    self.assertTrue(os.path.exists(t.filename('a')))
+                    self.assertTrue(os.path.exists(t.filename('b')))
+                    raise TestException()
+            except TestException:
+                pass
+
+            self.assertTrue(os.path.exists(t.filename('a')))
+            self.assertFalse(os.path.exists(t.filename('b')))
+
+    def test_unlink_and_commit(self):
+        with TemporaryDirectory() as t:
+            self._write_to_a(t)
+            a = t.filename('a')
+            with FilesystemTransaction() as fs:
+                self.assertTrue(os.path.exists(a))
+                fs.unlink(a)
+                self.assertFalse(os.path.exists(a))
+            self.assertFalse(os.path.exists(a))
+
+    def test_unlink_and_rollback(self):
+        with TemporaryDirectory() as t:
+            self._write_to_a(t)
+            a = t.filename('a')
+            class TestException(Exception):
+                pass
+
+            try:
+                with FilesystemTransaction() as fs:
+                    self.assertTrue(os.path.exists(a))
+                    fs.unlink(a)
+                    self.assertFalse(os.path.exists(a))
+                    raise TestException()
+            except TestException:
+                pass
+            self.assertTrue(os.path.exists(a))
+
+if __name__ == '__main__':
+    main()