git.decadent.org.uk Git - dak.git/commitdiff
New.
author James Troup <james@nocrew.org>
Tue, 12 Feb 2002 23:08:07 +0000 (23:08 +0000)
committer James Troup <james@nocrew.org>
Tue, 12 Feb 2002 23:08:07 +0000 (23:08 +0000)
15 files changed:
ashley [new file with mode: 0755]
docs/README.new-incoming [new file with mode: 0644]
docs/README.stable-point-release [new file with mode: 0644]
fernanda.py [new file with mode: 0755]
jennifer [new file with mode: 0755]
katie.py [new file with mode: 0644]
lisa [new file with mode: 0755]
templates/jennifer.accepted [new file with mode: 0644]
templates/jennifer.announce [new file with mode: 0644]
templates/jennifer.bug-close [new file with mode: 0644]
templates/jennifer.bug-nmu-fixed [new file with mode: 0644]
templates/jennifer.new [new file with mode: 0644]
templates/jennifer.override-disparity [new file with mode: 0644]
templates/katie.unaccept [new file with mode: 0644]
templates/lisa.bxa_notification [new file with mode: 0644]

diff --git a/ashley b/ashley
new file mode 100755 (executable)
index 0000000..38ee3ca
--- /dev/null
+++ b/ashley
@@ -0,0 +1,99 @@
+#!/usr/bin/env python
+
+# Dump variables from a .katie file to stdout
+# Copyright (C) 2001  James Troup <james@nocrew.org>
+# $Id: ashley,v 1.1 2002-02-12 23:08:07 troup Exp $
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+import string, sys;
+import katie, utils;
+
+def main():
+    Cnf = utils.get_conf()
+    k = katie.Katie(Cnf);
+    for arg in sys.argv[1:]:
+        if arg[-8:] != ".changes":
+            utils.warn("Skipping '%s', unknown file type." % (arg));
+            continue;
+        k.pkg.changes_file = arg;
+        print "%s:" % (arg);
+       k.init_vars();
+        k.update_vars();
+
+        changes = k.pkg.changes;
+        print " Changes:";
+        # Mandatory changes fields
+        for i in [ "source", "version", "maintainer", "urgency" ]:
+            print "  %s: %s" % (string.capitalize(i), changes[i]);
+            del changes[i];
+        # Mandatory changes lists
+        for i in [ "distribution", "architecture" ]:
+            print "  %s: %s" % (string.capitalize(i), string.join(changes[i].keys()));
+            del changes[i];
+        # Optional changes fields
+        for i in [ "changed-by", "changedby822", "maintainer822", "filecontents" ]:
+            if changes.has_key(i):
+                print "  %s: %s" % (string.capitalize(i), changes[i]);
+                del changes[i];
+        print;
+        if changes:
+            utils.warn("changes still has following unrecognised keys: %s" % (changes.keys()));
+
+        dsc = k.pkg.dsc;
+        print " Dsc:";
+        for i in [ "source", "version", "maintainer" ]:
+            if dsc.has_key(i):
+                print "  %s: %s" % (string.capitalize(i), dsc[i]);
+                del dsc[i];
+        print;
+        if dsc:
+            utils.warn("dsc still has following unrecognised keys: %s" % (dsc.keys()));
+
+        files = k.pkg.files;
+        print " Files:"
+        for file in files.keys():
+            print "  %s:" % (file);
+            for i in [ "package", "version", "architecture", "type", "size",
+                       "md5sum", "component", "location id", "source package",
+                       "source version", "maintainer", "dbtype", "files id",
+                       "new", "section", "priority" ]:
+                if files[file].has_key(i):
+                    print "   %s: %s" % (string.capitalize(i), files[file][i]);
+                    del files[file][i];
+            if files[file]:
+                utils.warn("files[%s] still has following unrecognised keys: %s" % (file, files[file].keys()));
+        print;
+
+        dsc_files = k.pkg.dsc_files;
+        print " Dsc Files:";
+        for file in dsc_files.keys():
+            print "  %s:" % (file);
+            # Mandatory fields
+            for i in [ "size", "md5sum" ]:
+                print "   %s: %s" % (string.capitalize(i), dsc_files[file][i]);
+                del dsc_files[file][i];
+            # Optional fields
+            for i in [ "files id" ]:
+                if dsc_files[file].has_key(i):
+                    print "   %s: %s" % (string.capitalize(i), dsc_files[file][i]);
+                    del dsc_files[file][i];
+            if dsc_files[file]:
+                utils.warn("dsc_files[%s] still has following unrecognised keys: %s" % (file, dsc_files[file].keys()));
+
+
+if __name__ == '__main__':
+    main()
+
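
The pattern ashley uses above - print the keys it knows about, delete them as it goes, and warn about whatever is left over - can be sketched as a standalone, modern-Python illustration (the field names and values below are made-up examples, not anything this commit defines):

    # Hypothetical sketch of ashley's "dump known keys, warn about the rest" idiom.
    def dump_known(mapping, mandatory, optional, indent="  "):
        for key in mandatory:
            # Mandatory keys are expected to be present; a KeyError here means a parse bug.
            print("%s%s: %s" % (indent, key.capitalize(), mapping.pop(key)))
        for key in optional:
            if key in mapping:
                print("%s%s: %s" % (indent, key.capitalize(), mapping.pop(key)))
        if mapping:
            # Anything left over is a field the dump code does not yet know about.
            print("warning: unrecognised keys: %s" % list(mapping.keys()))

    changes = {"source": "hello", "version": "2.10-1", "urgency": "low",
               "maintainer": "A. Maintainer <someone@example.org>"}
    dump_known(changes, ["source", "version", "maintainer", "urgency"], ["changed-by"])
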
diff --git a/docs/README.new-incoming b/docs/README.new-incoming
new file mode 100644 (file)
index 0000000..1581bb7
--- /dev/null
+++ b/docs/README.new-incoming
@@ -0,0 +1,132 @@
+
+                     Proposed New Incoming System
+                    ============================
+
+This document outlines the proposed new system for handling the
+Incoming directories on ftp-master and non-US.
+
+The present:
+------------
+
+  o incoming is a world writable directory
+
+  o incoming is available to all through http://incoming.debian.org/
+
+  o incoming is processed once a day by dinstall
+
+  o uploads in incoming must have been there > 24 hours before they
+    are REJECTed.  If they are processed before that and have problems
+    they will be SKIPped (with no notification to the maintainer and/or
+    uploader).
+
+The proposed future:
+--------------------
+
+  o There will be 4 incoming directories:
+
+     @ "unchecked"  - where uploads from Queue Daemons and maintainers
+                     initially go
+
+     @ "install"    - where installable packages stay until the daily
+                      dinstall run
+
+     @ "new"       - where NEW packages (and their dependents[1]) requiring
+                     human processing go after being automatically
+                     checked by dinstall.
+
+     @ "byhand"            - where BYHAND packages (and their dependents[1])
+                      requiring human intervention go after being
+                      automatically checked by dinstall.
+
+    In addition there will be 3 support directories:
+
+     @ "reject"            - where rejected uploads go
+
+     @ "done"      - where the .changes files for packages that have been
+                     installed go.
+
+     @ "holding"    - a temporary working area for dinstall to hold
+                     packages while checking them.
+
+  o Packages in 'unchecked' are automatically checked every 15 minutes
+    and are either REJECTed, ACCEPTed (i.e. -> 'install'), or routed to
+    NEW or BYHAND.
+
+  o Only 'unchecked' is locally world-writeable.  The others are all,
+    of course, locally world-readable but only 'install' and 'byhand'
+    are publicly visible on http://incoming.debian.org/
+
+  o 'install' and 'byhand' are made available to the auto-builders so
+     they can build out of them.
+
+  o 'install' is processed once a day as before.
+
+  o list notification and bug closures are changed to be done for
+    ACCEPTs, not INSTALLs. Mail is sent only to the maintainer/uploader
+    on INSTALL.
+    [Rationale: this reduces the load both on our list server and our
+     BTS server; it also gives people better notice of uploads to
+     avoid duplication of work especially, for example, in the case of NMUs]
+    [NB: see [3] for clarifications of when ACCEPT/INSTALL mails are sent]
+
+Why:
+----
+
+  o Security (no more replaceable file races)
+  o Integrity (new http://i.d.o contains only signed (+installable) uploads[2])
+  o Needed for crypto-in-main integration
+  o Allows safe auto-building out of incoming
+  o Allows previously-prohibitively-expensive checks to be added to dinstall
+  o Much faster feedback on packages; no more 48 hour waits before
+    finding out your package has been REJECTed.
+
+What breaks:
+------------
+
+  o uploads of large packages directly to incoming over a slow link
+    can lead to bogus rejections.
+
+    * solution: Ensure the .changes file is the last file to be
+                uploaded (dput and dupload already do this) or upload
+                to a temporary directory then mv them into place with
+                ssh.
+
+  o people who upload packages but then want to retract or replace the
+    upload.
+
+    * solution: mostly "Don't do that then"; i.e. test your uploads
+      properly.  Uploads can still be replaced, simply by uploading a
+      higher versioned replacement.  Total retraction is harder but
+      usually only relevant for NEW packages.
+
+================================================================================
+
+[1] 'Dependents' here means: binaries compiled from the source of
+    BYHAND or NEW uploads.  Due to katie's fascist source-must-exist
+    checking, these binaries must be held back until the BYHAND/NEW
+    uploads are processed.
+
+[2] When this was initially written there was still at least one
+    upload queue which would accept unsigned uploads from any
+    source.  [I've since discovered it's been deactivated, though not,
+    AFAIK, because it allowed unsigned uploads.]
+
+[3]
+             --> reject
+            /
+           /
+unchecked  ---------------------------[*]-------> install ------[+]------> pool
+           \               ^   ^
+            |             /   /
+            |-->   new  --   /
+            |       |[4]    /
+            |       V      /
+            |--> byhand --/
+
+[*] is ACCEPT and when list notification and bug closure occurs
+[+] is INSTALL and when maintainer/uploader notification occurs
+
+[4] This is a corner case, included for completeness; ignore
+    it.  [Boring details: NEW trumps BYHAND, so it's possible for an
+    upload with both BYHAND and NEW components to go from 'unchecked'
+    -> 'new' -> 'byhand' -> 'install']
+
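
The flow in the diagram above - everything enters 'unchecked' and ends up in exactly one of 'reject', 'new', 'byhand' or 'install', with NEW trumping BYHAND per footnote [4] - can be modelled by a toy routing function.  This is purely an illustration of the README's rules, not code from this commit:

    # Toy model of the routing rules described in this README.
    def route_upload(signature_ok, checks_ok, is_new, is_byhand):
        if not (signature_ok and checks_ok):
            return "reject"
        if is_new:                  # NEW trumps BYHAND (footnote [4])
            return "new"
        if is_byhand:
            return "byhand"
        return "install"            # ACCEPT; moved to the pool by the daily run

    assert route_upload(True, True, False, False) == "install"
    assert route_upload(True, True, True, True) == "new"
    assert route_upload(False, True, False, False) == "reject"
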
diff --git a/docs/README.stable-point-release b/docs/README.stable-point-release
new file mode 100644 (file)
index 0000000..eb1116a
--- /dev/null
+++ b/docs/README.stable-point-release
@@ -0,0 +1,30 @@
+Rough Guide to doing Stable Point Releases in Debian
+----------------------------------------------------
+
+o Install, reject and remove packages as directed by the SRM
+o Do anything in proposed-updates/TODO
+o Close any applicable potato bugs (hint: http://bugs.debian.org/cgi-bin/pkgreport.cgi?pkg=ftp.debian.org&include=potato)
+o Update README and dists/README
+o Update the 'Debian<n>.<n>r<n>' symlink in dists/
+o Comment out "Untouchable" in katie.conf[-non-US]
+o Run 'jenna -s'
+o Run 'apt-ftparchive generate apt.conf.stable[-non-US]'
+o Uncomment "Untouchable" in katie.conf[-non-US]
+o Update version fields in katie.conf[-non-US]
+o Update fields in suite table in postgresql
+o Copy tree/directory for stable to apt.conf[-non-US], run ziyi, remove it from apt.conf[-non-US]
+
+Yes, this sucks and more of it should be automated.
+
+update suite set version = '2.2r5' where id = 2;
+update suite set version = '2.2r6' where id = 3;
+update suite set version = '3.0-testing' where id = 4;
+update suite set version = '3.0' where id = 5;
+update suite set version = '3.0-testing-updates' where id = 6;
+update suite set description = 'Debian 2.2r5 Released 10th January 2002' where id = 2;
+update suite set description = 'Proposed Updates for Debian 2.2r5 - Not Released' where id = 3;
+update suite set description = 'Debian 3.0 Testing distribution - Not Released' where id = 4;
+update suite set description = 'Debian 3.0 Unstable - Not Released' where id = 5;
+update suite set description = 'Debian 3.0 Testing distribution updates - Not Released' where id = 6;
+
+
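
Since the suite-table updates above have to be typed into postgresql by hand for every point release, they are an obvious candidate for the automation the README asks for.  A minimal sketch, assuming a Python DB-API driver such as psycopg2 and a database called 'projectb' (both are assumptions, not something documented here), might look like:

    # Hypothetical helper: run the suite-table updates above in one transaction.
    import psycopg2  # assumed driver; the archive's real tooling may differ

    updates = [
        # (version, description, suite id) -- taken from the examples above
        ("2.2r5", "Debian 2.2r5 Released 10th January 2002", 2),
        ("2.2r6", "Proposed Updates for Debian 2.2r5 - Not Released", 3),
    ]

    conn = psycopg2.connect("dbname=projectb")
    try:
        cur = conn.cursor()
        for version, description, suite_id in updates:
            cur.execute("UPDATE suite SET version = %s, description = %s WHERE id = %s",
                        (version, description, suite_id))
        conn.commit()
    finally:
        conn.close()
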
diff --git a/fernanda.py b/fernanda.py
new file mode 100755 (executable)
index 0000000..728ab73
--- /dev/null
+++ b/fernanda.py
@@ -0,0 +1,191 @@
+#!/usr/bin/env python
+
+# Script to automate some parts of checking NEW packages
+# Copyright (C) 2000, 2001  James Troup <james@nocrew.org>
+# $Id: fernanda.py,v 1.1 2002-02-12 23:08:07 troup Exp $
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+################################################################################
+
+# <Omnic> elmo wrote docs?!!?!?!?!?!?!
+# <aj> as if he wasn't scary enough before!!
+# * aj imagines a little red furry toy sitting hunched over a computer
+#   tapping furiously and giggling to himself
+# <aj> eventually he stops, and his heads slowly spins around and you
+#      see this really evil grin and then he sees you, and picks up a
+#      knife from beside the keyboard and throws it at you, and as you
+#      breathe your last breath, he starts giggling again
+# <aj> but i should be telling this to my psychiatrist, not you guys,
+#      right? :)
+
+################################################################################
+
+import errno, os, re, sys
+import utils
+import apt_pkg
+
+################################################################################
+
+Cnf = None;
+projectB = None;
+
+re_package = re.compile(r"^(.+?)_.*");
+re_doc_directory = re.compile(r".*/doc/([^/]*).*");
+
+################################################################################
+
+def usage (exit_code=0):
+    print """Usage: fernanda [PACKAGE]...
+Check NEW package(s).
+
+  -h, --help                 show this help and exit
+
+PACKAGE can be a .changes, .dsc, .deb or .udeb filename."""
+
+    sys.exit(exit_code)
+
+################################################################################
+
+def do_command (command, filename):
+    o = os.popen("%s %s" % (command, filename));
+    print o.read();
+
+def print_copyright (deb_filename):
+    package = re_package.sub(r'\1', deb_filename);
+    o = os.popen("ar p %s data.tar.gz | tar tzvf - | egrep 'usr(/share)?/doc/[^/]*/copyright' | awk '{ print $6 }' | head -n 1" % (deb_filename));
+    copyright = o.read()[:-1];
+
+    if copyright == "":
+        print "WARNING: No copyright found, please check package manually."
+        return;
+
+    doc_directory = re_doc_directory.sub(r'\1', copyright);
+    if package != doc_directory:
+        print "WARNING: wrong doc directory (expected %s, got %s)." % (package, doc_directory);
+        return;
+
+    o = os.popen("ar p %s data.tar.gz | tar xzOf - %s" % (deb_filename, copyright));
+    print o.read();
+
+def check_dsc (dsc_filename):
+    print "---- .dsc file for %s ----" % (dsc_filename);
+    dsc_file = utils.open_file(dsc_filename);
+    for line in dsc_file.readlines():
+        print line[:-1];
+    print;
+
+def check_deb (deb_filename):
+    filename = os.path.basename(deb_filename);
+
+    if filename[-5:] == ".udeb":
+       is_a_udeb = 1;
+    else:
+       is_a_udeb = 0;
+
+    print "---- control file for %s ----" % (filename);
+    do_command ("dpkg -I", deb_filename);
+
+    if is_a_udeb:
+       print "---- skipping lintian check for µdeb ----";
+       print ;
+    else:
+       print "---- lintian check for %s ----" % (filename);
+        do_command ("lintian", deb_filename);
+
+    print "---- contents of %s ----" % (filename);
+    do_command ("dpkg -c", deb_filename);
+
+    if is_a_udeb:
+       print "---- skipping copyright for µdeb ----";
+    else:
+       print "---- copyright of %s ----" % (filename);
+        print_copyright(deb_filename);
+
+    print "---- file listing of %s ----" % (filename);
+    do_command ("ls -l", deb_filename);
+
+def display_changes (changes_filename):
+    print "---- .changes file for %s ----" % (changes_filename);
+    file = utils.open_file (changes_filename);
+    for line in file.readlines():
+       print line[:-1]
+    print ;
+    file.close();
+
+def check_changes (changes_filename):
+    display_changes(changes_filename);
+
+    changes = utils.parse_changes (changes_filename, 0);
+    files = utils.build_file_list(changes, "");
+    for file in files.keys():
+       if file[-4:] == ".deb" or file[-5:] == ".udeb":
+           check_deb(file);
+        if file[-4:] == ".dsc":
+            check_dsc(file);
+        # else: => byhand
+
+def main ():
+    global Cnf, projectB, db_files, waste, excluded;
+
+    Cnf = utils.get_conf()
+
+    Arguments = [('h',"help","Fernanda::Options::Help")];
+    for i in [ "help" ]:
+       if not Cnf.has_key("Frenanda::Options::%s" % (i)):
+           Cnf["Fernanda::Options::%s" % (i)] = "";
+
+    args = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv);
+    Options = Cnf.SubTree("Fernanda::Options")
+
+    if Options["Help"]:
+       usage();
+
+    stdout_fd = sys.stdout;
+
+    for file in args:
+        try:
+            # Pipe output for each argument through less
+            less_fd = os.popen("less -", 'w', 0);
+            sys.stdout = less_fd;
+
+            try:
+                if file[-8:] == ".changes":
+                    check_changes(file);
+                elif file[-4:] == ".deb" or file[-5:] == ".udeb":
+                    check_deb(file);
+                elif file[-4:] == ".dsc":
+                    check_dsc(file);
+                else:
+                    utils.fubar("Unrecognised file type: '%s'." % (file));
+            finally:
+                # Reset stdout here so future less invocations aren't FUBAR
+                less_fd.close();
+                sys.stdout = stdout_fd;
+        except IOError, e:
+            if errno.errorcode[e.errno] == 'EPIPE':
+                utils.warn("[fernanda] Caught EPIPE; skipping.");
+                pass;
+            else:
+                raise;
+        except KeyboardInterrupt:
+            utils.warn("[fernanda] Caught C-c; skipping.");
+            pass;
+
+#######################################################################################
+
+if __name__ == '__main__':
+    main()
+
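
fernanda's trick of temporarily pointing sys.stdout at a pipe to less and restoring it in a finally block generalises well.  A hedged, modernised sketch of the same idea (using subprocess instead of the unbuffered os.popen call above; not part of this commit) could look like:

    # Hypothetical modern rendition of fernanda's "page each report through less" idea.
    import subprocess, sys
    from contextlib import contextmanager

    @contextmanager
    def paged_stdout():
        pager = subprocess.Popen(["less", "-"], stdin=subprocess.PIPE, text=True)
        saved = sys.stdout
        sys.stdout = pager.stdin
        try:
            yield
        finally:
            sys.stdout = saved      # always restore, even if the report raises
            pager.stdin.close()
            pager.wait()

    with paged_stdout():
        print("---- control file for example.deb ----")
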
diff --git a/jennifer b/jennifer
new file mode 100755 (executable)
index 0000000..574302e
--- /dev/null
+++ b/jennifer
@@ -0,0 +1,1155 @@
+#!/usr/bin/env python
+
+# Checks Debian packages from Incoming
+# Copyright (C) 2000, 2001  James Troup <james@nocrew.org>
+# $Id: jennifer,v 1.1 2002-02-12 23:08:07 troup Exp $
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+# Originally based on dinstall by Guy Maor <maor@debian.org>
+
+################################################################################
+
+# Computer games don't affect kids. I mean if Pacman affected our generation as
+# kids, we'd all run around in a darkened room munching pills and listening to
+# repetitive music.
+#         -- Unknown
+
+################################################################################
+
+import FCNTL, errno, fcntl, gzip, os, re, select, shutil, stat, string, sys, time, traceback;
+import apt_inst, apt_pkg;
+import db_access, katie, logging, utils;
+
+from types import *;
+from string import lower;
+
+################################################################################
+
+re_bad_diff = re.compile("^[\-\+][\-\+][\-\+] /dev/null");
+re_is_changes = re.compile (r"(.+?)_(.+?)_(.+?)\.changes$");
+
+################################################################################
+
+# Globals
+jennifer_version = "$Revision: 1.1 $";
+
+Cnf = None;
+Options = None;
+Logger = None;
+Katie = None;
+
+reprocess = 0;
+in_holding = {};
+
+# Aliases to the real vars in the Katie class; hysterical raisins.
+reject_message = "";
+changes = {};
+dsc = {};
+dsc_files = {};
+files = {};
+pkg = {};
+
+###############################################################################
+
+def init():
+    global Cnf, Options, Katie, changes, dsc, dsc_files, files, pkg;
+
+    apt_pkg.init();
+
+    Cnf = apt_pkg.newConfiguration();
+    apt_pkg.ReadConfigFileISC(Cnf,utils.which_conf_file());
+
+    Arguments = [('a',"automatic","Dinstall::Options::Automatic"),
+                 ('h',"help","Dinstall::Options::Help"),
+                 ('n',"no-action","Dinstall::Options::No-Action"),
+                 ('p',"no-lock", "Dinstall::Options::No-Lock"),
+                 ('s',"no-mail", "Dinstall::Options::No-Mail"),
+                 ('V',"version","Dinstall::Options::Version")];
+
+    for i in ["automatic", "help", "no-action", "no-lock", "no-mail",
+              "override-distribution", "version"]:
+        Cnf["Dinstall::Options::%s" % (i)] = "";
+
+    changes_files = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv);
+    Options = Cnf.SubTree("Dinstall::Options")
+
+    Katie = katie.Katie(Cnf);
+
+    changes = Katie.pkg.changes;
+    dsc = Katie.pkg.dsc;
+    dsc_files = Katie.pkg.dsc_files;
+    files = Katie.pkg.files;
+    pkg = Katie.pkg;
+
+    return changes_files;
+
+#########################################################################################
+
+def usage (exit_code=0):
+    print """Usage: dinstall [OPTION]... [CHANGES]...
+  -a, --automatic           automatic run
+  -h, --help                show this help and exit.
+  -n, --no-action           don't do anything
+  -p, --no-lock             don't check lockfile !! for cron.daily only !!
+  -s, --no-mail             don't send any mail
+  -V, --version             display the version number and exit"""
+    sys.exit(exit_code)
+
+#########################################################################################
+
+# Our very own version of commands.getstatusoutput(), hacked to support
+# gpgv's status fd.
+def get_status_output(cmd, status_read, status_write):
+    cmd = ['/bin/sh', '-c', cmd];
+    p2cread, p2cwrite = os.pipe();
+    c2pread, c2pwrite = os.pipe();
+    errout, errin = os.pipe();
+    pid = os.fork();
+    if pid == 0:
+        # Child
+        os.close(0);
+        os.close(1);
+        os.dup(p2cread);
+        os.dup(c2pwrite);
+        os.close(2);
+        os.dup(errin);
+        for i in range(3, 256):
+            if i != status_write:
+                try:
+                    os.close(i);
+                except:
+                    pass;
+        try:
+            os.execvp(cmd[0], cmd);
+        finally:
+            os._exit(1);
+
+    # parent
+    os.close(p2cread)
+    os.dup2(c2pread, c2pwrite);
+    os.dup2(errout, errin);
+
+    output = status = "";
+    while 1:
+        i, o, e = select.select([c2pwrite, errin, status_read], [], []);
+        more_data = [];
+        for fd in i:
+            r = os.read(fd, 8196);
+            if len(r) > 0:
+                more_data.append(fd);
+                if fd == c2pwrite or fd == errin:
+                    output = output + r;
+                elif fd == status_read:
+                    status = status + r;
+                else:
+                    utils.fubar("Unexpected file descriptor [%s] returned from select\n" % (fd));
+        if not more_data:
+            pid, exit_status = os.waitpid(pid, 0)
+            try:
+                os.close(status_write);
+                os.close(status_read);
+                os.close(c2pwrite);
+                os.close(p2cwrite);
+                os.close(errin);
+            except:
+                pass;
+            break;
+
+    return output, status, exit_status;
+
+#########################################################################################
+
+def Dict(**dict): return dict
+
+def prefix_multi_line_string(str, prefix):
+    out = "";
+    for line in string.split(str, '\n'):
+        line = string.strip(line);
+        if line:
+            out = out + "%s %s\n" % (prefix, line);
+    # Strip trailing new line
+    if out:
+        out = out[:-1];
+    return out;
+
+def reject (str, prefix="Rejected: "):
+    global reject_message;
+    if str:
+        reject_message = reject_message + prefix + str + "\n";
+
+#########################################################################################
+
+def check_signature (filename):
+    if not utils.re_taint_free.match(os.path.basename(filename)):
+        reject("!!WARNING!! tainted filename: '%s'." % (filename));
+        return 0;
+
+    status_read, status_write = os.pipe();
+    cmd = "gpgv --status-fd %s --keyring %s --keyring %s %s" \
+          % (status_write, Cnf["Dinstall::PGPKeyring"], Cnf["Dinstall::GPGKeyring"], filename);
+    (output, status, exit_status) = get_status_output(cmd, status_read, status_write);
+
+    # Process the status-fd output
+    keywords = {};
+    bad = internal_error = "";
+    for line in string.split(status, '\n'):
+        line = string.strip(line);
+        if line == "":
+            continue;
+        split = string.split(line);
+        if len(split) < 2:
+            internal_error = internal_error + "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line);
+            continue;
+        (gnupg, keyword) = split[:2];
+        if gnupg != "[GNUPG:]":
+            internal_error = internal_error + "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg);
+            continue;
+        args = split[2:];
+        if keywords.has_key(keyword) and keyword != "NODATA":
+            internal_error = internal_error + "found duplicate status token ('%s')." % (keyword);
+            continue;
+        else:
+            keywords[keyword] = args;
+
+    # If we failed to parse the status-fd output, let's just whine and bail now
+    if internal_error:
+        reject("internal error while performing signature check on %s." % (filename));
+        reject(internal_error, "");
+        reject("Please report the above errors to the Archive maintainers by replying to this mail.", "");
+        return None;
+
+    # Now check for obviously bad things in the processed output
+    if keywords.has_key("SIGEXPIRED"):
+        reject("key used to sign %s has expired." % (filename));
+        bad = 1;
+    if keywords.has_key("KEYREVOKED"):
+        reject("key used to sign %s has been revoked." % (filename));
+        bad = 1;
+    if keywords.has_key("BADSIG"):
+        reject("bad signature on %s." % (filename));
+        bad = 1;
+    if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
+        reject("failed to check signature on %s." % (filename));
+        bad = 1;
+    if keywords.has_key("NO_PUBKEY"):
+        reject("key used to sign %s not found in keyring." % (filename));
+        bad = 1;
+    if keywords.has_key("BADARMOR"):
+        reject("ascii armour of signature was corrupt in %s." % (filename));
+        bad = 1;
+    if keywords.has_key("NODATA"):
+        reject("no signature found in %s." % (filename));
+        bad = 1;
+
+    if bad:
+        return None;
+
+    # Next check gpgv exited with a zero return code
+    if exit_status:
+        reject("gpgv failed while checking %s." % (filename));
+        if string.strip(status):
+            reject(prefix_multi_line_string(status, " [GPG status-fd output:]"), "");
+        else:
+            reject(prefix_multi_line_string(output, " [GPG output:]"), "");
+        return None;
+
+    # Sanity check the good stuff we expect
+    if not keywords.has_key("VALIDSIG"):
+        reject("signature on %s does not appear to be valid [No VALIDSIG]." % (filename));
+        bad = 1;
+    else:
+        args = keywords["VALIDSIG"];
+        if len(args) < 1:
+            reject("internal error while checking signature on %s." % (filename));
+            bad = 1;
+        else:
+            fingerprint = args[0];
+    if not keywords.has_key("GOODSIG"):
+        reject("signature on %s does not appear to be valid [No GOODSIG]." % (filename));
+        bad = 1;
+    if not keywords.has_key("SIG_ID"):
+        reject("signature on %s does not appear to be valid [No SIG_ID]." % (filename));
+        bad = 1;
+
+    # Finally ensure there's not something we don't recognise
+    known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
+                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
+                          NODATA="");
+
+    for keyword in keywords.keys():
+        if not known_keywords.has_key(keyword):
+            reject("found unknown status token '%s' from gpgv with args '%s' in %s." % (keyword, repr(keywords[keyword]), filename));
+            bad = 1;
+
+    if bad:
+        return None;
+    else:
+        return fingerprint;
+
+################################################################################
+
+def copy_to_holding(filename):
+    global in_holding;
+
+    base_filename = os.path.basename(filename);
+
+    dest = Cnf["Dir::QueueHoldingDir"] + '/' + base_filename;
+    try:
+        fd = os.open(dest, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0640);
+        os.close(fd);
+    except OSError, e:
+        # Shouldn't happen, but will if, for example, someone lists a
+        # file twice in the .changes.
+        if errno.errorcode[e.errno] == 'EEXIST':
+            reject("%s already exists in holding area; can not overwrite." % (base_filename));
+            return;
+        raise;
+
+    try:
+        shutil.copy(filename, dest);
+    except IOError, e:
+        # In either case (ENOENT or EPERM) we want to remove the
+        # O_CREAT | O_EXCLed ghost file, so add the file to the list
+        # of 'in holding' even if it's not the real file.
+        if errno.errorcode[e.errno] == 'ENOENT':
+            reject("can not copy %s to holding area: file not found." % (base_filename));
+            os.unlink(dest);
+            return;
+        elif errno.errorcode[e.errno] == 'EACCES':
+            reject("can not copy %s to holding area: read permission denied." % (base_filename));
+            os.unlink(dest);
+            return;
+        raise;
+
+    in_holding[base_filename] = "";
+    return dest;
+
+################################################################################
+
+def clean_holding():
+    global in_holding;
+
+    cwd = os.getcwd();
+    os.chdir(Cnf["Dir::QueueHoldingDir"]);
+    for file in in_holding.keys():
+        if os.path.exists(file):
+            if string.find(file, '/') != -1:
+                utils.fubar("WTF? clean_holding() got a file ('%s') with / in it!" % (file));
+            else:
+                os.unlink(file);
+    in_holding = {};
+    os.chdir(cwd);
+
+################################################################################
+
+def check_changes():
+    filename = pkg.changes_file;
+
+    # Default in case we bail out
+    changes["maintainer822"] = Cnf["Dinstall::MyEmailAddress"];
+    changes["changedby822"] = Cnf["Dinstall::MyEmailAddress"];
+    changes["architecture"] = {};
+
+    # Parse the .changes field into a dictionary
+    try:
+        changes.update(utils.parse_changes(filename, 0));
+    except utils.cant_open_exc:
+        reject("can't read changes file '%s'." % (filename));
+        return 0;
+    except utils.changes_parse_error_exc, line:
+        reject("error parsing changes file '%s', can't grok: %s." % (filename, line));
+        return 0;
+
+    # Parse the Files field from the .changes into another dictionary
+    try:
+        files.update(utils.build_file_list(changes, ""));
+    except utils.changes_parse_error_exc, line:
+        reject("error parsing changes file '%s', can't grok: %s." % (filename, line));
+    except utils.nk_format_exc, format:
+        reject("unknown format '%s' of changes file '%s'." % (format, filename));
+        return 0;
+
+    # Check for mandatory fields
+    for i in ("source", "binary", "architecture", "version", "distribution", "maintainer", "files"):
+        if not changes.has_key(i):
+            reject("Missing field `%s' in changes file." % (i));
+            return 0    # Avoid <undef> errors during later tests
+
+    # Split multi-value fields into a lower-level dictionary
+    for i in ("architecture", "distribution", "binary", "closes"):
+        o = changes.get(i, "")
+        if o != "":
+            del changes[i]
+        changes[i] = {}
+        for j in string.split(o):
+            changes[i][j] = 1
+
+    # Fix the Maintainer: field to be RFC822 compatible
+    (changes["maintainer822"], changes["maintainername"], changes["maintaineremail"]) = utils.fix_maintainer (changes["maintainer"])
+
+    # Fix the Changed-By: field to be RFC822 compatible; if it exists.
+    (changes["changedby822"], changes["changedbyname"], changes["changedbyemail"]) = utils.fix_maintainer(changes.get("changed-by",""));
+
+    # Ensure all the values in Closes: are numbers
+    if changes.has_key("closes"):
+        for i in changes["closes"].keys():
+            if katie.re_isanum.match (i) == None:
+                reject("`%s' from Closes field isn't a number." % (i));
+
+    # Ensure there is a target distribution
+    if changes["distribution"].keys() == []:
+        reject("huh? Distribution field is empty in changes file.");
+
+    # Map frozen to unstable if frozen doesn't exist
+    if changes["distribution"].has_key("frozen") and not Cnf.has_key("Suite::Frozen"):
+        del changes["distribution"]["frozen"]
+        changes["distribution"]["unstable"] = 1;
+        reject("Mapping frozen to unstable.","");
+
+    # Map testing to unstable
+    if changes["distribution"].has_key("testing"):
+        if len(changes["distribution"].keys()) > 1:
+            del changes["distribution"]["testing"];
+            reject("Ignoring testing as a target suite.", "Warning: ");
+        else:
+            reject("invalid distribution 'testing'.");
+
+    # Ensure target distributions exist
+    for i in changes["distribution"].keys():
+        if not Cnf.has_key("Suite::%s" % (i)):
+            reject("Unknown distribution `%s'." % (i));
+
+    # Map unreleased arches from stable to unstable
+    if changes["distribution"].has_key("stable"):
+        for i in changes["architecture"].keys():
+            if not Cnf.has_key("Suite::Stable::Architectures::%s" % (i)):
+                reject("Mapping stable to unstable for unreleased arch %s." % (i),"");
+                del changes["distribution"]["stable"];
+                changes["distribution"]["unstable"] = 1;
+                break;
+
+    # Map arches not being released from frozen to unstable
+    if changes["distribution"].has_key("frozen"):
+        for i in changes["architecture"].keys():
+            if not Cnf.has_key("Suite::Frozen::Architectures::%s" % (i)):
+                reject("Mapping frozen to unstable for non-releasing arch `%s'." % (i),"");
+                del changes["distribution"]["frozen"]
+                changes["distribution"]["unstable"] = 1;
+
+    # Map stable uploads to proposed-updates
+    if changes["distribution"].has_key("stable"):
+        reject("Mapping stable to updates.","");
+        del changes["distribution"]["stable"];
+        changes["distribution"]["proposed-updates"] = 1;
+
+    # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
+    changes["chopversion"] = utils.re_no_epoch.sub('', changes["version"])
+    changes["chopversion2"] = utils.re_no_revision.sub('', changes["chopversion"])
+
+    # Ensure the changes filename matches its contents.
+
+    base_filename = os.path.basename(filename);
+    m = re_is_changes.match(base_filename);
+    # source package name
+    if m:
+        file_source = m.group(1);
+        if changes["source"] != file_source:
+            reject("package name part of changes filename (%s) does not match source field (%s) in the changes file." % (file_source, changes["source"]));
+        # source version
+        file_version = m.group(2);
+        if changes["chopversion"] != file_version:
+            reject("version part of changes filename (%s) does not match version field (%s) in the changes file." % (file_version, changes["chopversion"]));
+    else:
+        reject("invalid changes filename '%s'." % (filename));
+
+    # But ... we can't check architecture because there's no
+    # established policy or practice on how to handle
+    # multi-architecture uploads.  Instead we check what we can and
+    # reject if there's a changes file of the same name already in one
+    # of the queue directories.
+    for dir in [ "Accepted", "Byhand", "Done", "New" ]:
+        if os.path.exists(Cnf["Dir::Queue%sDir" % (dir) ]+'/'+base_filename):
+            reject("a changes file with the same name exists in the %s directory." % (dir));
+
+    return 1;
+
+################################################################################
+
+def check_files():
+    global reprocess
+
+    archive = utils.where_am_i();
+    file_keys = files.keys();
+
+    # if reprocess is 2 we've already done this and we're checking
+    # things again for the new .orig.tar.gz.
+    # [Yes, I'm fully aware of how disgusting this is]
+    if not Options["No-Action"] and reprocess < 2:
+        cwd = os.getcwd();
+        os.chdir(pkg.directory);
+        for file in file_keys:
+            copy_to_holding(file);
+        os.chdir(cwd);
+
+    reprocess = 0;
+
+    for file in file_keys:
+        # Ensure the file does not already exist in one of the accepted directories
+        for dir in [ "Accepted", "Byhand", "New" ]:
+            if os.path.exists(Cnf["Dir::Queue%sDir" % (dir) ]+'/'+file):
+                reject("%s file already exists in the %s directory." % (file, dir));
+        if not utils.re_taint_free.match(file):
+            reject("!!WARNING!! tainted filename: '%s'." % (file));
+        # Check the file is readable
+        if os.access(file,os.R_OK) == 0:
+            # When running in -n, copy_to_holding() won't have
+            # generated the reject_message, so we need to.
+            if Options["No-Action"]:
+                if os.path.exists(file):
+                    reject("Can't read `%s'. [permission denied]" % (file));
+                else:
+                    reject("Can't read `%s'. [file not found]" % (file));
+            files[file]["type"] = "unreadable";
+            continue;
+        # If it's byhand skip remaining checks
+        if files[file]["section"] == "byhand":
+            files[file]["byhand"] = 1;
+            files[file]["type"] = "byhand";
+        # Checks for a binary package...
+        elif utils.re_isadeb.match(file) != None:
+            files[file]["type"] = "deb";
+
+            # Extract package control information
+            try:
+                control = apt_pkg.ParseSection(apt_inst.debExtractControl(utils.open_file(file)));
+            except:
+                reject("%s: debExtractControl() raised %s." % (file, sys.exc_type));
+                # Can't continue, none of the checks on control would work.
+                continue;
+
+            # Check for mandatory fields
+            for field in [ "Package", "Architecture", "Version" ]:
+                if control.Find(field) == None:
+                    reject("%s: No %s field in control." % (file, field));
+
+            # Ensure the package name matches the one given in the .changes
+            if not changes["binary"].has_key(control.Find("Package", "")):
+                reject("%s: control file lists name as `%s', which isn't in changes file." % (file, control.Find("Package", "")));
+
+            # Ensure the architecture of the .deb is one we know about.
+            if not Cnf.has_key("Suite::Unstable::Architectures::%s" % (control.Find("Architecture", ""))):
+                reject("Unknown architecture '%s'." % (control.Find("Architecture", "")));
+
+            # Ensure the architecture of the .deb is one of the ones
+            # listed in the .changes.
+            if not changes["architecture"].has_key(control.Find("Architecture", "")):
+                reject("%s: control file lists arch as `%s', which isn't in changes file." % (file, control.Find("Architecture", "")));
+
+            # Check the section & priority match those given in the .changes (non-fatal)
+            if control.Find("Section") != None and files[file]["section"] != "" and files[file]["section"] != control.Find("Section"):
+                reject("%s control file lists section as `%s', but changes file has `%s'." % (file, control.Find("Section", ""), files[file]["section"]), "Warning: ");
+            if control.Find("Priority") != None and files[file]["priority"] != "" and files[file]["priority"] != control.Find("Priority"):
+                reject("%s control file lists priority as `%s', but changes file has `%s'." % (file, control.Find("Priority", ""), files[file]["priority"]),"Warning: ");
+
+            files[file]["package"] = control.Find("Package");
+            files[file]["architecture"] = control.Find("Architecture");
+            files[file]["version"] = control.Find("Version");
+            files[file]["maintainer"] = control.Find("Maintainer", "");
+            if file[-5:] == ".udeb":
+                files[file]["dbtype"] = "udeb";
+            elif file[-4:] == ".deb":
+                files[file]["dbtype"] = "deb";
+            else:
+                reject("%s is neither a .deb or a .udeb." % (file));
+            files[file]["source"] = control.Find("Source", "");
+            if files[file]["source"] == "":
+                files[file]["source"] = files[file]["package"];
+            # Get the source version
+            source = files[file]["source"];
+            source_version = ""
+            if string.find(source, "(") != -1:
+                m = utils.re_extract_src_version.match(source)
+                source = m.group(1)
+                source_version = m.group(2)
+            if not source_version:
+                source_version = files[file]["version"];
+            files[file]["source package"] = source;
+            files[file]["source version"] = source_version;
+
+            # Ensure the filename matches the contents of the .deb
+            m = utils.re_isadeb.match(file);
+            #  package name
+            file_package = m.group(1);
+            if files[file]["package"] != file_package:
+                reject("%s: package part of filename (%s) does not match package name in the %s (%s)." % (file, file_package, files[file]["dbtype"], files[file]["package"]));
+            epochless_version = utils.re_no_epoch.sub('', control.Find("Version", ""))
+            #  version
+            file_version = m.group(2);
+            if epochless_version != file_version:
+                reject("%s: version part of filename (%s) does not match package version in the %s (%s)." % (file, file_version, files[file]["dbtype"], epochless_version));
+            #  architecture
+            file_architecture = m.group(3);
+            if files[file]["architecture"] != file_architecture:
+                reject("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % (file, file_architecture, files[file]["dbtype"], files[file]["architecture"]));
+
+            # Check for existent source
+            source_version = files[file]["source version"];
+            source_package = files[file]["source package"];
+            if changes["architecture"].has_key("source"):
+                if source_version != changes["version"]:
+                    reject("source version (%s) for %s doesn't match changes version %s." % (source_version, file, changes["version"]));
+            else:
+                # Check in the SQL database
+                if not Katie.source_exists(source_package, source_version):
+                    # Check in one of the other directories
+                    source_epochless_version = utils.re_no_epoch.sub('', source_version);
+                    dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version);
+                    if os.path.exists(Cnf["Dir::QueueByhandDir"] + '/' + dsc_filename):
+                        files[file]["byhand"] = 1;
+                    elif os.path.exists(Cnf["Dir::QueueNewDir"] + '/' + dsc_filename):
+                        files[file]["new"] = 1;
+                    elif not os.path.exists(Cnf["Dir::QueueAcceptedDir"] + '/' + dsc_filename):
+                        reject("no source found for %s %s (%s)." % (source_package, source_version, file));
+
+        # Checks for a source package...
+        else:
+            m = utils.re_issource.match(file);
+            if m != None:
+                files[file]["package"] = m.group(1);
+                files[file]["version"] = m.group(2);
+                files[file]["type"] = m.group(3);
+
+                # Ensure the source package name matches the Source field in the .changes
+                if changes["source"] != files[file]["package"]:
+                    reject("%s: changes file doesn't say %s for Source" % (file, files[file]["package"]));
+
+                # Ensure the source version matches the version in the .changes file
+                if files[file]["type"] == "orig.tar.gz":
+                    changes_version = changes["chopversion2"];
+                else:
+                    changes_version = changes["chopversion"];
+                if changes_version != files[file]["version"]:
+                    reject("%s: should be %s according to changes file." % (file, changes_version));
+
+                # Ensure the .changes lists source in the Architecture field
+                if not changes["architecture"].has_key("source"):
+                    reject("%s: changes file doesn't list `source' in Architecture field." % (file));
+
+                # Check the signature of a .dsc file
+                if files[file]["type"] == "dsc":
+                    dsc["fingerprint"] = check_signature(file);
+
+                files[file]["architecture"] = "source";
+
+            # Not a binary or source package?  Assume byhand...
+            else:
+                files[file]["byhand"] = 1;
+                files[file]["type"] = "byhand";
+
+        # Per-suite file checks
+        files[file]["oldfiles"] = {};
+        for suite in changes["distribution"].keys():
+            # Skip byhand
+            if files[file].has_key("byhand"):
+                continue
+
+            # Ensure the component is valid for the target suite
+            if Cnf.has_key("Suite:%s::Components" % (suite)) and not Cnf.has_key("Suite::%s::Components::%s" % (suite, files[file]["component"])):
+                reject("unknown component `%s' for suite `%s'." % (files[file]["component"], suite));
+                continue
+
+            # See if the package is NEW
+            if not Katie.in_override_p(files[file]["package"], files[file]["component"], suite, files[file].get("dbtype",""), file):
+                files[file]["new"] = 1;
+
+            if files[file]["type"] == "deb":
+                reject(Katie.check_binaries_against_db(file, suite));
+
+            # Validate the component
+            component = files[file]["component"];
+            component_id = db_access.get_component_id(component);
+            if component_id == -1:
+                reject("file '%s' has unknown component '%s'." % (file, component));
+                continue;
+
+            # Validate the priority
+            if string.find(files[file]["priority"],'/') != -1:
+                reject("file '%s' has invalid priority '%s' [contains '/']." % (file, files[file]["priority"]));
+
+            # Check the md5sum & size against existing files (if any)
+            location = Cnf["Dir::PoolDir"];
+            files[file]["location id"] = db_access.get_location_id (location, component, archive);
+
+            files[file]["pool name"] = utils.poolify (changes["source"], files[file]["component"]);
+            files_id = db_access.get_files_id(files[file]["pool name"] + file, files[file]["size"], files[file]["md5sum"], files[file]["location id"]);
+            if files_id == -1:
+                reject("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (file));
+            elif files_id == -2:
+                reject("md5sum and/or size mismatch on existing copy of %s." % (file));
+            files[file]["files id"] = files_id
+
+            # Check for packages that have moved from one component to another
+            if files[file]["oldfiles"].has_key(suite) and files[file]["oldfiles"][suite]["name"] != files[file]["component"]:
+                files[file]["othercomponents"] = files[file]["oldfiles"][suite]["name"];
+
+###############################################################################
+
+def check_dsc ():
+    global reprocess;
+
+    for file in files.keys():
+        if files[file]["type"] == "dsc":
+            # Parse the .dsc file
+            try:
+                dsc.update(utils.parse_changes(file, 1));
+            except utils.cant_open_exc:
+                # if not -n copy_to_holding() will have done this for us...
+                if Options["No-Action"]:
+                    reject("can't read .dsc file '%s'." % (file));
+            except utils.changes_parse_error_exc, line:
+                reject("error parsing .dsc file '%s', can't grok: %s." % (file, line));
+            except utils.invalid_dsc_format_exc, line:
+                reject("syntax error in .dsc file '%s', line %s." % (file, line));
+            # Build up the file list of files mentioned by the .dsc
+            try:
+                dsc_files.update(utils.build_file_list(dsc, 1));
+            except utils.no_files_exc:
+                reject("no Files: field in .dsc file.");
+                continue;
+            except utils.changes_parse_error_exc, line:
+                reject("error parsing .dsc file '%s', can't grok: %s." % (file, line));
+                continue;
+
+            # Enforce mandatory fields
+            for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
+                if not dsc.has_key(i):
+                    reject("Missing field `%s' in dsc file." % (i));
+
+            # The dpkg maintainer from hell strikes again! Bumping the
+            # version number of the .dsc breaks extraction by stable's
+            # dpkg-source.
+            if dsc["format"] != "1.0":
+                reject("""[dpkg-sucks] source package was produced by a broken version
+          of dpkg-dev 1.9.1{3,4}; please rebuild with >= 1.9.15 version
+          installed.""");
+
+            # Ensure the version number in the .dsc matches the version number in the .changes
+            epochless_dsc_version = utils.re_no_epoch.sub('', dsc.get("version"));
+            changes_version = files[file]["version"];
+            if epochless_dsc_version != files[file]["version"]:
+                reject("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version));
+
+            # Ensure source is newer than existing source in target suites
+            reject(Katie.check_source_against_db(file));
+
+            (reject_msg, is_in_incoming) = Katie.check_dsc_against_db(file);
+            reject(reject_msg);
+            if is_in_incoming:
+                if not Options["No-Action"]:
+                    copy_to_holding(is_in_incoming);
+                orig_tar_gz = os.path.basename(is_in_incoming);
+                files[orig_tar_gz] = {};
+                files[orig_tar_gz]["size"] = os.stat(orig_tar_gz)[stat.ST_SIZE];
+                files[orig_tar_gz]["md5sum"] = dsc_files[orig_tar_gz]["md5sum"];
+                files[orig_tar_gz]["section"] = files[file]["section"];
+                files[orig_tar_gz]["priority"] = files[file]["priority"];
+                files[orig_tar_gz]["component"] = files[file]["component"];
+                files[orig_tar_gz]["type"] = "orig.tar.gz";
+                reprocess = 2;
+
+################################################################################
+
+# Some cunning stunt broke dpkg-source in dpkg 1.8{,.1}; detect the
+# resulting bad source packages and reject them.
+
+# Even more amusingly the fix in 1.8.1.1 didn't actually fix the
+# problem just changed the symptoms.
+
+def check_diff ():
+    for filename in files.keys():
+        if files[filename]["type"] == "diff.gz":
+            file = gzip.GzipFile(filename, 'r');
+            for line in file.readlines():
+                if re_bad_diff.search(line):
+                    reject("[dpkg-sucks] source package was produced by a broken version of dpkg-dev 1.8.x; please rebuild with >= 1.8.3 version installed.");
+                    break;
+
+################################################################################
+
+# FIXME: should be a debian specific check called from a hook
+
+def check_urgency ():
+    if changes["architecture"].has_key("source"):
+        if not changes.has_key("urgency"):
+            changes["urgency"] = Cnf["Urgency::Default"];
+        if not Cnf.has_key("Urgency::Valid::%s" % changes["urgency"]):
+            reject("%s is not a valid urgency; it will be treated as %s by testing." % (changes["urgency"], Cnf["Urgency::Default"]), "Warning: ");
+            changes["urgency"] = Cnf["Urgency::Default"];
+        changes["urgency"] = lower(changes["urgency"]);
+
+################################################################################
+
+def check_md5sums ():
+    for file in files.keys():
+        try:
+            file_handle = utils.open_file(file);
+        except utils.cant_open_exc:
+            pass;
+        else:
+            if apt_pkg.md5sum(file_handle) != files[file]["md5sum"]:
+                reject("md5sum check failed for %s." % (file));
+
+################################################################################
+
+# Sanity check the time stamps of files inside debs.
+# [Files in the near future cause ugly warnings and extreme time
+#  travel can cause errors on extraction]
+
+def check_timestamps():
+    class Tar:
+        def __init__(self, future_cutoff, past_cutoff):
+            self.reset();
+            self.future_cutoff = future_cutoff;
+            self.past_cutoff = past_cutoff;
+
+        def reset(self):
+            self.future_files = {};
+            self.ancient_files = {};
+
+        def callback(self, Kind,Name,Link,Mode,UID,GID,Size,MTime,Major,Minor):
+            if MTime > self.future_cutoff:
+                self.future_files[Name] = MTime;
+            if MTime < self.past_cutoff:
+                self.ancient_files[Name] = MTime;
+    ####
+
+    future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"]);
+    past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"));
+    tar = Tar(future_cutoff, past_cutoff);
+    for filename in files.keys():
+        if files[filename]["type"] == "deb":
+            tar.reset();
+            try:
+                deb_file = utils.open_file(filename);
+                apt_inst.debExtract(deb_file,tar.callback,"control.tar.gz");
+                deb_file.seek(0);
+                apt_inst.debExtract(deb_file,tar.callback,"data.tar.gz");
+                #
+                future_files = tar.future_files.keys();
+                if future_files:
+                    num_future_files = len(future_files);
+                    future_file = future_files[0];
+                    future_date = tar.future_files[future_file];
+                    reject("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
+                           % (filename, num_future_files, future_file,
+                              time.ctime(future_date)));
+                #
+                ancient_files = tar.ancient_files.keys();
+                if ancient_files:
+                    num_ancient_files = len(ancient_files);
+                    ancient_file = ancient_files[0];
+                    ancient_date = tar.ancient_files[ancient_file];
+                    reject("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
+                           % (filename, num_ancient_files, ancient_file,
+                              time.ctime(ancient_date)));
+            except:
+                reject("%s: timestamp check failed; caught %s" % (filename, sys.exc_type));
+                raise;
+
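+# Worked example of the cutoffs above (the configuration values are
+# hypothetical): with Dinstall::FutureTimeTravelGrace set to "86400"
+# and Dinstall::PastCutoffYear set to "1984",
+#
+#   future_cutoff = time.time() + 86400                       # one day from now
+#   past_cutoff   = time.mktime(time.strptime("1984", "%Y"))  # 1984-01-01 00:00
+#
+# and any member of control.tar.gz or data.tar.gz whose mtime falls
+# outside [past_cutoff, future_cutoff] gets the .deb rejected.
+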
+################################################################################
+################################################################################
+
+# If any file of an upload has a recent mtime then chances are good
+# the file is still being uploaded.
+
+def upload_too_new():
+    file_list = pkg.files.keys();
+    file_list.extend(pkg.dsc_files.keys());
+    file_list.append(pkg.changes_file);
+    for file in file_list:
+        try:
+            last_modified = time.time()-os.path.getmtime(file);
+            if last_modified < int(Cnf["Dinstall::SkipTime"]):
+                return 1;
+        except:
+            pass;
+    return 0;
+
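+# Illustration for upload_too_new() (hypothetical configuration
+# value): with Dinstall::SkipTime set to "300", an upload with any
+# file modified 60 seconds ago is treated as still in progress
+# (returns 1), while one untouched for ten minutes is processed
+# (returns 0).
+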
+def action ():
+    # changes["distribution"] may not exist in corner cases
+    # (e.g. unreadable changes files)
+    if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
+        changes["distribution"] = {};
+
+    (summary, short_summary) = Katie.build_summaries();
+
+    byhand = new = "";
+    for file in files.keys():
+        if files[file].has_key("byhand"):
+            byhand = 1
+        elif files[file].has_key("new"):
+            new = 1
+
+    (prompt, answer) = ("", "XXX")
+    if Options["No-Action"] or Options["Automatic"]:
+        answer = 'S'
+
+    if string.find(reject_message, "Rejected") != -1:
+        if upload_too_new():
+            print "SKIP (too new)\n" + reject_message,;
+            prompt = "[S]kip, Quit ?";
+        else:
+            print "REJECT\n" + reject_message,;
+            prompt = "[R]eject, Skip, Quit ?";
+            if Options["Automatic"]:
+                answer = 'R';
+    elif new:
+        print "NEW to %s\n%s%s" % (string.join(changes["distribution"].keys(), ", "), reject_message, summary),;
+        prompt = "[N]ew, Skip, Quit ?";
+        if Options["Automatic"]:
+            answer = 'N';
+    elif byhand:
+        print "BYHAND\n" + reject_message + summary,;
+        prompt = "[B]yhand, Skip, Quit ?";
+        if Options["Automatic"]:
+            answer = 'B';
+    else:
+        print "ACCEPT\n" + reject_message + summary,;
+        prompt = "[A]ccept, Skip, Quit ?";
+        if Options["Automatic"]:
+            answer = 'A';
+
+    while string.find(prompt, answer) == -1:
+        print prompt,;
+        answer = utils.our_raw_input()
+        m = katie.re_default_answer.match(prompt)
+        if answer == "":
+            answer = m.group(1)
+        answer = string.upper(answer[:1])
+
+    if answer == 'R':
+        os.chdir (pkg.directory);
+        Katie.do_reject(0, reject_message);
+    elif answer == 'A':
+        accept(summary, short_summary);
+    elif answer == 'B':
+        do_byhand(summary);
+    elif answer == 'N':
+        acknowledge_new (summary);
+    elif answer == 'Q':
+        sys.exit(0)
+
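+# The prompt loop above pulls the default choice out of the prompt via
+# katie.re_default_answer (defined in katie.py as r"\[(.*)\]"), so an
+# empty answer selects the bracketed option.  A minimal illustration:
+#
+#   >>> katie.re_default_answer.match("[A]ccept, Skip, Quit ?").group(1)
+#   'A'
+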
+################################################################################
+
+def accept (summary, short_summary):
+    Katie.accept(summary, short_summary);
+
+    # Check for override disparities
+    if not Cnf["Dinstall::Options::No-Mail"]:
+        Katie.check_override();
+
+    # Finally, remove the originals from the unchecked directory
+    os.chdir (pkg.directory);
+    for file in files.keys():
+        os.unlink(file);
+    os.unlink(pkg.changes_file);
+
+################################################################################
+
+def do_byhand (summary):
+    print "Moving to BYHAND holding area."
+
+    Katie.dump_vars(Cnf["Dir::QueueByhandDir"]);
+
+    file_keys = files.keys();
+
+    # Move all the files into the accepted directory
+    utils.move (pkg.changes_file, Cnf["Dir::QueueByhandDir"]);
+    for file in file_keys:
+        utils.move (file, Cnf["Dir::QueueByhandDir"]);
+
+    # Check for override disparities
+    if not Cnf["Dinstall::Options::No-Mail"]:
+        Katie.Subst["__SUMMARY__"] = summary;
+        Katie.check_override();
+
+    # Finally remove the originals.
+    os.chdir (pkg.directory);
+    for file in file_keys:
+        os.unlink(file);
+    os.unlink(pkg.changes_file);
+
+################################################################################
+
+def acknowledge_new (summary):
+    Subst = Katie.Subst;
+
+    print "Moving to NEW holding area."
+
+    Katie.dump_vars(Cnf["Dir::QueueNewDir"]);
+
+    file_keys = files.keys();
+
+    # Move all the files into the accepted directory
+    utils.move (pkg.changes_file, Cnf["Dir::QueueNewDir"]);
+    for file in file_keys:
+        utils.move (file, Cnf["Dir::QueueNewDir"]);
+
+    if not Options["No-Mail"]:
+        print "Sending new ack.";
+        Subst["__SUMMARY__"] = summary;
+        new_ack_message = utils.TemplateSubst(Subst,open(Cnf["Dir::TemplatesDir"]+"/jennifer.new","r").read());
+        utils.send_mail(new_ack_message,"");
+
+    # Finally remove the originals.
+    os.chdir (pkg.directory);
+    for file in file_keys:
+        os.unlink(file);
+    os.unlink(pkg.changes_file);
+
+################################################################################
+
+# reprocess is necessary for the case of foo_1.2-1 and foo_1.2-2 in
+# Incoming. -1 will reference the .orig.tar.gz, but -2 will not.
+# dsccheckdistrib() can find the .orig.tar.gz but it will not have
+# processed it during its checks of -2.  If -1 has been deleted or
+# otherwise not checked by jennifer, the .orig.tar.gz will not have been
+# checked at all.  To get round this, we force the .orig.tar.gz into
+# the .changes structure and reprocess the .changes file.
+
+def process_it (changes_file):
+    global reprocess, reject_message;
+
+    # Reset some globals
+    reprocess = 1;
+    Katie.init_vars();
+    reject_message = "";
+
+    # Absolutize the filename to avoid the requirement of being in the
+    # same directory as the .changes file.
+    pkg.changes_file = os.path.abspath(changes_file);
+
+    # Remember where we are so we can come back after cd-ing into the
+    # holding directory.
+    pkg.directory = os.getcwd();
+
+    try:
+        # If this is the Real Thing(tm), copy things into a private
+        # holding directory first to avoid replaceable file races.
+        if not Options["No-Action"]:
+            os.chdir(Cnf["Dir::QueueHoldingDir"]);
+            copy_to_holding(pkg.changes_file);
+            # Relativize the filename so we use the copy in holding
+            # rather than the original...
+            pkg.changes_file = os.path.basename(pkg.changes_file);
+        changes["fingerprint"] = check_signature(pkg.changes_file);
+        changes_valid = check_changes();
+        if changes_valid:
+            while reprocess:
+                check_files();
+                check_md5sums();
+                check_dsc();
+                check_diff();
+                check_urgency();
+                check_timestamps();
+        Katie.update_subst(reject_message);
+        action();
+    except SystemExit:
+        raise;
+    except:
+        print "ERROR";
+        traceback.print_exc(file=sys.stdout);
+        pass;
+
+    # Restore previous WD
+    os.chdir(pkg.directory);
+
+###############################################################################
+
+def main():
+    global Cnf, Options, Logger, nmu;
+
+    changes_files = init();
+
+    if Options["Help"]:
+        usage();
+
+    if Options["Version"]:
+        print "jennifer %s" % (jennifer_version);
+        sys.exit(0);
+
+    # -n/--dry-run invalidates some other options which would involve things happening
+    if Options["No-Action"]:
+        Options["Automatic"] = "";
+
+    # Ensure all the arguments we were given are .changes files
+    for file in changes_files[:]:
+        if file[-8:] != ".changes":
+            utils.warn("Ignoring '%s' because it's not a .changes file." % (file));
+            changes_files.remove(file);
+
+    if changes_files == []:
+        utils.fubar("Need at least one .changes file as an argument.");
+
+    # Check that we aren't going to clash with the daily cron job
+
+    if not Options["No-Action"] and os.path.exists("%s/Archive_Maintenance_In_Progress" % (Cnf["Dir::RootDir"])) and not Options["No-Lock"]:
+        utils.fubar("Archive maintenance in progress.  Try again later.");
+
+    # Obtain lock if not in no-action mode and initialize the log
+
+    if not Options["No-Action"]:
+        lock_fd = os.open(Cnf["Dinstall::LockFile"], os.O_RDWR | os.O_CREAT);
+        fcntl.lockf(lock_fd, FCNTL.F_TLOCK);
+        Logger = Katie.Logger = logging.Logger(Cnf, "jennifer");
+
+    # debian-{devel-,}changes@lists.debian.org toggles write access based on this header
+    bcc = "X-Katie: %s" % (jennifer_version);
+    if Cnf.has_key("Dinstall::Bcc"):
+        Katie.Subst["__BCC__"] = bcc + "\nBcc: %s" % (Cnf["Dinstall::Bcc"]);
+    else:
+        Katie.Subst["__BCC__"] = bcc;
+
+
+    # Sort the .changes files so that we process sourceful ones first
+    changes_files.sort(utils.changes_compare);
+
+    # Process the changes files
+    for changes_file in changes_files:
+        print "\n" + changes_file;
+        try:
+            process_it (changes_file);
+        finally:
+            if not Options["No-Action"]:
+                clean_holding();
+
+    accept_count = Katie.accept_count;
+    accept_bytes = Katie.accept_bytes;
+    if accept_count:
+        sets = "set"
+        if accept_count > 1:
+            sets = "sets"
+        sys.stderr.write("Accepted %d package %s, %s.\n" % (accept_count, sets, utils.size_type(int(accept_bytes))));
+        Logger.log(["total",accept_count,accept_bytes]);
+
+    if not Options["No-Action"]:
+        Logger.close();
+
+################################################################################
+
+if __name__ == '__main__':
+    main()
+
diff --git a/katie.py b/katie.py
new file mode 100644 (file)
index 0000000..f877815
--- /dev/null
+++ b/katie.py
@@ -0,0 +1,688 @@
+#!/usr/bin/env python
+
+# Utility functions for katie
+# Copyright (C) 2001  James Troup <james@nocrew.org>
+# $Id: katie.py,v 1.1 2002-02-12 23:08:07 troup Exp $
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+###############################################################################
+
+import cPickle, errno, os, pg, re, stat, string;
+import utils, db_access;
+import apt_inst, apt_pkg;
+
+from types import *;
+from string import lower;
+
+###############################################################################
+
+re_isanum = re.compile (r"^\d+$");
+re_default_answer = re.compile(r"\[(.*)\]");
+re_fdnic = re.compile("\n\n");
+re_bin_only_nmu_of_mu = re.compile("\.\d+\.\d+$");
+re_bin_only_nmu_of_nmu = re.compile("\.\d+$");
+
+###############################################################################
+
+# Convenience wrapper to carry around all the package information in
+
+class Pkg:
+    def __init__(self, **kwds):
+        self.__dict__.update(kwds);
+
+    def update(self, **kwds):
+        self.__dict__.update(kwds);
+
+###############################################################################
+
+class nmu_p:
+    # Read in the group maintainer override file
+    def __init__ (self, Cnf):
+        self.group_maint = {};
+        self.Cnf = Cnf;
+        if Cnf.get("Dinstall::GroupOverrideFilename"):
+            filename = Cnf["Dir::OverrideDir"] + Cnf["Dinstall::GroupOverrideFilename"];
+            file = utils.open_file(filename);
+            for line in file.readlines():
+                line = lower(string.strip(utils.re_comments.sub('', line)));
+                if line != "":
+                    self.group_maint[line] = 1;
+            file.close();
+
+    def is_an_nmu (self, pkg):
+        Cnf = self.Cnf;
+        changes = pkg.changes;
+        dsc = pkg.dsc;
+
+        (dsc_rfc822, dsc_name, dsc_email) = utils.fix_maintainer (lower(dsc.get("maintainer",Cnf["Dinstall::MyEmailAddress"])));
+        # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
+        if dsc_name == lower(changes["maintainername"]) and \
+           (changes["changedby822"] == "" or lower(changes["changedbyname"]) == dsc_name):
+            return 0;
+
+        if dsc.has_key("uploaders"):
+            uploaders = string.split(lower(dsc["uploaders"]), ",");
+            uploadernames = {};
+            for i in uploaders:
+                (rfc822, name, email) = utils.fix_maintainer (string.strip(i));
+                uploadernames[name] = "";
+            if uploadernames.has_key(lower(changes["changedbyname"])):
+                return 0;
+
+        # Some group-maintained packages (e.g. Debian QA) are never NMUs
+        if self.group_maint.has_key(lower(changes["maintaineremail"])):
+            return 0;
+
+        return 1;
+
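+# Rough illustration of the rules above (names are hypothetical): if
+# the .dsc Maintainer and the .changes Changed-By both resolve to
+# "Jane Doe <jane@example.org>", or Changed-By matches an entry in
+# Uploaders, the upload counts as a maintainer upload and is_an_nmu()
+# returns 0; a Changed-By who is neither gets 1, unless the maintainer
+# address appears in the group-maintained override file.
+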
+###############################################################################
+
+class Katie:
+
+    def __init__(self, Cnf):
+        self.Cnf = Cnf;
+        self.values = {};
+        # Read in the group-maint override file
+        self.nmu = nmu_p(Cnf);
+        self.accept_count = 0;
+        self.accept_bytes = 0L;
+        self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
+                       legacy_source_untouchable = {});
+
+        # Initialize the substitution template mapping global
+        Subst = self.Subst = {};
+        Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"];
+        Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"];
+        Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"];
+        Subst["__KATIE_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];
+
+        self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]));
+        db_access.init(Cnf, self.projectB);
+
+    ###########################################################################
+
+    def init_vars (self):
+        for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
+            exec "self.pkg.%s.clear();" % (i);
+        self.pkg.orig_tar_id = None;
+        self.pkg.orig_tar_location = "";
+
+    ###########################################################################
+
+    def update_vars (self):
+        dump_filename = self.pkg.changes_file[:-8]+".katie";
+        dump_file = utils.open_file(dump_filename);
+        p = cPickle.Unpickler(dump_file);
+        for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
+            exec "self.pkg.%s.update(p.load());" % (i);
+        for i in [ "orig_tar_id", "orig_tar_location" ]:
+            exec "self.pkg.%s = p.load();" % (i);
+        dump_file.close();
+
+    ###########################################################################
+
+    # This could just dump the dictionaries as is, but I'd like to avoid
+    # this so there's some idea of what katie & lisa use from jennifer
+
+    def dump_vars(self, dest_dir):
+        for i in [ "changes", "dsc", "files", "dsc_files",
+                   "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
+            exec "%s = self.pkg.%s;" % (i,i);
+        dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".katie");
+        dump_file = utils.open_file(dump_filename, 'w');
+        p = cPickle.Pickler(dump_file, 1);
+        for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
+            exec "%s = {}" % i;
+        ## files
+        for file in files.keys():
+            d_files[file] = {};
+            for i in [ "package", "version", "architecture", "type", "size",
+                       "md5sum", "component", "location id", "source package",
+                       "source version", "maintainer", "dbtype", "files id",
+                       "new", "section", "priority", "oldfiles", "othercomponents" ]:
+                if files[file].has_key(i):
+                    d_files[file][i] = files[file][i];
+        ## changes
+        # Mandatory changes fields
+        for i in [ "distribution", "source", "architecture", "version", "maintainer",
+                   "urgency", "fingerprint" ]:
+            d_changes[i] = changes[i];
+        # Optional changes fields
+        for i in [ "changed-by", "changedby822", "maintainer822", "filecontents" ]:
+            d_changes[i] = changes[i];
+        ## dsc
+        for i in [ "source", "version", "maintainer", "fingerprint" ]:
+            if dsc.has_key(i):
+                d_dsc[i] = dsc[i];
+        ## dsc_files
+        for file in dsc_files.keys():
+            d_dsc_files[file] = {};
+            # Mandatory dsc_files fields
+            for i in [ "size", "md5sum" ]:
+                d_dsc_files[file][i] = dsc_files[file][i];
+            # Optional dsc_files fields
+            for i in [ "files id" ]:
+                if dsc_files[file].has_key(i):
+                    d_dsc_files[file][i] = dsc_files[file][i];
+
+        for i in [ d_changes, d_dsc, d_files, d_dsc_files,
+                   legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
+            p.dump(i);
+        dump_file.close();
+
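+    # Illustration: for a changes file named "foo_1.0-1_i386.changes"
+    # (hypothetical), dump_vars() writes "foo_1.0-1_i386.katie" into
+    # dest_dir and update_vars() later re-reads the same pickle; the
+    # [:-8] slice simply strips the trailing ".changes".
+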
+    ###########################################################################
+
+    # Set up the per-package template substitution mappings
+
+    def update_subst (self, reject_message = ""):
+        Subst = self.Subst;
+        changes = self.pkg.changes;
+        # If jennifer crashed out in the right place, architecture may still be a string.
+        if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
+            changes["architecture"] = { "Unknown" : "" };
+        # and maintainer822 may not exist.
+        if not changes.has_key("maintainer822"):
+            changes["maintainer822"] = self.Cnf["Dinstall::MyEmailAddress"];
+
+        Subst["__ARCHITECTURE__"] = string.join(changes["architecture"].keys(), ' ' );
+        Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file);
+        Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "");
+
+        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
+        if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
+            Subst["__MAINTAINER_FROM__"] = changes["changedby822"];
+            Subst["__MAINTAINER_TO__"] = changes["changedby822"] + ", " + changes["maintainer822"];
+            Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown");
+        else:
+            Subst["__MAINTAINER_FROM__"] = changes["maintainer822"];
+            Subst["__MAINTAINER_TO__"] = changes["maintainer822"];
+            Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown");
+        if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
+            Subst["__MAINTAINER_TO__"] = Subst["__MAINTAINER_TO__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
+
+        Subst["__REJECT_MESSAGE__"] = reject_message;
+        Subst["__SOURCE__"] = changes.get("source", "Unknown");
+        Subst["__VERSION__"] = changes.get("version", "Unknown");
+
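+    # Illustration (hypothetical addresses): for a sourceful upload
+    # with Maintainer "Jane <jane@example.org>" and Changed-By
+    # "Joe <joe@example.org>", __MAINTAINER_FROM__ becomes Joe's
+    # address and __MAINTAINER_TO__ becomes
+    # "Joe <joe@example.org>, Jane <jane@example.org>"; for binary-only
+    # uploads both point at the Maintainer.
+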
+    ###########################################################################
+
+    def build_summaries(self):
+        changes = self.pkg.changes;
+        files = self.pkg.files;
+
+        byhand = summary = new = "";
+
+        # changes["distribution"] may not exist in corner cases
+        # (e.g. unreadable changes files)
+        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
+            changes["distribution"] = {};
+
+        file_keys = files.keys();
+        file_keys.sort();
+        for file in file_keys:
+            if files[file].has_key("byhand"):
+                byhand = 1
+                summary = summary + file + " byhand\n"
+            elif files[file].has_key("new"):
+                new = 1
+                summary = summary + "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
+                if files[file].has_key("othercomponents"):
+                    summary = summary + "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
+                if files[file]["type"] == "deb":
+                    summary = summary + apt_pkg.ParseSection(apt_inst.debExtractControl(utils.open_file(file)))["Description"] + '\n';
+            else:
+                files[file]["pool name"] = utils.poolify (changes["source"], files[file]["component"])
+                destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
+                summary = summary + file + "\n  to " + destination + "\n"
+
+        short_summary = summary;
+
+        # This is for direport's benefit...
+        f = re_fdnic.sub("\n .\n", changes.get("changes",""));
+
+        if byhand or new:
+            summary = summary + "Changes: " + f;
+
+        summary = summary + self.announce(short_summary, 0)
+
+        return (summary, short_summary);
+
+    ###########################################################################
+
+    def announce (self, short_summary, action):
+        Subst = self.Subst;
+        Cnf = self.Cnf;
+        changes = self.pkg.changes;
+        dsc = self.pkg.dsc;
+
+        # Only do announcements for source uploads with a recent dpkg-dev installed
+        if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
+            return ""
+
+        lists_done = {}
+        summary = ""
+        Subst["__SHORT_SUMMARY__"] = short_summary;
+
+        for dist in changes["distribution"].keys():
+            list = Cnf.Find("Suite::%s::Announce" % (dist))
+            if list == "" or lists_done.has_key(list):
+                continue
+            lists_done[list] = 1
+            summary = summary + "Announcing to %s\n" % (list)
+
+            if action:
+                Subst["__ANNOUNCE_LIST_ADDRESS__"] = list;
+                mail_message = utils.TemplateSubst(Subst,open(Cnf["Dir::TemplatesDir"]+"/jennifer.announce","r").read());
+                utils.send_mail (mail_message, "")
+
+        bugs = changes["closes"].keys()
+        bugs.sort()
+        if not self.nmu.is_an_nmu(self.pkg):
+            summary = summary + "Closing bugs: "
+            for bug in bugs:
+                summary = summary + "%s " % (bug)
+                if action:
+                    Subst["__BUG_NUMBER__"] = bug;
+                    if changes["distribution"].has_key("stable"):
+                        Subst["__STABLE_WARNING__"] = """
+    Note that this package is not part of the released stable Debian
+    distribution.  It may have dependencies on other unreleased software,
+    or other instabilities.  Please take care if you wish to install it.
+    The update will eventually make its way into the next released Debian
+    distribution."""
+                    else:
+                        Subst["__STABLE_WARNING__"] = "";
+                    mail_message = utils.TemplateSubst(Subst,open(Cnf["Dir::TemplatesDir"]+"/jennifer.bug-close","r").read());
+                    utils.send_mail (mail_message, "")
+            if action:
+                self.Logger.log(["closing bugs"]+bugs);
+        else:                     # NMU
+            summary = summary + "Setting bugs to severity fixed: "
+            control_message = ""
+            for bug in bugs:
+                summary = summary + "%s " % (bug)
+                control_message = control_message + "tag %s + fixed\n" % (bug)
+            if action and control_message != "":
+                Subst["__CONTROL_MESSAGE__"] = control_message;
+                mail_message = utils.TemplateSubst(Subst,open(Cnf["Dir::TemplatesDir"]+"/jennifer.bug-nmu-fixed","r").read());
+                utils.send_mail (mail_message, "")
+            if action:
+                self.Logger.log(["setting bugs to fixed"]+bugs);
+        summary = summary + "\n"
+
+        return summary
+
+    ###########################################################################
+
+    def accept (self, summary, short_summary):
+        Cnf = self.Cnf;
+        Subst = self.Subst;
+        files = self.pkg.files;
+
+        print "Accepting."
+
+        self.dump_vars(Cnf["Dir::QueueAcceptedDir"]);
+
+        # Move all the files into the accepted directory
+        utils.move(self.pkg.changes_file, Cnf["Dir::QueueAcceptedDir"]);
+        file_keys = files.keys();
+        for file in file_keys:
+            utils.move(file, Cnf["Dir::QueueAcceptedDir"]);
+            self.accept_bytes = self.accept_bytes + float(files[file]["size"])
+        self.accept_count = self.accept_count + 1;
+
+        # Send accept mail, announce to lists, close bugs and check for
+        # override disparities
+        if not Cnf["Dinstall::Options::No-Mail"]:
+            Subst["__SUITE__"] = "";
+            Subst["__SUMMARY__"] = summary;
+            mail_message = utils.TemplateSubst(Subst,open(Cnf["Dir::TemplatesDir"]+"/jennifer.accepted","r").read());
+            utils.send_mail(mail_message, "")
+            self.announce(short_summary, 1)
+
+    ###########################################################################
+
+    def check_override (self):
+        Subst = self.Subst;
+        changes = self.pkg.changes;
+        files = self.pkg.files;
+
+        # Only check section & priority on sourceful uploads
+        if not changes["architecture"].has_key("source"):
+            return;
+
+        summary = "";
+        for file in files.keys():
+            if not files[file].has_key("new") and files[file]["type"] == "deb":
+                section = files[file]["section"];
+                override_section = files[file]["override section"];
+                if lower(section) != lower(override_section) and section != "-":
+                    # Ignore this; it's a common mistake and not worth whining about
+                    if lower(section) == "non-us/main" and lower(override_section) == "non-us":
+                        continue;
+                    summary = summary + "%s: section is overridden from %s to %s.\n" % (file, section, override_section);
+                priority = files[file]["priority"];
+                override_priority = files[file]["override priority"];
+                if priority != override_priority and priority != "-":
+                    summary = summary + "%s: priority is overridden from %s to %s.\n" % (file, priority, override_priority);
+
+        if summary == "":
+            return;
+
+        Subst["__SUMMARY__"] = summary;
+        mail_message = utils.TemplateSubst(Subst,utils.open_file(self.Cnf["Dir::TemplatesDir"]+"/jennifer.override-disparity").read());
+        utils.send_mail (mail_message, "");
+
+    ###########################################################################
+
+    def force_move (self, files):
+        """Forcefully move files from the current directory to the reject
+           directory.  If any file already exists it will be moved to the
+           morgue to make way for the new file."""
+
+        Cnf = self.Cnf
+
+        for file in files:
+            # Skip any files which don't exist or which we don't have permission to copy.
+            if os.access(file,os.R_OK) == 0:
+                continue;
+            dest_file = os.path.join(Cnf["Dir::QueueRejectDir"], file);
+            try:
+                os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
+            except OSError, e:
+                # File exists?  Let's try and move it to the morgue
+                if errno.errorcode[e.errno] == 'EEXIST':
+                    morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueRejectDir"],file);
+                    try:
+                        morgue_file = utils.find_next_free(morgue_file);
+                    except utils.tried_too_hard_exc:
+                        # Something's either gone badly Pete Tong, or
+                        # someone is trying to exploit us.
+                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file));
+                        return;
+                    utils.move(dest_file, morgue_file);
+                    try:
+                        os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
+                    except OSError, e:
+                        # Likewise
+                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file));
+                        return;
+                else:
+                    raise;
+            # If we got here, we own the destination file, so we can
+            # safely overwrite it.
+            utils.move(file, dest_file, 1);
+
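+    # The os.open(..., os.O_RDWR|os.O_CREAT|os.O_EXCL) calls above are
+    # what makes the claim atomic: O_EXCL guarantees the open fails
+    # with EEXIST if the name is already taken, so an existing reject
+    # file is shuffled off to the morgue rather than silently
+    # overwritten.
+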
+    ###########################################################################
+
+    def do_reject (self, manual = 0, reject_message = ""):
+        print "Rejecting.\n"
+
+        Cnf = self.Cnf;
+        Subst = self.Subst;
+        pkg = self.pkg;
+
+        reason_filename = pkg.changes_file[:-8] + ".reason";
+        reject_filename = Cnf["Dir::QueueRejectDir"] + '/' + reason_filename;
+
+        # Move all the files into the reject directory
+        reject_files = pkg.files.keys() + [pkg.changes_file];
+        self.force_move(reject_files);
+
+        # If we fail here someone is probably trying to exploit the race
+        # so let's just raise an exception ...
+        if os.path.exists(reject_filename):
+            os.unlink(reject_filename);
+        fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
+
+        if not manual:
+            Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];
+            Subst["__MANUAL_REJECT_MESSAGE__"] = "";
+            Subst["__CC__"] = "X-Katie-Rejection: automatic (moo)";
+            os.write(fd, reject_message);
+            os.close(fd);
+        else:
+            # Build up the rejection email
+            user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"]);
+
+            Subst["__REJECTOR_ADDRESS__"] = user_email_address;
+            Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message;
+            Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"];
+            reject_mail_message = utils.TemplateSubst(Subst,utils.open_file(Cnf["Dir::TemplatesDir"]+"/katie.rejected").read());
+
+            # Write the rejection email out as the <foo>.reason file
+            os.write(fd, reject_mail_message);
+            os.close(fd);
+
+            # If we weren't given a manual rejection message, spawn an
+            # editor so the user can add one in...
+            if reject_message == "":
+                editor = os.environ.get("EDITOR","vi")
+                result = os.system("%s +6 %s" % (editor, reject_filename))
+                if result != 0:
+                    utils.fubar("editor invocation failed for '%s'!" % (reject_filename), result);
+
+        # Send the rejection mail if appropriate
+        if not Cnf["Dinstall::Options::No-Mail"]:
+            reject_mail_message = utils.TemplateSubst(Subst,utils.open_file(Cnf["Dir::TemplatesDir"]+"/katie.rejected").read());
+            utils.send_mail (reject_mail_message, "");
+
+        self.Logger.log(["rejected", pkg.changes_file]);
+
+    ################################################################################
+
+    # Ensure that source exists somewhere in the archive for the binary
+    # upload being processed.
+    #
+    # (1) exact match                      => 1.0-3
+    # (2) Bin-only NMU of an MU            => 1.0-3.0.1
+    # (3) Bin-only NMU of a sourceful-NMU  => 1.0-3.1.1
+
+    def source_exists (self, package, source_version):
+        q = self.projectB.query("SELECT s.version FROM source s WHERE s.source = '%s'" % (package));
+
+        # Reduce the query results to a list of version numbers
+        ql = map(lambda x: x[0], q.getresult());
+
+        # Try (1)
+        if ql.count(source_version):
+            return 1;
+
+        # Try (2)
+        orig_source_version = re_bin_only_nmu_of_mu.sub('', source_version);
+        if ql.count(orig_source_version):
+            return 1;
+
+        # Try (3)
+        orig_source_version = re_bin_only_nmu_of_nmu.sub('', source_version);
+        if ql.count(orig_source_version):
+            return 1;
+
+        # No source found...
+        return 0;
+
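+    # Worked example of the three cases above (versions hypothetical):
+    #
+    #   (1) binary 1.0-3      matches source 1.0-3 directly
+    #   (2) binary 1.0-3.0.1  -> re_bin_only_nmu_of_mu strips ".0.1"  -> 1.0-3
+    #   (3) binary 1.0-3.1.1  -> re_bin_only_nmu_of_nmu strips ".1"   -> 1.0-3.1
+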
+    ################################################################################
+
+    def in_override_p (self, package, component, suite, binary_type, file):
+        files = self.pkg.files;
+
+        if binary_type == "": # must be source
+            type = "dsc";
+        else:
+            type = binary_type;
+
+        # Override suite name; used for example with proposed-updates
+        if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
+            suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)];
+
+        # Avoid <undef> on unknown distributions
+        suite_id = db_access.get_suite_id(suite);
+        if suite_id == -1:
+            return None;
+        component_id = db_access.get_component_id(component);
+        type_id = db_access.get_override_type_id(type);
+
+        # FIXME: nasty non-US specific hack
+        if lower(component[:7]) == "non-us/":
+            component = component[7:];
+
+        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
+                           % (package, suite_id, component_id, type_id));
+        result = q.getresult();
+        # If checking for a source package fall back on the binary override type
+        if type == "dsc" and not result:
+            type_id = db_access.get_override_type_id("deb");
+            q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
+                               % (package, suite_id, component_id, type_id));
+            result = q.getresult();
+
+        # Remember the section and priority so we can check them later if appropriate
+        if result != []:
+            files[file]["override section"] = result[0][0];
+            files[file]["override priority"] = result[0][1];
+
+        return result;
+
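+    # Return value sketch: None when the suite is unknown, [] when no
+    # override entry exists, otherwise a list of (section, priority)
+    # rows, e.g. [("utils", "optional")] (values illustrative); the
+    # first row is also cached as files[file]["override section"] and
+    # files[file]["override priority"] for check_override() to use.
+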
+    ################################################################################
+
+    def reject (self, str, prefix="Rejected: "):
+        if str:
+            self.reject_message = self.reject_message + prefix + str + "\n";
+
+    def check_binaries_against_db(self, file, suite):
+        self.reject_message = "";
+        files = self.pkg.files;
+
+        # Find any old binary packages
+        q = self.projectB.query("SELECT b.id, b.version, f.filename, l.path, c.name  FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f WHERE b.package = '%s' AND s.suite_name = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all') AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id AND f.location = l.id AND l.component = c.id AND b.file = f.id"
+                           % (files[file]["package"], suite, files[file]["architecture"]))
+        for oldfile in q.dictresult():
+            files[file]["oldfiles"][suite] = oldfile;
+            # Check versions [NB: per-suite only; no cross-suite checking done (yet)]
+            if apt_pkg.VersionCompare(files[file]["version"], oldfile["version"]) != 1:
+                reject("%s Old version `%s' >= new version `%s'." % (file, oldfile["version"], files[file]["version"]));
+        # Check for any existing copies of the file
+        q = self.projectB.query("SELECT b.id FROM binaries b, architecture a WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s' AND a.id = b.architecture" % (files[file]["package"], files[file]["version"], files[file]["architecture"]))
+        if q.getresult() != []:
+            reject("can not overwrite existing copy of '%s' already in the archive." % (file));
+
+        return self.reject_message;
+
+    ################################################################################
+
+    def check_source_against_db(self, file):
+        """Ensure source is newer than existing source in target suites."""
+        self.reject_message = "";
+        changes = self.pkg.changes;
+        dsc = self.pkg.dsc;
+
+        package = dsc.get("source");
+        new_version = dsc.get("version");
+        for suite in changes["distribution"].keys():
+            q = self.projectB.query("SELECT s.version FROM source s, src_associations sa, suite su WHERE s.source = '%s' AND su.suite_name = '%s' AND sa.source = s.id AND sa.suite = su.id"
+                               % (package, suite));
+            ql = map(lambda x: x[0], q.getresult());
+            for old_version in ql:
+                if apt_pkg.VersionCompare(new_version, old_version) != 1:
+                    reject("%s: Old version `%s' >= new version `%s'." % (file, old_version, new_version));
+        return self.reject_message;
+
+    ################################################################################
+
+    def check_dsc_against_db(self, file):
+        self.reject_message = "";
+        files = self.pkg.files;
+        dsc_files = self.pkg.dsc_files;
+        legacy_source_untouchable = self.pkg.legacy_source_untouchable;
+        orig_tar_gz = None;
+
+        # Try and find all files mentioned in the .dsc.  This has
+        # to work harder to cope with the multiple possible
+        # locations of an .orig.tar.gz.
+        for dsc_file in dsc_files.keys():
+            if files.has_key(dsc_file):
+                actual_md5 = files[dsc_file]["md5sum"];
+                actual_size = int(files[dsc_file]["size"]);
+                found = "%s in incoming" % (dsc_file)
+                # Check the file does not already exist in the archive
+                q = self.projectB.query("SELECT f.id FROM files f, location l WHERE (f.filename ~ '/%s$' OR f.filename = '%s') AND l.id = f.location" % (utils.regex_safe(dsc_file), dsc_file));
+
+                # "It has not broken them.  It has fixed a
+                # brokenness.  Your crappy hack exploited a bug in
+                # the old dinstall.
+                #
+                # "(Come on!  I thought it was always obvious that
+                # one just doesn't release different files with
+                # the same name and version.)"
+                #                        -- ajk@ on d-devel@l.d.o
+
+                if q.getresult() != []:
+                    reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file));
+            elif dsc_file[-12:] == ".orig.tar.gz":
+                # Check in the pool
+                q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE (f.filename ~ '/%s$' OR f.filename = '%s') AND l.id = f.location" % (utils.regex_safe(dsc_file), dsc_file));
+                ql = q.getresult();
+
+                if ql != []:
+                    # Unfortunately, we may get more than one
+                    # match here if, for example, the package was
+                    # in potato but had a -sa upload in woody.  So
+                    # we need to choose the right one.
+
+                    x = ql[0]; # default to something sane in case we don't match any or have only one
+
+                    if len(ql) > 1:
+                        for i in ql:
+                            old_file = i[0] + i[1];
+                            actual_md5 = apt_pkg.md5sum(utils.open_file(old_file));
+                            actual_size = os.stat(old_file)[stat.ST_SIZE];
+                            if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
+                                x = i;
+                            else:
+                                legacy_source_untouchable[i[3]] = "";
+
+                    old_file = x[0] + x[1];
+                    actual_md5 = apt_pkg.md5sum(utils.open_file(old_file));
+                    actual_size = os.stat(old_file)[stat.ST_SIZE];
+                    found = old_file;
+                    suite_type = x[2];
+                    dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
+                    # See install() in katie...
+                    self.pkg.orig_tar_id = x[3];
+                    if suite_type == "legacy" or suite_type == "legacy-mixed":
+                        self.pkg.orig_tar_location = "legacy";
+                    else:
+                        self.pkg.orig_tar_location = x[4];
+                else:
+
+                    # Not there? Check in Incoming...
+                    # [See comment above jennifer's process_it() for
+                    # explanation of why this is necessary...]
+                    orig_tar_gz = self.pkg.directory + '/' + dsc_file;
+                    if os.path.exists(orig_tar_gz):
+                        return (self.reject_message, orig_tar_gz);
+                    else:
+                        reject("%s refers to %s, but I can't find it in Incoming or in the pool." % (file, dsc_file));
+                        continue;
+            else:
+                reject("%s refers to %s, but I can't find it in Incoming." % (file, dsc_file));
+                continue;
+            if actual_md5 != dsc_files[dsc_file]["md5sum"]:
+                reject("md5sum for %s doesn't match %s." % (found, file));
+            if actual_size != int(dsc_files[dsc_file]["size"]):
+                reject("size for %s doesn't match %s." % (found, file));
+
+        return (self.reject_message, orig_tar_gz);
diff --git a/lisa b/lisa
new file mode 100755 (executable)
index 0000000..7d600f1
--- /dev/null
+++ b/lisa
@@ -0,0 +1,758 @@
+#!/usr/bin/env python
+
+# Handles NEW and BYHAND packages
+# Copyright (C) 2001  James Troup <james@nocrew.org>
+# $Id: lisa,v 1.1 2002-02-12 23:08:07 troup Exp $
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+################################################################################
+
+# 23:12|<aj> I will not hush!
+# 23:12|<elmo> :>
+# 23:12|<aj> Where there is injustice in the world, I shall be there!
+# 23:13|<aj> I shall not be silenced!
+# 23:13|<aj> The world shall know!
+# 23:13|<aj> The world *must* know!
+# 23:13|<elmo> oh dear, he's gone back to powerpuff girls... ;-)
+# 23:13|<aj> yay powerpuff girls!!
+# 23:13|<aj> buttercup's my favourite, who's yours?
+# 23:14|<aj> you're backing away from the keyboard right now aren't you?
+# 23:14|<aj> *AREN'T YOU*?!
+# 23:15|<aj> I will not be treated like this.
+# 23:15|<aj> I shall have my revenge.
+# 23:15|<aj> I SHALL!!!
+
+################################################################################
+
+# TODO
+# ----
+
+# We don't check error codes very thoroughly; the old 'trust jennifer'
+# chestnut... db_access calls in particular.
+
+# Possible TODO
+# -------------
+
+# Handle multiple different section/priorities (?)
+# hardcoded debianness (debian-installer, source priority etc.) (?)
+# Slang/ncurses interface (?)
+# write changed sections/priority back to katie for later processing (?)
+
+################################################################################
+
+import errno, os, readline, string, stat, sys, tempfile;
+import apt_pkg, apt_inst;
+import db_access, fernanda, katie, logging, utils;
+
+# Globals
+lisa_version = "$Revision: 1.1 $";
+
+Cnf = None;
+Options = None;
+Katie = None;
+projectB = None;
+Logger = None;
+
+Priorities = None;
+Sections = None;
+
+################################################################################
+################################################################################
+################################################################################
+
+def determine_new (changes, files):
+    new = {};
+
+    # Build up a list of potentially new things
+    for file in files.keys():
+        f = files[file];
+        # Skip byhand elements
+        if f["type"] == "byhand":
+            continue;
+        pkg = f["package"];
+        priority = f["priority"];
+        section = f["section"];
+        type = get_type(f);
+        component = f["component"];
+
+        if type == "dsc":
+            priority = "source";
+        if not new.has_key(pkg):
+            new[pkg] = {};
+            new[pkg]["priority"] = priority;
+            new[pkg]["section"] = section;
+            new[pkg]["type"] = type;
+            new[pkg]["component"] = component;
+            new[pkg]["files"] = [];
+        else:
+            old_type = new[pkg]["type"];
+            if old_type != type:
+                # source gets trumped by deb or udeb
+                if old_type == "dsc":
+                    new[pkg]["priority"] = priority;
+                    new[pkg]["section"] = section;
+                    new[pkg]["type"] = type;
+                    new[pkg]["component"] = component;
+        new[pkg]["files"].append(file);
+
+    for suite in changes["distribution"].keys():
+        suite_id = db_access.get_suite_id(suite);
+        for pkg in new.keys():
+            component_id = db_access.get_component_id(new[pkg]["component"]);
+            type_id = db_access.get_override_type_id(new[pkg]["type"]);
+            q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id));
+            ql = q.getresult();
+            if ql:
+                for file in new[pkg]["files"]:
+                    if files[file].has_key("new"):
+                        del files[file]["new"];
+                del new[pkg];
+
+    return new;
+
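+# Shape of the returned dictionary for a hypothetical upload:
+#
+#   { "foo": { "priority": "optional", "section": "utils",
+#              "type": "deb", "component": "main",
+#              "files": [ "foo_1.0-1_i386.deb" ] } }
+#
+# Entries already covered by the override table are dropped again
+# before the dictionary is returned.
+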
+################################################################################
+
+# Sort by 'have source', by ctime, by source name, by source version number, by filename
+
+def changes_compare_by_time (a, b):
+    try:
+        a_changes = utils.parse_changes(a, 0)
+    except:
+        return -1;
+
+    try:
+        b_changes = utils.parse_changes(b, 0)
+    except:
+        return 1;
+
+    utils.cc_fix_changes (a_changes);
+    utils.cc_fix_changes (b_changes);
+
+    # Sort by 'have source'
+
+    a_has_source = a_changes["architecture"].get("source")
+    b_has_source = b_changes["architecture"].get("source")
+    if a_has_source and not b_has_source:
+        return -1;
+    elif b_has_source and not a_has_source:
+        return 1;
+
+    # Sort by ctime
+    a_ctime = os.stat(a)[stat.ST_CTIME];
+    b_ctime = os.stat(b)[stat.ST_CTIME];
+    q = cmp (a_ctime, b_ctime);
+    if q:
+        return q;
+
+    # Sort by source name
+
+    a_source = a_changes.get("source");
+    b_source = b_changes.get("source");
+    q = cmp (a_source, b_source);
+    if q:
+        return q;
+
+    # Sort by source version
+
+    a_version = a_changes.get("version");
+    b_version = b_changes.get("version");
+    q = apt_pkg.VersionCompare(a_version, b_version);
+    if q:
+        return q
+
+    # Fall back to sort by filename
+
+    return cmp(a, b);
+
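+# Usage sketch (the call itself is not in this hunk): as a cmp-style
+# comparator this can drive list.sort() directly, e.g.
+#
+#   changes_files.sort(changes_compare_by_time)
+#
+# so sourceful uploads sort first, then by ctime, source name, version
+# and finally filename.
+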
+################################################################################
+
+class Section_Completer:
+    def __init__ (self):
+        self.sections = [];
+        q = projectB.query("SELECT section FROM section");
+        for i in q.getresult():
+            self.sections.append(i[0]);
+
+    def complete(self, text, state):
+        if state == 0:
+            self.matches = [];
+            n = len(text);
+            for word in self.sections:
+                if word[:n] == text:
+                    self.matches.append(word);
+        try:
+            return self.matches[state]
+        except IndexError:
+            return None
+
+############################################################
+
+class Priority_Completer:
+    def __init__ (self):
+        self.priorities = [];
+        q = projectB.query("SELECT priority FROM priority");
+        for i in q.getresult():
+            self.priorities.append(i[0]);
+
+    def complete(self, text, state):
+        if state == 0:
+            self.matches = [];
+            n = len(text);
+            for word in self.priorities:
+                if word[:n] == text:
+                    self.matches.append(word);
+        try:
+            return self.matches[state]
+        except IndexError:
+            return None
+
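+# These completers plug straight into readline; edit_index() below does
+#
+#   readline.set_completer(Sections.complete)
+#
+# to enable tab-completion of section names while prompting, and
+# readline.set_completer(None) to switch completion off again.
+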
+################################################################################
+
+def check_valid (new):
+    for pkg in new.keys():
+        section = new[pkg]["section"];
+        priority = new[pkg]["priority"];
+        type = new[pkg]["type"];
+        new[pkg]["section id"] = db_access.get_section_id(section);
+        new[pkg]["priority id"] = db_access.get_priority_id(new[pkg]["priority"]);
+        # Sanity checks
+        if (section == "debian-installer" and type != "udeb") or \
+           (section != "debian-installer" and type == "udeb"):
+            new[pkg]["section id"] = -1;
+        if (priority == "source" and type != "dsc") or \
+           (priority != "source" and type == "dsc"):
+            new[pkg]["priority id"] = -1;
+
+################################################################################
+
+def print_new (new, indexed, file=sys.stdout):
+    check_valid(new);
+    ret_code = 0;
+    index = 0;
+    for pkg in new.keys():
+        index = index + 1;
+        section = new[pkg]["section"];
+        priority = new[pkg]["priority"];
+        if new[pkg]["section id"] == -1:
+            section = section + "[!]";
+            ret_code = 1;
+        if new[pkg]["priority id"] == -1:
+            priority = priority + "[!]";
+            ret_code = 1;
+        if indexed:
+            line = "(%s): %-20s %-20s %-20s" % (index, pkg, priority, section);
+        else:
+            line = "%-20s %-20s %-20s" % (pkg, priority, section);
+        line = string.strip(line)+'\n';
+        file.write(line);
+    return ret_code;
+
+################################################################################
+
+def get_type (f):
+    # Determine the type
+    if f.has_key("dbtype"):
+        type = f["dbtype"];
+    elif f["type"] == "orig.tar.gz" or f["type"] == "tar.gz" or f["type"] == "diff.gz" or f["type"] == "dsc":
+        type = "dsc";
+    else:
+        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (type));
+
+    # Validate the override type
+    type_id = db_access.get_override_type_id(type);
+    if type_id == -1:
+        utils.fubar("invalid type (%s) for new.  Say wha?" % (type));
+
+    return type;
+
+################################################################################
+
+def index_range (index):
+    if index == 1:
+        return "1";
+    else:
+        return "1-%s" % (index);
+
+################################################################################
+################################################################################
+
+def spawn_editor (new):
+    # Write the current data to a temporary file
+    temp_filename = tempfile.mktemp();
+    fd = os.open(temp_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700);
+    os.close(fd);
+    temp_file = utils.open_file(temp_filename, 'w');
+    print_new (new, 0, temp_file);
+    temp_file.close();
+    # Spawn an editor on that file
+    editor = os.environ.get("EDITOR","vi")
+    result = os.system("%s %s" % (editor, temp_filename))
+    if result != 0:
+        utils.fubar ("vi invocation failed for `%s'!" % (temp_filename), result)
+    # Read the (edited) data back in
+    file = utils.open_file(temp_filename);
+    for line in file.readlines():
+        line = string.strip(line[:-1]);
+        if line == "":
+            continue;
+        s = string.split(line);
+        # Pad the list if necessary
+        s[len(s):3] = [None] * (3-len(s));
+        (pkg, priority, section) = s[:3];
+        if not new.has_key(pkg):
+            utils.warn("Ignoring unknown package '%s'" % (pkg));
+        else:
+            # Strip off any invalid markers; print_new will re-add them.
+            if section[-3:] == "[!]":
+                section = section[:-3];
+            if priority[-3:] == "[!]":
+                priority = priority[:-3];
+            for file in new[pkg]["files"]:
+                Katie.pkg.files[file]["section"] = section;
+                Katie.pkg.files[file]["priority"] = priority;
+            new[pkg]["section"] = section;
+            new[pkg]["priority"] = priority;
+    os.unlink(temp_filename);
+
+################################################################################
+
+def edit_index (new, index):
+    priority = new[index]["priority"]
+    section = new[index]["section"]
+    type = new[index]["type"];
+    done = 0
+    while not done:
+        print string.join([index, priority, section], '\t');
+
+        answer = "XXX";
+        if type != "dsc":
+            prompt = "[B]oth, Priority, Section, Done ? ";
+        else:
+            prompt = "[S]ection, Done ? ";
+        edit_priority = edit_section = 0;
+
+        while string.find(prompt, answer) == -1:
+            answer = raw_input(prompt);
+            m = katie.re_default_answer.match(prompt)
+            if answer == "":
+                answer = m.group(1)
+            answer = string.upper(answer[:1])
+
+        if answer == 'P':
+            edit_priority = 1;
+        elif answer == 'S':
+            edit_section = 1;
+        elif answer == 'B':
+            edit_priority = edit_section = 1;
+        elif answer == 'D':
+            done = 1;
+
+        # Edit the priority
+        if edit_priority:
+            readline.set_completer(Priorities.complete);
+            got_priority = 0;
+            while not got_priority:
+                new_priority = string.strip(raw_input("New priority: "));
+                if Priorities.priorities.count(new_priority) == 0:
+                    print "E: '%s' is not a valid priority, try again." % (new_priority);
+                else:
+                    got_priority = 1;
+                    priority = new_priority;
+
+        # Edit the section
+        if edit_section:
+            readline.set_completer(Sections.complete);
+            got_section = 0;
+            while not got_section:
+                new_section = string.strip(raw_input("New section: "));
+                if Sections.sections.count(new_section) == 0:
+                    print "E: '%s' is not a valid section, try again." % (new_section);
+                else:
+                    got_section = 1;
+                    section = new_section;
+
+        # Reset the readline completer
+        readline.set_completer(None);
+
+    for file in new[index]["files"]:
+        Katie.pkg.files[file]["section"] = section;
+        Katie.pkg.files[file]["priority"] = priority;
+    new[index]["priority"] = priority;
+    new[index]["section"] = section;
+    return new;
+
+################################################################################
+
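+# Override editing loop: list the NEW entries and hand off to an
+# external editor or to per-entry editing.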
+def edit_overrides (new):
+    print;
+    done = 0
+    while not done:
+        print_new (new, 1);
+        new_index = {};
+        index = 0;
+        for i in new.keys():
+            index = index + 1;
+            new_index[index] = i;
+
+        prompt = "(%s) edit override <n>, Editor, Done ? " % (index_range(index));
+
+        got_answer = 0
+        while not got_answer:
+            answer = raw_input(prompt)
+            # Don't truncate numeric answers, otherwise indices above 9 can't be selected
+            if not katie.re_isanum.match(answer):
+                answer = string.upper(answer[:1])
+            if answer == "E" or answer == "D":
+                got_answer = 1;
+            elif katie.re_isanum.match (answer):
+                answer = int(answer);
+                if (answer < 1) or (answer > index):
+                    print "%s is not a valid index (%s).  Please retry." % (index_range(index), answer);
+                else:
+                    got_answer = 1;
+
+        if answer == 'E':
+            spawn_editor(new);
+        elif answer == 'D':
+            done = 1;
+        else:
+            edit_index (new, new_index[answer]);
+
+    return new;
+
+################################################################################
+
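+# Run fernanda's checks over the changes file and any new deb/dsc files, paged through less.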
+def check_pkg ():
+    try:
+        less_fd = os.popen("less -", 'w', 0);
+        stdout_fd = sys.stdout;
+        try:
+            sys.stdout = less_fd;
+            fernanda.display_changes(Katie.pkg.changes_file);
+            files = Katie.pkg.files;
+            for file in files.keys():
+                if files[file].has_key("new"):
+                    type = files[file]["type"];
+                    if type == "deb":
+                        fernanda.check_deb(file);
+                    elif type == "dsc":
+                        fernanda.check_dsc(file);
+        finally:
+            sys.stdout = stdout_fd;
+    except IOError, e:
+        if errno.errorcode[e.errno] == 'EPIPE':
+            utils.warn("[fernanda] Caught EPIPE; skipping.");
+            pass;
+        else:
+            raise;
+    except KeyboardInterrupt:
+        utils.warn("[fernanda] Caught C-c; skipping.");
+        pass;
+
+################################################################################
+
+## FIXME: horribly Debian specific
+
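+# Mail the BXA export notification, listing the descriptions of the new binary packages.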
+def do_bxa_notification():
+    files = Katie.pkg.files;
+    summary = "";
+    for file in files.keys():
+        if files[file]["type"] == "deb":
+            control = apt_pkg.ParseSection(apt_inst.debExtractControl(utils.open_file(file)));
+            summary = summary + "\n";
+            summary = summary + "Package: %s\n" % (control.Find("Package"));
+            summary = summary + "Description: %s\n" % (control.Find("Description"));
+    Katie.Subst["__BINARY_DESCRIPTIONS__"] = summary;
+    bxa_mail = utils.TemplateSubst(Katie.Subst,open(Cnf["Dir::TemplatesDir"]+"/lisa.bxa_notification","r").read());
+    utils.send_mail(bxa_mail,"");
+
+################################################################################
+
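+# Insert override entries for the NEW packages into each target suite and notify BXA if configured.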
+def add_overrides (new):
+    changes = Katie.pkg.changes;
+    files = Katie.pkg.files;
+
+    projectB.query("BEGIN WORK");
+    for suite in changes["distribution"].keys():
+        suite_id = db_access.get_suite_id(suite);
+        for pkg in new.keys():
+            component_id = db_access.get_component_id(new[pkg]["component"]);
+            type_id = db_access.get_override_type_id(new[pkg]["type"]);
+            priority_id = new[pkg]["priority id"];
+            section_id = new[pkg]["section id"];
+            projectB.query("INSERT INTO override (suite, component, type, package, priority, section) VALUES (%s, %s, %s, '%s', %s, %s)" % (suite_id, component_id, type_id, pkg, priority_id, section_id));
+            for file in new[pkg]["files"]:
+                if files[file].has_key("new"):
+                    del files[file]["new"];
+            del new[pkg];
+
+    projectB.query("COMMIT WORK");
+
+    if Cnf.FindI("Dinstall::BXANotify"):
+        do_bxa_notification();
+
+################################################################################
+
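+# Handle the NEW parts of an upload: remap suites to their override
+# suites, validate them, then prompt until overrides have been added or
+# the upload is rejected or skipped.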
+def do_new():
+    print "NEW\n";
+    files = Katie.pkg.files;
+    changes = Katie.pkg.changes;
+
+    # Fix up the list of target suites
+    for suite in changes["distribution"].keys():
+        override = Cnf.Find("Suite::%s::OverrideSuite" % (suite));
+        if override:
+            del changes["distribution"][suite];
+            changes["distribution"][override] = 1;
+    # Validate suites
+    for suite in changes["distribution"].keys():
+        suite_id = db_access.get_suite_id(suite);
+        if suite_id == -1:
+            utils.fubar("%s has invalid suite '%s' (possibly overriden).  say wha?" % (changes, suite));
+
+    # The main NEW processing loop
+    done = 0;
+    while not done:
+        # Find out what's new
+        new = determine_new(changes, files);
+
+        if not new:
+            break;
+
+        answer = "XXX";
+        if Options["No-Action"] or Options["Automatic"]:
+            answer = 'S'
+        if Options["Automatic"]:
+            answer = 'A';
+
+        broken = print_new(new, 0);
+        prompt = "";
+        if not broken:
+            prompt = "[A]dd overrides, ";
+        else:
+            print "W: [!] marked entries must be fixed before package can be processed.";
+            if answer == 'A':
+                answer = 'E';
+        prompt = prompt + "Edit overrides, Check, Manual reject, Skip, Quit ?";
+
+        while string.find(prompt, answer) == -1:
+            answer = raw_input(prompt)
+            m = katie.re_default_answer.match(prompt)
+            if answer == "":
+                answer = m.group(1)
+            answer = string.upper(answer[:1])
+
+        if answer == 'A':
+            done = add_overrides (new);
+        elif answer == 'C':
+            check_pkg();
+        elif answer == 'E':
+            new = edit_overrides (new);
+        elif answer == 'M':
+            Katie.do_reject(1, Options["Manual-Reject"]);
+            os.unlink(Katie.pkg.changes_file[:-8]+".katie");
+            done = 1;
+        elif answer == 'S':
+            done = 1;
+        elif answer == 'Q':
+            sys.exit(0)
+
+################################################################################
+################################################################################
+################################################################################
+
+def usage (exit_code=0):
+    print """Usage: lisa [OPTION]... [CHANGES]...
+  -a, --automatic           automatic run
+  -h, --help                show this help and exit.
+  -m, --manual-reject=MSG   manual reject with MSG
+  -n, --no-action           don't do anything
+  -s, --sort=TYPE           sort type ('time' or 'normal')
+  -V, --version             display the version number and exit"""
+    sys.exit(exit_code)
+
+################################################################################
+
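+# Parse the configuration and command line options, and set up the
+# global Katie, projectB and readline completer objects.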
+def init():
+    global Cnf, Options, Logger, Katie, projectB, Sections, Priorities;
+
+    Cnf = utils.get_conf();
+
+    Arguments = [('a',"automatic","Lisa::Options::Automatic"),
+                 ('h',"help","Lisa::Options::Help"),
+                 ('m',"manual-reject","Lisa::Options::Manual-Reject", "HasArg"),
+                 ('n',"no-action","Lisa::Options::No-Action"),
+                 ('s',"sort","Lisa::Options::Sort","HasArg"),
+                 ('V',"version","Lisa::Options::Version")];
+
+    for i in ["automatic", "help", "manual-reject", "no-action", "version"]:
+        if not Cnf.has_key("Lisa::Options::%s" % (i)):
+            Cnf["Lisa::Options::%s" % (i)] = "";
+    if not Cnf.has_key("Lisa::Options::Sort"):
+        Cnf["Lisa::Options::Sort"] = "time";
+
+    changes_files = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv);
+    Options = Cnf.SubTree("Lisa::Options")
+
+    if Options["Help"]:
+        usage();
+
+    if Options["Version"]:
+        print "lisa %s" % (lisa_version);
+        sys.exit(0);
+
+    if Options["Sort"] != "time" and Options["Sort"] != "normal":
+        utils.fubar("Unrecognised sort type '%s'. (Recognised sort types are: time and normal)" % (Options["Sort"]));
+
+    Katie = katie.Katie(Cnf);
+
+    if not Options["No-Action"]:
+        Logger = Katie.Logger = logging.Logger(Cnf, "lisa");
+
+    projectB = Katie.projectB;
+
+    Sections = Section_Completer();
+    Priorities = Priority_Completer();
+    readline.parse_and_bind("tab: complete");
+
+    return changes_files;
+
+################################################################################
+
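+# Handle BYHAND files; the upload can only be accepted once they have been processed and removed.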
+def do_byhand():
+    done = 0;
+    while not done:
+        files = Katie.pkg.files;
+        will_install = 1;
+        byhand = [];
+
+        for file in files.keys():
+            if files[file]["type"] == "byhand":
+                if os.path.exists(file):
+                    print "W: %s still present; please process byhand components and try again." % (file);
+                    will_install = 0;
+                else:
+                    byhand.append(file);
+
+        answer = "XXXX";
+        if Options["No-Action"]:
+            answer = "S";
+        if will_install:
+            if Options["Automatic"] and not Options["No-Action"]:
+                answer = 'A';
+            prompt = "[A]ccept, Manual reject, Skip, Quit ?";
+        else:
+            prompt = "Manual reject, [S]kip, Quit ?";
+
+        while string.find(prompt, answer) == -1:
+            answer = raw_input(prompt)
+            m = katie.re_default_answer.match(prompt)
+            if answer == "":
+                answer = m.group(1)
+            answer = string.upper(answer[:1])
+
+        if answer == 'A':
+            done = 1;
+            for file in byhand:
+                del files[file];
+        elif answer == 'M':
+            Katie.do_reject(1, Options["Manual-Reject"]);
+            os.unlink(Katie.pkg.changes_file[:-8]+".katie");
+            done = 1;
+        elif answer == 'S':
+            done = 1;
+        elif answer == 'Q':
+            sys.exit(0);
+
+################################################################################
+
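+# Accept the upload (unless in no-action mode) and remove its .katie save file.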
+def do_accept():
+    print "ACCEPT";
+    if not Options["No-Action"]:
+        (summary, short_summary) = Katie.build_summaries();
+        Katie.accept(summary, short_summary);
+        os.unlink(Katie.pkg.changes_file[:-8]+".katie");
+
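+# Return flags indicating whether the upload still has NEW or BYHAND files.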
+def check_status(files):
+    new = byhand = 0;
+    for file in files.keys():
+        if files[file]["type"] == "byhand":
+            byhand = 1;
+        elif files[file].has_key("new"):
+            new = 1;
+    return (new, byhand);
+
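+# Process a single .changes file: handle its NEW and BYHAND parts, then accept it if nothing remains.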
+def do_pkg(changes_file):
+    Katie.pkg.changes_file = changes_file;
+    Katie.init_vars();
+    Katie.update_vars();
+    Katie.update_subst();
+    files = Katie.pkg.files;
+
+    (new, byhand) = check_status(files);
+    if new or byhand:
+        if new:
+            do_new();
+        if byhand:
+            do_byhand();
+        (new, byhand) = check_status(files);
+
+    if not new and not byhand:
+        do_accept();
+
+################################################################################
+
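+# Report how much was accepted and close the log.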
+def end():
+    accept_count = Katie.accept_count;
+    accept_bytes = Katie.accept_bytes;
+
+    if accept_count:
+        sets = "set"
+        if accept_count > 1:
+            sets = "sets"
+        sys.stderr.write("Accepted %d package %s, %s.\n" % (accept_count, sets, utils.size_type(int(accept_bytes))));
+        Logger.log(["total",accept_count,accept_bytes]);
+
+    if not Options["No-Action"]:
+        Logger.close();
+
+################################################################################
+
+def main():
+    changes_files = init();
+
+    # Sort the changes files
+    if Options["Sort"] == "time":
+        changes_files.sort(changes_compare_by_time);
+    else:
+        changes_files.sort(utils.changes_compare);
+
+    # Kill me now? **FIXME**
+    Cnf["Dinstall::Options::No-Mail"] = "";
+    bcc = "X-Lisa: %s" % (lisa_version);
+    if Cnf.has_key("Dinstall::Bcc"):
+        Katie.Subst["__BCC__"] = bcc + "\nBcc: %s" % (Cnf["Dinstall::Bcc"]);
+    else:
+        Katie.Subst["__BCC__"] = bcc;
+
+    for changes_file in changes_files:
+        print "\n" + changes_file;
+        do_pkg (changes_file);
+
+    end();
+
+################################################################################
+
+if __name__ == '__main__':
+    main()
diff --git a/templates/jennifer.accepted b/templates/jennifer.accepted
new file mode 100644 (file)
index 0000000..e8a7837
--- /dev/null
+++ b/templates/jennifer.accepted
@@ -0,0 +1,10 @@
+From: __KATIE_ADDRESS__
+To: __MAINTAINER_TO__
+__BCC__
+Subject: __CHANGES_FILENAME__ ACCEPTED__SUITE__
+
+__REJECT_MESSAGE__
+Accepted:
+__SUMMARY__
+
+Thank you for your contribution to __DISTRO__.
diff --git a/templates/jennifer.announce b/templates/jennifer.announce
new file mode 100644 (file)
index 0000000..0a2c624
--- /dev/null
+++ b/templates/jennifer.announce
@@ -0,0 +1,9 @@
+From: __MAINTAINER_FROM__
+To: __ANNOUNCE_LIST_ADDRESS__
+__BCC__
+Subject: Accepted __SOURCE__ __VERSION__ (__ARCHITECTURE__)
+
+__FILE_CONTENTS__
+
+Accepted:
+__SHORT_SUMMARY__
diff --git a/templates/jennifer.bug-close b/templates/jennifer.bug-close
new file mode 100644 (file)
index 0000000..ef48ac8
--- /dev/null
+++ b/templates/jennifer.bug-close
@@ -0,0 +1,28 @@
+From: __MAINTAINER_FROM__
+To: __BUG_NUMBER__-close@__BUG_SERVER__
+__BCC__
+Subject: Bug#__BUG_NUMBER__: fixed in __SOURCE__ __VERSION__
+
+We believe that the bug you reported is fixed in the latest version of
+__SOURCE__, which is due to be installed in the __DISTRO__ FTP archive:
+
+__SHORT_SUMMARY__
+__STABLE_WARNING__
+
+A summary of the changes between this version and the previous one is
+attached.
+
+Thank you for reporting the bug, which will now be closed.  If you
+have further comments please address them to __BUG_NUMBER__@__BUG_SERVER__,
+and the maintainer will reopen the bug report if appropriate.
+
+__DISTRO__ distribution maintenance software
+pp.
+__MAINTAINER__ (supplier of updated __SOURCE__ package)
+
+(This message was generated automatically at their request; if you
+believe that there is a problem with it please contact the archive
+administrators by mailing __ADMIN_ADDRESS__)
+
+
+__FILE_CONTENTS__
diff --git a/templates/jennifer.bug-nmu-fixed b/templates/jennifer.bug-nmu-fixed
new file mode 100644 (file)
index 0000000..b20ab15
--- /dev/null
+++ b/templates/jennifer.bug-nmu-fixed
@@ -0,0 +1,13 @@
+From: __MAINTAINER_FROM__
+To: control@__BUG_SERVER__
+Cc: __MAINTAINER_TO__
+__BCC__
+Subject: Fixed in NMU of __SOURCE__ __VERSION__
+
+__CONTROL_MESSAGE__
+quit
+
+This message was generated automatically in response to a
+non-maintainer upload.  The .changes file follows.
+
+__FILE_CONTENTS__
diff --git a/templates/jennifer.new b/templates/jennifer.new
new file mode 100644 (file)
index 0000000..908178d
--- /dev/null
+++ b/templates/jennifer.new
@@ -0,0 +1,13 @@
+From: __KATIE_ADDRESS__
+To: __MAINTAINER_TO__
+__BCC__
+Subject: __CHANGES_FILENAME__ is NEW
+
+__SUMMARY__
+
+Your package contains new components which require manual editing of
+the override file.  It is ok otherwise, so please be patient.  New
+packages are usually added to the override file about once a week.
+
+You may have gotten the distribution wrong.  You'll get warnings above
+if files already exist in other distributions.
diff --git a/templates/jennifer.override-disparity b/templates/jennifer.override-disparity
new file mode 100644 (file)
index 0000000..6696720
--- /dev/null
+++ b/templates/jennifer.override-disparity
@@ -0,0 +1,26 @@
+From: __KATIE_ADDRESS__
+To: __MAINTAINER_TO__
+__BCC__
+Subject: __SOURCE__ override disparity
+
+There are disparities between your recently accepted upload and the
+override file for the following file(s):
+
+__SUMMARY__
+Either the package or the override file is incorrect.  If you think
+the override is correct and the package wrong please fix the package
+so that this disparity is fixed in the next upload.  If you feel the
+override is incorrect then please reply to this mail and explain why.
+
+[NB: this is an automatically generated mail; if you replied to one
+like it before and have not received a response yet, please ignore
+this mail.  Your reply needs to be processed by a human and will be
+dealt with in due course, but until then the installer will send these
+automated mails; sorry.]
+
+--
+__DISTRO__ distribution maintenance software
+
+(This message was generated automatically; if you believe that there
+is a problem with it please contact the archive administrators by
+mailing __ADMIN_ADDRESS__)
diff --git a/templates/katie.unaccept b/templates/katie.unaccept
new file mode 100644 (file)
index 0000000..f2e46bb
--- /dev/null
+++ b/templates/katie.unaccept
@@ -0,0 +1,18 @@
+From: __REJECTOR_ADDRESS__
+To: __MAINTAINER_TO__
+__CC__
+__BCC__
+Subject: __CHANGES_FILENAME__ UNACCEPT
+
+__REJECT_MESSAGE__
+
+===
+
+Despite being ACCEPTed, this package failed the database sanity checks
+at the time of install.  This should only happen rarely and in
+corner-cases (a binary upload of a package which has since been
+melanie'd for example), so no code to do the necessary unaccept
+actions has been written.  These actions (e.g. bug reopening,
+announcement rescinding, etc.) will have to be done by hand.  Also,
+the files have been left in the accepted directory; please deal with
+them as well.
diff --git a/templates/lisa.bxa_notification b/templates/lisa.bxa_notification
new file mode 100644 (file)
index 0000000..6145962
--- /dev/null
+++ b/templates/lisa.bxa_notification
@@ -0,0 +1,49 @@
+From: __KATIE_ADDRESS__
+To: crypt@bxa.doc.gov
+Cc: bxa@ftp-master.debian.org
+__BCC__
+Subject: Addition to __DISTRO__ Source Code
+
+                                                     Department of Commerce
+                                            Bureau of Export Administration
+                      Office of Strategic Trade and Foreign Policy Controls
+                                     14th Street and Pennsylvania Ave., N.W.
+                                                                   Room 2705
+                                                        Washington, DC 20230
+
+Re:  Unrestricted Encryption Source Code Notification
+Commodity: Addition to Debian Source Code
+
+Dear Sir/Madam,
+
+     Pursuant to paragraph (e)(1) of Part 740.13 of the U.S. Export
+Administration Regulations ("EAR", 15 CFR Part 730 et seq.), we are
+providing this written notification of the Internet location of the
+unrestricted, publicly available Source Code of a package being added
+to the Debian Source Code. Debian Source Code is a free operating
+system developed by a group of individuals, coordinated by the
+non-profit Software in the Public Interest.  This notification serves
+as a notification of an addition of new software to the Debian
+archive.  Previous notifications have covered the archive as a whole
+and other software added in the past.  This archive is updated from
+time to time, but its location is constant.  Therefore, this
+notification serves as a one-time notification for subsequent updates
+that may occur in the future to the software covered by this
+notification.  Such updates may add or enhance cryptographic
+functionality of the Debian operating system.  The Internet location
+for the Debian Source Code is: http://ftp.debian.org/debian/
+
+This site is mirrored to a number of other sites located outside the
+United States.
+
+The following software is added to the Debian archive:
+
+----------------------------------------------------------------------
+__BINARY_DESCRIPTIONS__
+----------------------------------------------------------------------
+
+If you have any questions, please call me on (804) 695-9730.
+
+     Sincerely,
+        Ben Collins
+        Debian Developer