Merge commit 'origin/sqlalchemy' into sqlalchemy
author    Mark Hymers <mhy@debian.org>
          Mon, 26 Oct 2009 08:58:50 +0000 (08:58 +0000)
committer Mark Hymers <mhy@debian.org>
          Mon, 26 Oct 2009 08:58:50 +0000 (08:58 +0000)
26 files changed:
config/debian/cron.dinstall
config/debian/dak.conf
dak/contents.py
dak/generate_releases.py
dak/make_suite_file_list.py
dak/new_security_install.py
dak/override.py
dak/process_new.py
dak/process_unchecked.py
dak/transitions.py
daklib/database.py
daklib/utils.py
docs/README.config
docs/talks/DebConf9/Makefile [new file with mode: 0644]
docs/talks/DebConf9/background.jpg [new file with mode: 0644]
docs/talks/DebConf9/ftpmaster.pdf [new file with mode: 0644]
docs/talks/DebConf9/ftpmaster.tex [new file with mode: 0644]
scripts/debian/byhand-dm [deleted file]
scripts/debian/ftpstats.R [new file with mode: 0644]
templates/rm.bug-close
tools/debianqueued-0.9/config
tools/debianqueued-0.9/config-upload
tools/debianqueued-0.9/debianqueued
web/ftpmaster.pdf [new symlink]
web/index.html
web/reject.html

index a7d901af09e47b4030c6ca79a7613bc4407be508..5a8f107369033db0d660daecbd412a65daddd082 100755 (executable)
@@ -210,6 +210,28 @@ function msfl() {
 function fingerprints() {
     log "Updating fingerprints"
     dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
+
+    OUTFILE=$(mktemp)
+    dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
+
+    if [ -s "${OUTFILE}" ]; then
+        /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
+From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
+To: <debian-project@lists.debian.org>
+Subject: Debian Maintainers Keyring changes
+Content-Type: text/plain; charset=utf-8
+MIME-Version: 1.0
+
+The following changes to the debian-maintainers keyring have just been activated:
+
+$(cat $OUTFILE)
+
+Debian distribution maintenance software,
+on behalf of the Keyring maintainers
+
+EOF
+    fi
+    rm -f "$OUTFILE"
 }
 
 function overrides() {
@@ -407,14 +429,14 @@ function maillogfile() {
 function renamelogfile() {
     if [ -f "${dbdir}/dinstallstart" ]; then
         NOW=$(cat "${dbdir}/dinstallstart")
-        maillogfile
+#        maillogfile
         mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
         logstats "$logdir/dinstall_${NOW}.log"
         bzip2 -9 "$logdir/dinstall_${NOW}.log"
     else
         error "Problem, I don't know when dinstall started, unable to do log statistics."
         NOW=`date "+%Y.%m.%d-%H:%M:%S"`
-        maillogfile
+#        maillogfile
         mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
         bzip2 -9 "$logdir/dinstall_${NOW}.log"
     fi
@@ -456,7 +478,7 @@ function stage() {
     ARGS='GO[@]'
     local "${!ARGS}"
 
-    error=${ERR:-"false"}
+    error=${ERR:-"true"}
 
     STAGEFILE="${stagedir}/${FUNC}"
     if [ -f "${STAGEFILE}" ]; then
index 6ad96a6b8cd97a793bf0470b86485d66b9687c18..6a99dfbf9640b062f5feecd19e96824f43a05013 100644 (file)
@@ -3,7 +3,7 @@ Dinstall
    GPGKeyring {
       "/srv/keyring.debian.org/keyrings/debian-keyring.gpg";
       "/srv/keyring.debian.org/keyrings/debian-keyring.pgp";
-      "/srv/ftp.debian.org/keyrings/debian-maintainers.gpg";
+      "/srv/keyring.debian.org/keyrings/debian-maintainers.gpg";
    };
    SigningKeyring "/srv/ftp.debian.org/s3kr1t/dot-gnupg/secring.gpg";
    SigningPubKeyring "/srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg";
@@ -94,7 +94,7 @@ Import-Users-From-Passwd
 {
   ValidGID "800";
   // Comma separated list of users who are in Postgres but not the passwd file
-  KnownPostgres "postgres,dak,katie,release";
+  KnownPostgres "postgres,dak,katie,release,qa,www-data,guest";
 };
 
 Clean-Queues
@@ -137,7 +137,7 @@ Import-Archive
 
 Import-Keyring
 {
-  /srv/ftp.debian.org/keyrings/debian-maintainers.gpg
+  /srv/keyring.debian.org/keyrings/debian-maintainers.gpg
     {
       Debian-Maintainer "true";
     };
@@ -525,13 +525,6 @@ AutomaticByHandPackages {
     Script "/srv/ftp.debian.org/dak/scripts/debian/byhand-di";
   };
 
-  "debian-maintainers" {
-    Source "debian-maintainers";
-    Section "raw-keyring";
-    Extension "gpg";
-    Script "/srv/ftp.debian.org/dak/scripts/debian/byhand-dm";
-  };
-
   "tag-overrides" {
     Source "tag-overrides";
     Section "byhand";
index b94aa0de7ca7a99ba5d6f621aaba5b72336f450e..9ac99951400e675aade746683566ebeaeaf73ce8 100755 (executable)
@@ -104,7 +104,7 @@ class GzippedContentWriter(object):
 
     def __init__(self, filename):
         """
-        @ptype filename: string
+        @type filename: string
         @param filename: the name of the file to write to
         """
         self.queue = Queue.Queue()
index 0bd7a69f60ec17e6a6e5826d22534b1e7a3afa0c..11e37807121181c892bca57ab40471fec5674969 100755 (executable)
@@ -59,6 +59,24 @@ def add_tiffani (files, path, indexstem):
         #print "ALERT: there was a tiffani file %s" % (filepath)
         files.append(index)
 
+def gen_i18n_index (files, tree, sec):
+    path = Cnf["Dir::Root"] + tree + "/"
+    i18n_path = "%s/i18n" % (sec)
+    if os.path.exists("%s/%s" % (path, i18n_path)):
+        index = "%s/Index" % (i18n_path)
+        out = open("%s/%s" % (path, index), "w")
+        out.write("SHA1:\n")
+        for x in os.listdir("%s/%s" % (path, i18n_path)):
+            if x.startswith('Translation-'):
+                f = open("%s/%s/%s" % (path, i18n_path, x), "r")
+                size = os.fstat(f.fileno())[6]
+                f.seek(0)
+                sha1sum = apt_pkg.sha1sum(f)
+                f.close()
+                out.write(" %s %7d %s\n" % (sha1sum, size, x))
+        out.close()
+        files.append(index)
+
 def compressnames (tree,type,file):
     compress = AptCnf.get("%s::%s::Compress" % (tree,type), AptCnf.get("Default::%s::Compress" % (type), ". gzip"))
     result = []
@@ -258,7 +276,10 @@ def main ():
             else:
                 for x in os.listdir("%s/%s" % (Cnf["Dir::Root"], tree)):
                     if x.startswith('Contents-'):
-                        files.append(x)
+                        if x.endswith('.diff'):
+                            files.append("%s/Index" % (x))
+                        else:
+                            files.append(x)
 
             for sec in AptCnf["tree::%s::Sections" % (tree)].split():
                 for arch in AptCnf["tree::%s::Architectures" % (tree)].split():
@@ -287,6 +308,7 @@ def main ():
                     relpath = Cnf["Dir::Root"]+tree+"/"+rel
                     write_release_file(relpath, suite, sec, origin, label, arch, version, suite_suffix, notautomatic)
                     files.append(rel)
+                gen_i18n_index(files, tree, sec)
 
             if AptCnf.has_key("tree::%s/main" % (tree)):
                 for dis in ["main", "contrib", "non-free"]:
index de62adb9909774baa728b3e37b8fe9ede68d0629..096098bfb9cabb6456efdeb35490059e7abc7f70 100755 (executable)
@@ -275,7 +275,7 @@ def write_filelists(packages, dislocated_files, session):
             if not Options["Architecture"]:
                 architectures = [ a.arch_string for a in get_suite_architectures(suite, session=session) ]
             else:
-                architectures = utils.split_args(Options["Architectures"])
+                architectures = utils.split_args(Options["Architecture"])
             for arch in [ i.lower() for i in architectures ]:
                 d[suite][component].setdefault(arch, {})
                 if arch == "source":
index fec030b7d15fce6a484d41e2516126c9373ff54c..24e89b923e72e667bf0cf398e4ac5c8ecec1d3d6 100755 (executable)
@@ -60,9 +60,7 @@ def init():
 
     Options = Cnf.SubTree("Security-Install::Options")
 
-    whoami = os.getuid()
-    whoamifull = pwd.getpwuid(whoami)
-    username = whoamifull[0]
+    username = utils.getusername()
     if username != "dak":
         print "Non-dak user: %s" % username
         Options["Sudo"] = "y"
index 9e1735e87bee77bc537be2f29c50a1fde36d06c3..413c344747fb6775251ee5a11ed39605c2544160 100755 (executable)
@@ -240,9 +240,9 @@ def main ():
             Subst["__BCC__"] = "Bcc: " + ", ".join(bcc)
         else:
             Subst["__BCC__"] = "X-Filler: 42"
-        Subst["__CC__"] = "X-DAK: dak override\nX-Katie: alicia"
-        Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
-        Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
+        Subst["__CC__"] = "Cc: " + package + "@" + Cnf["Dinstall::PackagesServer"] + "\nX-DAK: dak override\nX-Katie: alicia"
+        Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
+        Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
         Subst["__WHOAMI__"] = utils.whoami()
         Subst["__SOURCE__"] = package
 
index 777e0791afb72dec03b5d17f0ec4e5901bc4f60f..f15a56003f67ae82e5ec347328d94bdae8100125 100755 (executable)
@@ -554,6 +554,7 @@ def do_bxa_notification():
 def add_overrides (new):
     changes = Upload.pkg.changes
     files = Upload.pkg.files
+    srcpkg = changes.get("source")
 
     projectB.query("BEGIN WORK")
     for suite in changes["suite"].keys():
@@ -563,6 +564,7 @@ def add_overrides (new):
             type_id = database.get_override_type_id(new[pkg]["type"])
             priority_id = new[pkg]["priority id"]
             section_id = new[pkg]["section id"]
+            Logger.log(["%s overrides" % (srcpkg), suite, new[pkg]["component"], new[pkg]["type"], new[pkg]["priority"], new[pkg]["section"]])
             projectB.query("INSERT INTO override (suite, component, type, package, priority, section, maintainer) VALUES (%s, %s, %s, '%s', %s, %s, '')" % (suite_id, component_id, type_id, pkg, priority_id, section_id))
             for f in new[pkg]["files"]:
                 if files[f].has_key("new"):
@@ -693,6 +695,7 @@ def do_new():
             try:
                 check_daily_lock()
                 done = add_overrides (new)
+                Logger.log([utils.getusername(), "NEW ACCEPT: %s" % (Upload.pkg.changes_file)])
             except CantGetLockError:
                 print "Hello? Operator! Give me the number for 911!"
                 print "Dinstall in the locked area, cant process packages, come back later"
@@ -705,12 +708,14 @@ def do_new():
                                        reject_message=Options["Manual-Reject"],
                                        note=database.get_new_comments(changes.get("source", "")))
             if not aborted:
+                Logger.log([utils.getusername(), "NEW REJECT: %s" % (Upload.pkg.changes_file)])
                 os.unlink(Upload.pkg.changes_file[:-8]+".dak")
                 done = 1
         elif answer == 'N':
             edit_note(database.get_new_comments(changes.get("source", "")))
         elif answer == 'P' and not Options["Trainee"]:
             prod_maintainer(database.get_new_comments(changes.get("source", "")))
+            Logger.log([utils.getusername(), "NEW PROD: %s" % (Upload.pkg.changes_file)])
         elif answer == 'R' and not Options["Trainee"]:
             confirm = utils.our_raw_input("Really clear note (y/N)? ").lower()
             if confirm == "y":
@@ -823,10 +828,12 @@ def do_byhand():
                 done = 1
                 for f in byhand:
                     del files[f]
+                Logger.log([utils.getusername(), "BYHAND ACCEPT: %s" % (Upload.pkg.changes_file)])
             except CantGetLockError:
                 print "Hello? Operator! Give me the number for 911!"
                 print "Dinstall in the locked area, cant process packages, come back later"
         elif answer == 'M':
+            Logger.log([utils.getusername(), "BYHAND REJECT: %s" % (Upload.pkg.changes_file)])
             Upload.do_reject(1, Options["Manual-Reject"])
             os.unlink(Upload.pkg.changes_file[:-8]+".dak")
             done = 1
@@ -929,10 +936,12 @@ def do_accept_stableupdate(suite, q):
             # writing this means that it is installed, so put it into
             # accepted.
             print "Binary-only upload, source installed."
+            Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
             _accept()
         elif is_source_in_queue_dir(Cnf["Dir::Queue::Accepted"]):
             # The source is in accepted, the binary cleared NEW: accept it.
             print "Binary-only upload, source in accepted."
+            Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
             _accept()
         elif is_source_in_queue_dir(Cnf["Dir::Queue::New"]):
             # It's in NEW.  We expect the source to land in p-u holding
@@ -942,6 +951,7 @@ def do_accept_stableupdate(suite, q):
         elif is_source_in_queue_dir(Cnf["Dir::Queue::Newstage"]):
             # It's in newstage.  Accept into the holding area
             print "Binary-only upload, source in newstage."
+            Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
             _accept()
         else:
             # No case applicable.  Bail out.  Return will cause the upload
@@ -1031,7 +1041,7 @@ def end():
         if accept_count > 1:
             sets = "sets"
         sys.stderr.write("Accepted %d package %s, %s.\n" % (accept_count, sets, utils.size_type(int(accept_bytes))))
-        Logger.log(["total",accept_count,accept_bytes])
+        Logger.log([utils.getusername(), "total",accept_count,accept_bytes])
 
     if not Options["No-Action"] and not Options["Trainee"]:
         Logger.close()
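
The Logger.log([...]) calls added throughout process_new.py record who
accepted, rejected or prodded which .changes file. daklib's real Logger also
stamps entries with the program name and a timestamp; the shape of the idea,
as an illustrative sketch only:

    import time

    class AuditLog(object):
        def __init__(self, path):
            self.fh = open(path, "a")

        def log(self, fields):
            # One timestamped, pipe-separated record per event.
            record = "|".join(str(f) for f in fields)
            self.fh.write("%s|%s\n" % (time.strftime("%Y%m%d%H%M%S"), record))
            self.fh.flush()

        def close(self):
            self.fh.close()
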
index cae48a7ee0c54647be58048d3d142ef8dc5b898e..cb31d8a24b44eb2da38a373dc4e2ca685179ac98 100755 (executable)
@@ -122,8 +122,1021 @@ def usage (exit_code=0):
 
 ################################################################################
 
-def action(u):
-    cnf = Config()
+def reject (str, prefix="Rejected: "):
+    global reject_message
+    if str:
+        reject_message += prefix + str + "\n"
+
+################################################################################
+
+def copy_to_holding(filename):
+    global in_holding
+
+    base_filename = os.path.basename(filename)
+
+    dest = Cnf["Dir::Queue::Holding"] + '/' + base_filename
+    try:
+        fd = os.open(dest, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0640)
+        os.close(fd)
+    except OSError, e:
+        # Shouldn't happen, but will if, for example, someone lists a
+        # file twice in the .changes.
+        if errno.errorcode[e.errno] == 'EEXIST':
+            reject("%s: already exists in holding area; can not overwrite." % (base_filename))
+            return
+        raise
+
+    try:
+        shutil.copy(filename, dest)
+    except IOError, e:
+        # In either case (ENOENT or EACCES) we want to remove the
+        # O_CREAT | O_EXCLed ghost file, so add the file to the list
+        # of 'in holding' even if it's not the real file.
+        if errno.errorcode[e.errno] == 'ENOENT':
+            reject("%s: can not copy to holding area: file not found." % (base_filename))
+            os.unlink(dest)
+            return
+        elif errno.errorcode[e.errno] == 'EACCES':
+            reject("%s: can not copy to holding area: read permission denied." % (base_filename))
+            os.unlink(dest)
+            return
+        raise
+
+    in_holding[base_filename] = ""
+
+################################################################################
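
copy_to_holding() above opens the destination with O_CREAT|O_EXCL before
copying: the open atomically claims the name, so a file listed twice in the
.changes (or a concurrent run) fails with EEXIST instead of silently
overwriting. Stripped to its essentials (a sketch, hypothetical names):

    import os
    import shutil

    def claim_and_copy(src, dest):
        fd = os.open(dest, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0o640)
        os.close(fd)            # the empty file now reserves the name
        shutil.copy(src, dest)  # an IOError here leaves the claim to clean up
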
+
+def clean_holding():
+    global in_holding
+
+    cwd = os.getcwd()
+    os.chdir(Cnf["Dir::Queue::Holding"])
+    for f in in_holding.keys():
+        if os.path.exists(f):
+            if f.find('/') != -1:
+                utils.fubar("WTF? clean_holding() got a file ('%s') with / in it!" % (f))
+            else:
+                os.unlink(f)
+    in_holding = {}
+    os.chdir(cwd)
+
+################################################################################
+
+def check_changes():
+    filename = pkg.changes_file
+
+    # Parse the .changes field into a dictionary
+    try:
+        changes.update(utils.parse_changes(filename))
+    except CantOpenError:
+        reject("%s: can't read file." % (filename))
+        return 0
+    except ParseChangesError, line:
+        reject("%s: parse error, can't grok: %s." % (filename, line))
+        return 0
+    except ChangesUnicodeError:
+        reject("%s: changes file not proper utf-8" % (filename))
+        return 0
+
+    # Parse the Files field from the .changes into another dictionary
+    try:
+        files.update(utils.build_file_list(changes))
+    except ParseChangesError, line:
+        reject("%s: parse error, can't grok: %s." % (filename, line))
+    except UnknownFormatError, format:
+        reject("%s: unknown format '%s'." % (filename, format))
+        return 0
+
+    # Check for mandatory fields
+    for i in ("source", "binary", "architecture", "version", "distribution",
+              "maintainer", "files", "changes", "description"):
+        if not changes.has_key(i):
+            reject("%s: Missing mandatory field `%s'." % (filename, i))
+            return 0    # Avoid <undef> errors during later tests
+
+    # Strip a source version in brackets from the source field
+    if re_strip_srcver.search(changes["source"]):
+        changes["source"] = re_strip_srcver.sub('', changes["source"])
+
+    # Ensure the source field is a valid package name.
+    if not re_valid_pkg_name.match(changes["source"]):
+        reject("%s: invalid source name '%s'." % (filename, changes["source"]))
+
+    # Split multi-value fields into a lower-level dictionary
+    for i in ("architecture", "distribution", "binary", "closes"):
+        o = changes.get(i, "")
+        if o != "":
+            del changes[i]
+        changes[i] = {}
+        for j in o.split():
+            changes[i][j] = 1
+
+    # Fix the Maintainer: field to be RFC822/2047 compatible
+    try:
+        (changes["maintainer822"], changes["maintainer2047"],
+         changes["maintainername"], changes["maintaineremail"]) = \
+         utils.fix_maintainer (changes["maintainer"])
+    except ParseMaintError, msg:
+        reject("%s: Maintainer field ('%s') failed to parse: %s" \
+               % (filename, changes["maintainer"], msg))
+
+    # ...likewise for the Changed-By: field if it exists.
+    try:
+        (changes["changedby822"], changes["changedby2047"],
+         changes["changedbyname"], changes["changedbyemail"]) = \
+         utils.fix_maintainer (changes.get("changed-by", ""))
+    except ParseMaintError, msg:
+        (changes["changedby822"], changes["changedby2047"],
+         changes["changedbyname"], changes["changedbyemail"]) = \
+         ("", "", "", "")
+        reject("%s: Changed-By field ('%s') failed to parse: %s" \
+               % (filename, changes["changed-by"], msg))
+
+    # Ensure all the values in Closes: are numbers
+    if changes.has_key("closes"):
+        for i in changes["closes"].keys():
+            if re_isanum.match (i) == None:
+                reject("%s: `%s' from Closes field isn't a number." % (filename, i))
+
+
+    # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
+    changes["chopversion"] = re_no_epoch.sub('', changes["version"])
+    changes["chopversion2"] = re_no_revision.sub('', changes["chopversion"])
+
+    # Check there isn't already a changes file of the same name in one
+    # of the queue directories.
+    base_filename = os.path.basename(filename)
+    for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
+        if os.path.exists(Cnf["Dir::Queue::%s" % (d) ]+'/'+base_filename):
+            reject("%s: a file with this name already exists in the %s directory." % (base_filename, d))
+
+    # Check the .changes is non-empty
+    if not files:
+        reject("%s: nothing to do (Files field is empty)." % (base_filename))
+        return 0
+
+    return 1
+
+################################################################################
+
+def check_distributions():
+    "Check and map the Distribution field of a .changes file."
+
+    # Handle suite mappings
+    for m in Cnf.ValueList("SuiteMappings"):
+        args = m.split()
+        mtype = args[0]
+        if mtype == "map" or mtype == "silent-map":
+            (source, dest) = args[1:3]
+            if changes["distribution"].has_key(source):
+                del changes["distribution"][source]
+                changes["distribution"][dest] = 1
+                if mtype != "silent-map":
+                    reject("Mapping %s to %s." % (source, dest),"")
+            if changes.has_key("distribution-version"):
+                if changes["distribution-version"].has_key(source):
+                    changes["distribution-version"][source]=dest
+        elif mtype == "map-unreleased":
+            (source, dest) = args[1:3]
+            if changes["distribution"].has_key(source):
+                for arch in changes["architecture"].keys():
+                    if arch not in DBConn().get_suite_architectures(source):
+                        reject("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch),"")
+                        del changes["distribution"][source]
+                        changes["distribution"][dest] = 1
+                        break
+        elif mtype == "ignore":
+            suite = args[1]
+            if changes["distribution"].has_key(suite):
+                del changes["distribution"][suite]
+                reject("Ignoring %s as a target suite." % (suite), "Warning: ")
+        elif mtype == "reject":
+            suite = args[1]
+            if changes["distribution"].has_key(suite):
+                reject("Uploads to %s are not accepted." % (suite))
+        elif mtype == "propup-version":
+            # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
+            #
+            # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
+            if changes["distribution"].has_key(args[1]):
+                changes.setdefault("distribution-version", {})
+                for suite in args[2:]: changes["distribution-version"][suite]=suite
+
+    # Ensure there is (still) a target distribution
+    if changes["distribution"].keys() == []:
+        reject("no valid distribution.")
+
+    # Ensure target distributions exist
+    for suite in changes["distribution"].keys():
+        if not Cnf.has_key("Suite::%s" % (suite)):
+            reject("Unknown distribution `%s'." % (suite))
+
+################################################################################
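
The "map"/"silent-map" branch in check_distributions() just moves a key from
one suite to another in changes["distribution"]. Restated with the mappings as
(type, source, dest) tuples — a hypothetical data shape for illustration; the
real entries come from Cnf.ValueList("SuiteMappings"):

    def apply_map(distribution, mappings):
        # distribution is a dict keyed by suite name, as in changes["distribution"]
        for mtype, source, dest in mappings:
            if mtype in ("map", "silent-map") and source in distribution:
                del distribution[source]
                distribution[dest] = 1
        return distribution

    # apply_map({"stable": 1}, [("map", "stable", "proposed-updates")])
    # -> {"proposed-updates": 1}
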
+
+def check_files():
+    global reprocess
+
+    archive = utils.where_am_i()
+    file_keys = files.keys()
+
+    # if reprocess is 2 we've already done this and we're checking
+    # things again for the new .orig.tar.gz.
+    # [Yes, I'm fully aware of how disgusting this is]
+    if not Options["No-Action"] and reprocess < 2:
+        cwd = os.getcwd()
+        os.chdir(pkg.directory)
+        for f in file_keys:
+            copy_to_holding(f)
+        os.chdir(cwd)
+
+    # Check there isn't already a .changes or .dak file of the same name in
+    # the proposed-updates "CopyChanges" or "CopyDotDak" storage directories.
+    # [NB: this check must be done post-suite mapping]
+    base_filename = os.path.basename(pkg.changes_file)
+    dot_dak_filename = base_filename[:-8]+".dak"
+    for suite in changes["distribution"].keys():
+        copychanges = "Suite::%s::CopyChanges" % (suite)
+        if Cnf.has_key(copychanges) and \
+               os.path.exists(Cnf[copychanges]+"/"+base_filename):
+            reject("%s: a file with this name already exists in %s" \
+                   % (base_filename, Cnf[copychanges]))
+
+        copy_dot_dak = "Suite::%s::CopyDotDak" % (suite)
+        if Cnf.has_key(copy_dot_dak) and \
+               os.path.exists(Cnf[copy_dot_dak]+"/"+dot_dak_filename):
+            reject("%s: a file with this name already exists in %s" \
+                   % (dot_dak_filename, Cnf[copy_dot_dak]))
+
+    reprocess = 0
+    has_binaries = 0
+    has_source = 0
+
+    cursor = DBConn().cursor()
+    # Check for packages that have moved from one component to another
+    # STU: this should probably be changed to not join on the architecture and suite tables, but instead to use their cached name->id mappings from DBConn
+    DBConn().prepare("moved_pkg_q", """
+        PREPARE moved_pkg_q(text,text,text) AS
+        SELECT c.name FROM binaries b, bin_associations ba, suite s, location l,
+                    component c, architecture a, files f
+        WHERE b.package = $1 AND s.suite_name = $2
+          AND (a.arch_string = $3 OR a.arch_string = 'all')
+          AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
+          AND f.location = l.id
+          AND l.component = c.id
+          AND b.file = f.id""")
+
+    for f in file_keys:
+        # Ensure the file does not already exist in one of the accepted directories
+        for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
+            if not Cnf.has_key("Dir::Queue::%s" % (d)): continue
+            if os.path.exists(Cnf["Dir::Queue::%s" % (d) ] + '/' + f):
+                reject("%s file already exists in the %s directory." % (f, d))
+        if not re_taint_free.match(f):
+            reject("!!WARNING!! tainted filename: '%s'." % (f))
+        # Check the file is readable
+        if os.access(f, os.R_OK) == 0:
+            # When running in -n, copy_to_holding() won't have
+            # generated the reject_message, so we need to.
+            if Options["No-Action"]:
+                if os.path.exists(f):
+                    reject("Can't read `%s'. [permission denied]" % (f))
+                else:
+                    reject("Can't read `%s'. [file not found]" % (f))
+            files[f]["type"] = "unreadable"
+            continue
+        # If it's byhand skip remaining checks
+        if files[f]["section"] == "byhand" or files[f]["section"][:4] == "raw-":
+            files[f]["byhand"] = 1
+            files[f]["type"] = "byhand"
+        # Checks for a binary package...
+        elif re_isadeb.match(f):
+            has_binaries = 1
+            files[f]["type"] = "deb"
+
+            # Extract package control information
+            deb_file = utils.open_file(f)
+            try:
+                control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
+            except:
+                reject("%s: debExtractControl() raised %s." % (f, sys.exc_type))
+                deb_file.close()
+                # Can't continue, none of the checks on control would work.
+                continue
+
+            # Check for mandatory "Description:"
+            deb_file.seek ( 0 )
+            try:
+                apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
+            except:
+                reject("%s: Missing Description in binary package" % (f))
+                continue
+
+            deb_file.close()
+
+            # Check for mandatory fields
+            for field in [ "Package", "Architecture", "Version" ]:
+                if control.Find(field) == None:
+                    reject("%s: No %s field in control." % (f, field))
+                    # Can't continue
+                    continue
+
+            # Ensure the package name matches the one given in the .changes
+            if not changes["binary"].has_key(control.Find("Package", "")):
+                reject("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
+
+            # Validate the package field
+            package = control.Find("Package")
+            if not re_valid_pkg_name.match(package):
+                reject("%s: invalid package name '%s'." % (f, package))
+
+            # Validate the version field
+            version = control.Find("Version")
+            if not re_valid_version.match(version):
+                reject("%s: invalid version number '%s'." % (f, version))
+
+            # Ensure the architecture of the .deb is one we know about.
+            default_suite = Cnf.get("Dinstall::DefaultSuite", "Unstable")
+            architecture = control.Find("Architecture")
+            upload_suite = changes["distribution"].keys()[0]
+            if architecture not in DBConn().get_suite_architectures(default_suite) and architecture not in DBConn().get_suite_architectures(upload_suite):
+                reject("Unknown architecture '%s'." % (architecture))
+
+            # Ensure the architecture of the .deb is one of the ones
+            # listed in the .changes.
+            if not changes["architecture"].has_key(architecture):
+                reject("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
+
+            # Sanity-check the Depends field
+            depends = control.Find("Depends")
+            if depends == '':
+                reject("%s: Depends field is empty." % (f))
+
+            # Sanity-check the Provides field
+            provides = control.Find("Provides")
+            if provides:
+                provide = re_spacestrip.sub('', provides)
+                if provide == '':
+                    reject("%s: Provides field is empty." % (f))
+                prov_list = provide.split(",")
+                for prov in prov_list:
+                    if not re_valid_pkg_name.match(prov):
+                        reject("%s: Invalid Provides field content %s." % (f, prov))
+
+
+            # Check the section & priority match those given in the .changes (non-fatal)
+            if control.Find("Section") and files[f]["section"] != "" and files[f]["section"] != control.Find("Section"):
+                reject("%s control file lists section as `%s', but changes file has `%s'." % (f, control.Find("Section", ""), files[f]["section"]), "Warning: ")
+            if control.Find("Priority") and files[f]["priority"] != "" and files[f]["priority"] != control.Find("Priority"):
+                reject("%s control file lists priority as `%s', but changes file has `%s'." % (f, control.Find("Priority", ""), files[f]["priority"]),"Warning: ")
+
+            files[f]["package"] = package
+            files[f]["architecture"] = architecture
+            files[f]["version"] = version
+            files[f]["maintainer"] = control.Find("Maintainer", "")
+            if f.endswith(".udeb"):
+                files[f]["dbtype"] = "udeb"
+            elif f.endswith(".deb"):
+                files[f]["dbtype"] = "deb"
+            else:
+                reject("%s is neither a .deb or a .udeb." % (f))
+            files[f]["source"] = control.Find("Source", files[f]["package"])
+            # Get the source version
+            source = files[f]["source"]
+            source_version = ""
+            if source.find("(") != -1:
+                m = re_extract_src_version.match(source)
+                source = m.group(1)
+                source_version = m.group(2)
+            if not source_version:
+                source_version = files[f]["version"]
+            files[f]["source package"] = source
+            files[f]["source version"] = source_version
+
+            # Ensure the filename matches the contents of the .deb
+            m = re_isadeb.match(f)
+            #  package name
+            file_package = m.group(1)
+            if files[f]["package"] != file_package:
+                reject("%s: package part of filename (%s) does not match package name in the %s (%s)." % (f, file_package, files[f]["dbtype"], files[f]["package"]))
+            epochless_version = re_no_epoch.sub('', control.Find("Version"))
+            #  version
+            file_version = m.group(2)
+            if epochless_version != file_version:
+                reject("%s: version part of filename (%s) does not match package version in the %s (%s)." % (f, file_version, files[f]["dbtype"], epochless_version))
+            #  architecture
+            file_architecture = m.group(3)
+            if files[f]["architecture"] != file_architecture:
+                reject("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % (f, file_architecture, files[f]["dbtype"], files[f]["architecture"]))
+
+            # Check for existent source
+            source_version = files[f]["source version"]
+            source_package = files[f]["source package"]
+            if changes["architecture"].has_key("source"):
+                if source_version != changes["version"]:
+                    reject("source version (%s) for %s doesn't match changes version %s." % (source_version, f, changes["version"]))
+            else:
+                # Check in the SQL database
+                if not Upload.source_exists(source_package, source_version, changes["distribution"].keys()):
+                    # Check in one of the other directories
+                    source_epochless_version = re_no_epoch.sub('', source_version)
+                    dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
+                    if os.path.exists(Cnf["Dir::Queue::Byhand"] + '/' + dsc_filename):
+                        files[f]["byhand"] = 1
+                    elif os.path.exists(Cnf["Dir::Queue::New"] + '/' + dsc_filename):
+                        files[f]["new"] = 1
+                    else:
+                        dsc_file_exists = 0
+                        for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
+                            if Cnf.has_key("Dir::Queue::%s" % (myq)):
+                                if os.path.exists(Cnf["Dir::Queue::"+myq] + '/' + dsc_filename):
+                                    dsc_file_exists = 1
+                                    break
+                        if not dsc_file_exists:
+                            reject("no source found for %s %s (%s)." % (source_package, source_version, f))
+            # Check the version and for file overwrites
+            reject(Upload.check_binary_against_db(f),"")
+
+            Binary(f, reject).scan_package()
+
+        # Checks for a source package...
+        else:
+            m = re_issource.match(f)
+            if m:
+                has_source = 1
+                files[f]["package"] = m.group(1)
+                files[f]["version"] = m.group(2)
+                files[f]["type"] = m.group(3)
+
+                # Ensure the source package name matches the Source field in the .changes
+                if changes["source"] != files[f]["package"]:
+                    reject("%s: changes file doesn't say %s for Source" % (f, files[f]["package"]))
+
+                # Ensure the source version matches the version in the .changes file
+                if files[f]["type"] == "orig.tar.gz":
+                    changes_version = changes["chopversion2"]
+                else:
+                    changes_version = changes["chopversion"]
+                if changes_version != files[f]["version"]:
+                    reject("%s: should be %s according to changes file." % (f, changes_version))
+
+                # Ensure the .changes lists source in the Architecture field
+                if not changes["architecture"].has_key("source"):
+                    reject("%s: changes file doesn't list `source' in Architecture field." % (f))
+
+                # Check the signature of a .dsc file
+                if files[f]["type"] == "dsc":
+                    dsc["fingerprint"] = utils.check_signature(f, reject)
+
+                files[f]["architecture"] = "source"
+
+            # Not a binary or source package?  Assume byhand...
+            else:
+                files[f]["byhand"] = 1
+                files[f]["type"] = "byhand"
+
+        # Per-suite file checks
+        files[f]["oldfiles"] = {}
+        for suite in changes["distribution"].keys():
+            # Skip byhand
+            if files[f].has_key("byhand"):
+                continue
+
+            # Handle component mappings
+            for m in Cnf.ValueList("ComponentMappings"):
+                (source, dest) = m.split()
+                if files[f]["component"] == source:
+                    files[f]["original component"] = source
+                    files[f]["component"] = dest
+
+            # Ensure the component is valid for the target suite
+            if Cnf.has_key("Suite:%s::Components" % (suite)) and \
+               files[f]["component"] not in Cnf.ValueList("Suite::%s::Components" % (suite)):
+                reject("unknown component `%s' for suite `%s'." % (files[f]["component"], suite))
+                continue
+
+            # Validate the component
+            component = files[f]["component"]
+            component_id = DBConn().get_component_id(component)
+            if component_id == -1:
+                reject("file '%s' has unknown component '%s'." % (f, component))
+                continue
+
+            # See if the package is NEW
+            if not Upload.in_override_p(files[f]["package"], files[f]["component"], suite, files[f].get("dbtype",""), f):
+                files[f]["new"] = 1
+
+            # Validate the priority
+            if files[f]["priority"].find('/') != -1:
+                reject("file '%s' has invalid priority '%s' [contains '/']." % (f, files[f]["priority"]))
+
+            # Determine the location
+            location = Cnf["Dir::Pool"]
+            location_id = DBConn().get_location_id(location, component, archive)
+            if location_id == -1:
+                reject("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (component, archive))
+            files[f]["location id"] = location_id
+
+            # Check the md5sum & size against existing files (if any)
+            files[f]["pool name"] = utils.poolify (changes["source"], files[f]["component"])
+            files_id = DBConn().get_files_id(files[f]["pool name"] + f, files[f]["size"], files[f]["md5sum"], files[f]["location id"])
+            if files_id == -1:
+                reject("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
+            elif files_id == -2:
+                reject("md5sum and/or size mismatch on existing copy of %s." % (f))
+            files[f]["files id"] = files_id
+
+            # Check for packages that have moved from one component to another
+            files[f]['suite'] = suite
+            cursor.execute("""EXECUTE moved_pkg_q( %(package)s, %(suite)s, %(architecture)s )""", ( files[f] ) )
+            ql = cursor.fetchone()
+            if ql:
+                files[f]["othercomponents"] = ql[0][0]
+
+    # If the .changes file says it has source, it must have source.
+    if changes["architecture"].has_key("source"):
+        if not has_source:
+            reject("no source found and Architecture line in changes mention source.")
+
+        if not has_binaries and Cnf.FindB("Dinstall::Reject::NoSourceOnly"):
+            reject("source only uploads are not supported.")
+
+###############################################################################
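
The filename cross-checks in check_files() rely on re_isadeb capturing the
package, version and architecture parts of "name_version_arch.deb" so they can
be compared against the .deb's control file. A simplified stand-in regex for
illustration (dak's actual pattern is stricter):

    import re

    re_deb = re.compile(r"^([a-z0-9][a-z0-9.+-]+)_([^_]+)_([^_.]+)\.u?deb$")

    def filename_parts(filename):
        m = re_deb.match(filename)
        return m and {"package": m.group(1),
                      "version": m.group(2),
                      "architecture": m.group(3)}

    # filename_parts("dak_1.0-1_all.deb")
    # -> {'package': 'dak', 'version': '1.0-1', 'architecture': 'all'}
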
+
+def check_dsc():
+    global reprocess
+
+    # Ensure there is source to check
+    if not changes["architecture"].has_key("source"):
+        return 1
+
+    # Find the .dsc
+    dsc_filename = None
+    for f in files.keys():
+        if files[f]["type"] == "dsc":
+            if dsc_filename:
+                reject("can not process a .changes file with multiple .dsc's.")
+                return 0
+            else:
+                dsc_filename = f
+
+    # If there isn't one, the upload is broken: reject it.
+    if not dsc_filename:
+        reject("source uploads must contain a dsc file")
+        return 0
+
+    # Parse the .dsc file
+    try:
+        dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
+    except CantOpenError:
+        # if not -n copy_to_holding() will have done this for us...
+        if Options["No-Action"]:
+            reject("%s: can't read file." % (dsc_filename))
+    except ParseChangesError, line:
+        reject("%s: parse error, can't grok: %s." % (dsc_filename, line))
+    except InvalidDscError, line:
+        reject("%s: syntax error on line %s." % (dsc_filename, line))
+    except ChangesUnicodeError:
+        reject("%s: dsc file not proper utf-8." % (dsc_filename))
+
+    # Build up the file list of files mentioned by the .dsc
+    try:
+        dsc_files.update(utils.build_file_list(dsc, is_a_dsc=1))
+    except NoFilesFieldError:
+        reject("%s: no Files: field." % (dsc_filename))
+        return 0
+    except UnknownFormatError, format:
+        reject("%s: unknown format '%s'." % (dsc_filename, format))
+        return 0
+    except ParseChangesError, line:
+        reject("%s: parse error, can't grok: %s." % (dsc_filename, line))
+        return 0
+
+    # Enforce mandatory fields
+    for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
+        if not dsc.has_key(i):
+            reject("%s: missing mandatory field `%s'." % (dsc_filename, i))
+            return 0
+
+    # Validate the source and version fields
+    if not re_valid_pkg_name.match(dsc["source"]):
+        reject("%s: invalid source name '%s'." % (dsc_filename, dsc["source"]))
+    if not re_valid_version.match(dsc["version"]):
+        reject("%s: invalid version number '%s'." % (dsc_filename, dsc["version"]))
+
+    # Bumping the version number of the .dsc breaks extraction by stable's
+    # dpkg-source.  So let's not do that...
+    if dsc["format"] != "1.0":
+        reject("%s: incompatible 'Format' version produced by a broken version of dpkg-dev 1.9.1{3,4}." % (dsc_filename))
+
+    # Validate the Maintainer field
+    try:
+        utils.fix_maintainer (dsc["maintainer"])
+    except ParseMaintError, msg:
+        reject("%s: Maintainer field ('%s') failed to parse: %s" \
+               % (dsc_filename, dsc["maintainer"], msg))
+
+    # Validate the build-depends field(s)
+    for field_name in [ "build-depends", "build-depends-indep" ]:
+        field = dsc.get(field_name)
+        if field:
+            # Check for broken dpkg-dev lossage...
+            if field.startswith("ARRAY"):
+                reject("%s: invalid %s field produced by a broken version of dpkg-dev (1.10.11)" % (dsc_filename, field_name.title()))
+
+            # Have apt try to parse them...
+            try:
+                apt_pkg.ParseSrcDepends(field)
+            except:
+                reject("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
+                pass
+
+    # Ensure the version number in the .dsc matches the version number in the .changes
+    epochless_dsc_version = re_no_epoch.sub('', dsc["version"])
+    changes_version = files[dsc_filename]["version"]
+    if epochless_dsc_version != files[dsc_filename]["version"]:
+        reject("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
+
+    # Ensure there is a .tar.gz in the .dsc file
+    has_tar = 0
+    for f in dsc_files.keys():
+        m = re_issource.match(f)
+        if not m:
+            reject("%s: %s in Files field not recognised as source." % (dsc_filename, f))
+            continue
+        ftype = m.group(3)
+        if ftype == "orig.tar.gz" or ftype == "tar.gz":
+            has_tar = 1
+    if not has_tar:
+        reject("%s: no .tar.gz or .orig.tar.gz in 'Files' field." % (dsc_filename))
+
+    # Ensure source is newer than existing source in target suites
+    reject(Upload.check_source_against_db(dsc_filename),"")
+
+    (reject_msg, is_in_incoming) = Upload.check_dsc_against_db(dsc_filename)
+    reject(reject_msg, "")
+    if is_in_incoming:
+        if not Options["No-Action"]:
+            copy_to_holding(is_in_incoming)
+        orig_tar_gz = os.path.basename(is_in_incoming)
+        files[orig_tar_gz] = {}
+        files[orig_tar_gz]["size"] = os.stat(orig_tar_gz)[stat.ST_SIZE]
+        files[orig_tar_gz]["md5sum"] = dsc_files[orig_tar_gz]["md5sum"]
+        files[orig_tar_gz]["sha1sum"] = dsc_files[orig_tar_gz]["sha1sum"]
+        files[orig_tar_gz]["sha256sum"] = dsc_files[orig_tar_gz]["sha256sum"]
+        files[orig_tar_gz]["section"] = files[dsc_filename]["section"]
+        files[orig_tar_gz]["priority"] = files[dsc_filename]["priority"]
+        files[orig_tar_gz]["component"] = files[dsc_filename]["component"]
+        files[orig_tar_gz]["type"] = "orig.tar.gz"
+        reprocess = 2
+
+    return 1
+
+################################################################################
+
+def get_changelog_versions(source_dir):
+    """Extracts a the source package and (optionally) grabs the
+    version history out of debian/changelog for the BTS."""
+
+    # Find the .dsc (again)
+    dsc_filename = None
+    for f in files.keys():
+        if files[f]["type"] == "dsc":
+            dsc_filename = f
+
+    # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
+    if not dsc_filename:
+        return
+
+    # Create a symlink mirror of the source files in our temporary directory
+    for f in files.keys():
+        m = re_issource.match(f)
+        if m:
+            src = os.path.join(source_dir, f)
+            # If a file is missing for whatever reason, give up.
+            if not os.path.exists(src):
+                return
+            ftype = m.group(3)
+            if ftype == "orig.tar.gz" and pkg.orig_tar_gz:
+                continue
+            dest = os.path.join(os.getcwd(), f)
+            os.symlink(src, dest)
+
+    # If the orig.tar.gz is not a part of the upload, create a symlink to the
+    # existing copy.
+    if pkg.orig_tar_gz:
+        dest = os.path.join(os.getcwd(), os.path.basename(pkg.orig_tar_gz))
+        os.symlink(pkg.orig_tar_gz, dest)
+
+    # Extract the source
+    cmd = "dpkg-source -sn -x %s" % (dsc_filename)
+    (result, output) = commands.getstatusoutput(cmd)
+    if (result != 0):
+        reject("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
+        reject(utils.prefix_multi_line_string(output, " [dpkg-source output:] "), "")
+        return
+
+    if not Cnf.Find("Dir::Queue::BTSVersionTrack"):
+        return
+
+    # Get the upstream version
+    upstr_version = re_no_epoch.sub('', dsc["version"])
+    if re_strip_revision.search(upstr_version):
+        upstr_version = re_strip_revision.sub('', upstr_version)
+
+    # Ensure the changelog file exists
+    changelog_filename = "%s-%s/debian/changelog" % (dsc["source"], upstr_version)
+    if not os.path.exists(changelog_filename):
+        reject("%s: debian/changelog not found in extracted source." % (dsc_filename))
+        return
+
+    # Parse the changelog
+    dsc["bts changelog"] = ""
+    changelog_file = utils.open_file(changelog_filename)
+    for line in changelog_file.readlines():
+        m = re_changelog_versions.match(line)
+        if m:
+            dsc["bts changelog"] += line
+    changelog_file.close()
+
+    # Check we found at least one revision in the changelog
+    if not dsc["bts changelog"]:
+        reject("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
+
+########################################
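
get_changelog_versions() keeps only the changelog lines that look like entry
headers ("package (version) distribution; urgency=..."). A simplified stand-in
for re_changelog_versions — not dak's exact pattern:

    import re

    # Matches "package (version)" at the start of a changelog entry header.
    header = re.compile(r"^\w[-+0-9a-z.]* \([^()\s]+\)")

    def changelog_versions(path):
        with open(path) as fh:
            return [line for line in fh if header.match(line)]
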
+
+def check_source():
+    # Bail out if:
+    #    a) there's no source
+    # or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files'
+    # or c) the orig.tar.gz is MIA
+    if not changes["architecture"].has_key("source") or reprocess == 2 \
+       or pkg.orig_tar_gz == -1:
+        return
+
+    tmpdir = utils.temp_dirname()
+
+    # Move into the temporary directory
+    cwd = os.getcwd()
+    os.chdir(tmpdir)
+
+    # Get the changelog version history
+    get_changelog_versions(cwd)
+
+    # Move back and cleanup the temporary tree
+    os.chdir(cwd)
+    try:
+        shutil.rmtree(tmpdir)
+    except OSError, e:
+        if errno.errorcode[e.errno] != 'EACCES':
+            utils.fubar("%s: couldn't remove tmp dir for source tree." % (dsc["source"]))
+
+        reject("%s: source tree could not be cleanly removed." % (dsc["source"]))
+        # We probably have u-r or u-w directories so chmod everything
+        # and try again.
+        cmd = "chmod -R u+rwx %s" % (tmpdir)
+        result = os.system(cmd)
+        if result != 0:
+            utils.fubar("'%s' failed with result %s." % (cmd, result))
+        shutil.rmtree(tmpdir)
+    except:
+        utils.fubar("%s: couldn't remove tmp dir for source tree." % (dsc["source"]))
+
+################################################################################
+
+# FIXME: should be a debian specific check called from a hook
+
+def check_urgency ():
+    if changes["architecture"].has_key("source"):
+        if not changes.has_key("urgency"):
+            changes["urgency"] = Cnf["Urgency::Default"]
+        # Urgency may be followed by space & comment (policy 5.6.17)
+        changes["urgency"] = changes["urgency"].split(" ")[0].lower();
+        if changes["urgency"] not in Cnf.ValueList("Urgency::Valid"):
+            reject("%s is not a valid urgency; it will be treated as %s by testing." % (changes["urgency"], Cnf["Urgency::Default"]), "Warning: ")
+            changes["urgency"] = Cnf["Urgency::Default"]
+
+################################################################################
+
+def check_hashes ():
+    utils.check_hash(".changes", files, "md5", apt_pkg.md5sum)
+    utils.check_size(".changes", files)
+    utils.check_hash(".dsc", dsc_files, "md5", apt_pkg.md5sum)
+    utils.check_size(".dsc", dsc_files)
+
+    # This is stupid API, but it'll have to do for now until
+    # we actually have proper abstraction
+    for m in utils.ensure_hashes(changes, dsc, files, dsc_files):
+        reject(m)
+
+################################################################################
+
+# Sanity check the time stamps of files inside debs.
+# [Files in the near future cause ugly warnings and extreme time
+#  travel can cause errors on extraction]
+
+def check_timestamps():
+    class Tar:
+        def __init__(self, future_cutoff, past_cutoff):
+            self.reset()
+            self.future_cutoff = future_cutoff
+            self.past_cutoff = past_cutoff
+
+        def reset(self):
+            self.future_files = {}
+            self.ancient_files = {}
+
+        def callback(self, Kind,Name,Link,Mode,UID,GID,Size,MTime,Major,Minor):
+            if MTime > self.future_cutoff:
+                self.future_files[Name] = MTime
+            if MTime < self.past_cutoff:
+                self.ancient_files[Name] = MTime
+    ####
+
+    future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
+    past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
+    tar = Tar(future_cutoff, past_cutoff)
+    for filename in files.keys():
+        if files[filename]["type"] == "deb":
+            tar.reset()
+            try:
+                deb_file = utils.open_file(filename)
+                apt_inst.debExtract(deb_file,tar.callback,"control.tar.gz")
+                deb_file.seek(0)
+                try:
+                    apt_inst.debExtract(deb_file,tar.callback,"data.tar.gz")
+                except SystemError, e:
+                    # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
+                    if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
+                        raise
+                    deb_file.seek(0)
+                    apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
+                deb_file.close()
+                #
+                future_files = tar.future_files.keys()
+                if future_files:
+                    num_future_files = len(future_files)
+                    future_file = future_files[0]
+                    future_date = tar.future_files[future_file]
+                    reject("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
+                           % (filename, num_future_files, future_file,
+                              time.ctime(future_date)))
+                #
+                ancient_files = tar.ancient_files.keys()
+                if ancient_files:
+                    num_ancient_files = len(ancient_files)
+                    ancient_file = ancient_files[0]
+                    ancient_date = tar.ancient_files[ancient_file]
+                    reject("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
+                           % (filename, num_ancient_files, ancient_file,
+                              time.ctime(ancient_date)))
+            except:
+                reject("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
+
+################################################################################
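
For concreteness, the two cutoffs in check_timestamps() work like this
(example values; the real ones come from Dinstall::FutureTimeTravelGrace and
Dinstall::PastCutoffYear in dak.conf):

    import time

    future_grace = 28800                        # e.g. 8 hours of clock skew allowed
    past_year = "1984"                          # e.g. mtimes before 1984 are "ancient"
    future_cutoff = time.time() + future_grace
    past_cutoff = time.mktime(time.strptime(past_year, "%Y"))
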
+
+def lookup_uid_from_fingerprint(fpr):
+    """
+    Return the uid,name,isdm for a given gpg fingerprint
+
+    @type fpr: string
+    @param fpr: a 40 byte GPG fingerprint
+
+    @return: (uid, name, isdm)
+    """
+    cursor = DBConn().cursor()
+    cursor.execute( "SELECT u.uid, u.name, k.debian_maintainer FROM fingerprint f JOIN keyrings k ON (f.keyring=k.id), uid u WHERE f.uid = u.id AND f.fingerprint = '%s'" % (fpr))
+    qs = cursor.fetchone()
+    if qs:
+        return qs
+    else:
+        return (None, None, False)
+
+def check_signed_by_key():
+    """Ensure the .changes is signed by an authorized uploader."""
+
+    (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(changes["fingerprint"])
+    if uid_name == None:
+        uid_name = ""
+
+    # match claimed name with actual name:
+    if uid is None:
+        # This is fundamentally broken, but we need to refactor how we get
+        # the UIDs/Fingerprints before we can fix it properly
+        uid, uid_email = changes["fingerprint"], uid
+        may_nmu, may_sponsor = 1, 1
+        # XXX by default new dds don't have a fingerprint/uid in the db atm,
+        #     and can't get one in there if we don't allow nmu/sponsorship
+    elif is_dm is False:
+        # If is_dm is False, we allow full upload rights
+        uid_email = "%s@debian.org" % (uid)
+        may_nmu, may_sponsor = 1, 1
+    else:
+        # Assume limited upload rights unless we've discovered otherwise
+        uid_email = uid
+        may_nmu, may_sponsor = 0, 0
+
+
+    if uid_email in [changes["maintaineremail"], changes["changedbyemail"]]:
+        sponsored = 0
+    elif uid_name in [changes["maintainername"], changes["changedbyname"]]:
+        sponsored = 0
+        if uid_name == "": sponsored = 1
+    else:
+        sponsored = 1
+        if ("source" in changes["architecture"] and
+            uid_email and utils.is_email_alias(uid_email)):
+            sponsor_addresses = utils.gpg_get_key_addresses(changes["fingerprint"])
+            if (changes["maintaineremail"] not in sponsor_addresses and
+                changes["changedbyemail"] not in sponsor_addresses):
+                changes["sponsoremail"] = uid_email
+
+    if sponsored and not may_sponsor:
+        reject("%s is not authorised to sponsor uploads" % (uid))
+
+    cursor = DBConn().cursor()
+    if not sponsored and not may_nmu:
+        source_ids = []
+        cursor.execute( "SELECT s.id, s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = %(source)s AND s.dm_upload_allowed = 'yes'", changes )
+
+        highest_sid, highest_version = None, None
+
+        should_reject = True
+        while True:
+            si = cursor.fetchone()
+            if not si:
+                break
+
+            if highest_version == None or apt_pkg.VersionCompare(si[1], highest_version) == 1:
+                highest_sid = si[0]
+                highest_version = si[1]
+
+        if highest_sid == None:
+            reject("Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version" % changes["source"])
+        else:
+
+            cursor.execute("SELECT m.name FROM maintainer m WHERE m.id IN (SELECT su.maintainer FROM src_uploaders su JOIN source s ON (s.id = su.source) WHERE su.source = %s)" % (highest_sid))
+
+            while True:
+                m = cursor.fetchone()
+                if not m:
+                    break
+
+                (rfc822, rfc2047, name, email) = utils.fix_maintainer(m[0])
+                if email == uid_email or name == uid_name:
+                    should_reject=False
+                    break
+
+        if should_reject:
+            reject("%s is not in Maintainer or Uploaders of source package %s" % (uid, changes["source"]))
+
+        for b in changes["binary"].keys():
+            for suite in changes["distribution"].keys():
+                suite_id = DBConn().get_suite_id(suite)
+
+                cursor.execute("SELECT DISTINCT s.source FROM source s JOIN binaries b ON (s.id = b.source) JOIN bin_associations ba ON (b.id = ba.bin) WHERE b.package = %(package)s AND ba.suite = %(suite)s", {'package': b, 'suite': suite_id})
+                while True:
+                    s = cursor.fetchone()
+                    if not s:
+                        break
+
+                    if s[0] != changes["source"]:
+                        reject("%s may not hijack %s from source package %s in suite %s" % (uid, b, s[0], suite))
+
+        for f in files.keys():
+            if files[f].has_key("byhand"):
+                reject("%s may not upload BYHAND file %s" % (uid, f))
+            if files[f].has_key("new"):
+                reject("%s may not upload NEW file %s" % (uid, f))
+
+
+################################################################################
+################################################################################
+
+# If any file of an upload has a recent mtime, chances are good that the
+# file is still being uploaded.
+
+def upload_too_new():
+    too_new = 0
+    # Change into the upload's directory so the relative filenames can be
+    # stat()ed for accurate time stamps
+    cwd = os.getcwd()
+    os.chdir(pkg.directory)
+    file_list = pkg.files.keys()
+    file_list.extend(pkg.dsc_files.keys())
+    file_list.append(pkg.changes_file)
+    for f in file_list:
+        try:
+            last_modified = time.time()-os.path.getmtime(f)
+            if last_modified < int(Cnf["Dinstall::SkipTime"]):
+                too_new = 1
+                break
+        except OSError:
+            # the file may have vanished in the meantime; ignore it
+            pass
+    os.chdir(cwd)
+    return too_new
+
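+# A caller might use this to postpone processing (sketch, not the actual
+# call site):
+#
+#   if upload_too_new():
+#       return  # leave the upload for a later queue run
+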
+################################################################################
+
+def action ():
     # changes["distribution"] may not exist in corner cases
     # (e.g. unreadable changes files)
     if not u.pkg.changes.has_key("distribution") or not isinstance(u.pkg.changes["distribution"], DictType):
index 005d109eadfa1f0d1bd8c3af99abcc44feea4b26..9c4e7d8bc7e97010b23d0dc1b21f349e723f2c93 100755 (executable)
@@ -84,9 +84,7 @@ def init():
     if Options["help"]:
         usage()
 
-    whoami = os.getuid()
-    whoamifull = pwd.getpwuid(whoami)
-    username = whoamifull[0]
+    username = utils.getusername()
     if username != "dak":
         print "Non-dak user: %s" % username
         Options["sudo"] = "y"
index fe7d1931c79c911604c31527fc660c22809d3448..cbdfad04e5a4504f204210802bdc5efc9236a75f 100755 (executable)
@@ -849,8 +849,8 @@ def has_new_comment(package, version, ignore_trainee=False):
     @type version: string
     @param version: package version
 
-    @type version: boolean
-    @param version: ignore trainee comments
+    @type ignore_trainee: boolean
+    @param ignore_trainee: ignore trainee comments
 
     @rtype: boolean
     @return: true/false
index dc201e3f9a61ff4dcced2658db3f41e570a1c032..a9dea9201cd7e25aa7373461eccfd901468022c8 100755 (executable)
@@ -696,6 +696,9 @@ def warn(msg):
 def whoami ():
     return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
 
+def getusername ():
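+    # Return the login name (pw_name); whoami() above returns the user's
+    # full name from the GECOS field instead.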
+    return pwd.getpwuid(os.getuid())[0]
+
 ################################################################################
 
 def size_type (c):
index c0e831f3df23a4f5fc09135c39c3998e27b58a7c..a20cfc9f06d548520d807d8c2b847d5f4275f02d 100644 (file)
@@ -243,8 +243,8 @@ dak and others.  It should indicate the name of the distribution.
 BugServer (required): is used by 'dak process-unchecked' and 'dak rm'
 when closing bugs.
 
-PackagesServer (requried): used by 'dak rm' when carbon-copying a bug
-close mail to a package maintainer.
+PackagesServer (required): used by 'dak rm' and 'dak override' when
+carbon-copying a bug close mail to a package maintainer.
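+For example (hypothetical value, following dak.conf syntax):
+
+  PackagesServer "packages.debian.org";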
 
 TrackingServer (optional): used by 'dak process-unchecked' and 'dak
 rm' to send messages for the maintainer also to an alias for people
diff --git a/docs/talks/DebConf9/Makefile b/docs/talks/DebConf9/Makefile
new file mode 100644 (file)
index 0000000..39f2321
--- /dev/null
@@ -0,0 +1,28 @@
+# $Id: $
+
+LATEX = latex
+PDFLATEX = pdflatex
+DVIPS = dvips
+BIBTEX = bibtex
+PDFVIEW = xpdf -fullscreen
+
+SRC := $(shell egrep -l '^[^%]*\\begin\{document\}' *.tex)
+DVI = $(SRC:%.tex=%.dvi)
+PDF = $(SRC:%.tex=%.pdf)
+PS  = $(SRC:%.tex=%.ps)
+
+all: pdf
+
+$(PDF): %.pdf : %.tex
+	# run pdflatex three times so the TOC and cross-references settle
+       @$(PDFLATEX) $<
+       @$(PDFLATEX) $<
+       @$(PDFLATEX) $<
+
+show:
+       $(PDFVIEW) $(PDF)
+
+pdf: $(PDF)
+
+clean:
+       -rm -f $(DVI) $(PDF) $(DVI:%.dvi=%.aux) $(DVI:%.dvi=%.log) $(DVI:%.dvi=%.out) $(DVI:%.dvi=%.toc) $(DVI:%.dvi=%.nav) $(DVI:%.dvi=%.snm)
diff --git a/docs/talks/DebConf9/background.jpg b/docs/talks/DebConf9/background.jpg
new file mode 100644 (file)
index 0000000..d20a9c5
Binary files /dev/null and b/docs/talks/DebConf9/background.jpg differ
diff --git a/docs/talks/DebConf9/ftpmaster.pdf b/docs/talks/DebConf9/ftpmaster.pdf
new file mode 100644 (file)
index 0000000..af4e916
Binary files /dev/null and b/docs/talks/DebConf9/ftpmaster.pdf differ
diff --git a/docs/talks/DebConf9/ftpmaster.tex b/docs/talks/DebConf9/ftpmaster.tex
new file mode 100644 (file)
index 0000000..30a5a3e
--- /dev/null
@@ -0,0 +1,432 @@
+\documentclass{beamer}
+%\documentclass[draft]{beamer}
+
+\usepackage[german]{babel}
+\usepackage[latin1]{inputenc}
+\usepackage{times}
+\usepackage[T1]{fontenc}
+\usepackage{url}
+
+\mode<presentation>{
+  \usetheme{Madrid}
+  \hypersetup{pdfpagemode=FullScreen}
+  \usecolortheme{albatross}
+  \useinnertheme[shadow]{rounded}
+  \usefonttheme{serif}
+  \usefonttheme{structurebold}
+% \beamerdefaultoverlayspecification{<+-| alert@+>}
+%  \setbeamercovered{transparent}
+}
+
+\title[FTPTeam]{The Debian FTP Team}
+\subtitle{Masters, Assistants, Trainees \\ \alert{Motto: Fuck it! What could possibly go wrong?}}
+
+\author[Joerg Jaspert]{Joerg Jaspert \\ \texttt{joerg@debian.org}}
+\date{Cáceres, July 2009}
+
+\institute[DebConf9]{Debian Conference 2009}
+
+% \AtBeginSection[]
+% {
+%   \begin{frame}<beamer>{Outline}
+%     \tableofcontents[currentsection] %,currentsubsection]
+%   \end{frame}
+% }
+
+
+\begin{document}
+
+\setbeamertemplate{navigation symbols}{}
+{\setbeamertemplate{background}{\includegraphics[width=\paperwidth]{background.jpg}}
+
+\begin{frame}
+  \titlepage
+\end{frame}
+
+\begin{frame}[allowframebreaks,allowdisplaybreaks]{Outline}
+\tiny
+  \tableofcontents
+\end{frame}
+
+\section{History}
+\subsection{Software / Hosting}
+\begin{frame}{History - Scripts}{\alert{Motto: Fuck it! What could possibly go wrong?}}
+  \begin{itemize}
+  \item Ian Murdock/Jackson makeshift scripts
+  \item Monolithic perl dinstall written by Guy Maor
+    \begin{itemize}
+    \item Not exactly secure: upload, shipit
+    \item Not even using PGP either.
+    \end{itemize}
+  \item Katie: a rewrite in python. Known as DAK.
+  \end{itemize}
+\end{frame}
+
+\begin{frame}{History - Hosting}{\alert{Oh fuck, what just went wrong?}}
+  \begin{itemize}
+  \item Under a desk @ cmich.edu
+  \item First master.d.o by Bdale @ HP
+
+{\tiny It was an HP Vectra 486 tower system with a 66MHz CPU upgrade, an
+  Adaptec 1740A EISA SCSI controller and two disk drives.  A 330Mb
+  root disk and a 660Mb disk to hold the archive.  Both were 5.25"
+  full-height drives.  All the pieces came off pallets of materials
+  heading out for scrap. Before it left Bdale, the archive disk got
+  swapped out for a 1.3Gb drive, also 5.25" full-height SCSI and a
+  cast-off heading for scrap.
+
+  The first machine to host .debian.org using Debian.}
+
+\pause
+
+  \item Moved to I-Connect
+  \item Then to Novare for several years
+  \item Then to Visi.Net, as 'ftp-master.d.o'
+  \item Relocated to above.net
+  \item Then to the HP DC in Ft. Collins
+  \item Currently at Brown.edu
+  \end{itemize}
+
+\end{frame}
+
+
+\subsection{Archive}
+\begin{frame}{History - Archive}{\alert{That guy impressed me and I am not easily impressed. Wow. BLUE Hair }}
+  \begin{itemize}
+    \item Architectures / Releases:
+      \begin{description}
+        \item[Bo] 1: i386
+        \item[Hamm] 2: + m68k
+        \item[Slink] 4: + sparc, alpha
+        \item[Potato] 6: + powerpc, arm
+        \item[Woody] 11: + mips, mipsel, hppa, ia64, s390
+        \item[Sarge] 11 (unofficial amd64 release)
+        \item[Etch] 11: + amd64, - m68k (there is a semi-official etch-m68k release)
+        \item[Lenny] 12: + armel
+        \item[Squeeze] ?: + kfreebsd-i386, kfreebsd-amd64, - arm, ...
+      \end{description}
+
+    \item  Proposed Architectures:
+      \begin{itemize}
+      \item avr32
+      \item sh\{3,4\}
+      \end{itemize}
+    \end{itemize}
+\end{frame}
+
+\begin{frame}{History - Archive size}{\alert{Ganneff - if it goes wrong, we make it ``Fuck it what do we care?'', but dont tell anyone.}}
+  \begin{itemize}
+  \item Releases:
+    \begin{description}
+    \item[Buzz] 229Mb
+    \item[Rex] 306Mb
+    \item[Bo] 1.2Gb
+    \item[Hamm] 2.1Gb
+    \item[Slink] 4.1Gb
+    \item[Potato] 6.7Gb
+    \item[Woody] 27Gb
+    \item[Sarge] 57Gb
+    \item[Etch] 82Gb
+    \item[Lenny] 125Gb
+    \item[squeeze+X] 1Tb?
+    \end{description}
+
+\pause
+
+  \item Daily pushes:
+    \begin{description}
+    \item[2005] 200Mb up to 2Gb, average 1Gb. Once a day.
+    \item[2009] 1Gb up to 6Gb, average 2Gb. 4 times a day.
+    \end{description}
+  \end{itemize}
+\end{frame}
+
+\subsection{Team}
+\begin{frame}{History - Team}{\alert{sgran - the world B.G. (Before Ganneff)}}
+  \begin{itemize}
+  \item Ian Murdock
+  \item + later Ian Jackson, Bruce Perens, Bdale Garbee
+  \item Guy Maor takes over (moved to dinstall) [somewhere 1995/1996]
+  \item Richard Braakman and James Troup push in [July 1998] % <elmo> so 1998-07-09, looks like a good date for Richard + Me
+  \item James recruits Antti-Juhani Kaijanaho and Gergely Madarasz [Dec 1999]
+  \item Michael Beattie tricks his way in [August 2000]
+  \item Richard becomes inactive [August 2000]
+  \item Work on katie starts (named da-katie) [2000]
+  \item Anthony Towns joins [January 2001]
+  \item Guy becomes inactive [February 2001]
+  \item Ryan Murray is added [June 2001]
+  \item Randall Donald joins [November 2001]
+  \item Daniel Silverstone follows [August 2003]
+  \end{itemize}
+\end{frame}
+
+% Help from elmo for the dates:
+
+% From: Guy Maor <maor@debian.org>
+% Subject: Re: ftpmaster-y stuff
+% To: James Troup <james@nocrew.org>
+% Date: 21 Nov 1999 17:03:08 -0800
+
+% James Troup <james@nocrew.org> writes:
+
+% > I've started being a bit pro-active with ftpmaster stuff, e.g. getting
+% > Gorgo and Ibid on board and announcing it and stuff.  I'm doing this
+% > on the basis that, you don't mind, and if you did you'd say.  Of
+% > course this fails a bit miserably if you don't [say, that is], so
+% > please do tell me if I'm overstepping my bounds...
+
+% I trust you and Richard completely with the ftpmaster stuff.  You have
+% the authority to do anything as far as I'm concerned.
+
+% ------------------------------------------------------------------------
+
+% Guy
+
+% From: Richard Braakman <dark@xs4all.nl>
+% Subject: Re: New ftpmaster (?) Michael Beattie (aka Omnic)
+% To: James Troup <james@nocrew.org>
+% Date: Wed, 16 Aug 2000 13:27:29 +0200
+
+% On Sun, Aug 06, 2000 at 10:29:27PM +0100, James Troup wrote:
+% > person... Omnic is excessively keen and seems to have lots of free
+% > time... unless you have any objections? I'll ask him to join the team
+% > in a couple of days.
+
+% No.  In fact I'm almost writing my retirement notice now -- I'm just working
+% through the current pile of mail before sending anything.
+
+
+\begin{frame}{History - Team}{\alert{mhy - commit it (RE: team motto)}}
+  \begin{itemize}
+  \item Team restructured, Assistants role added [March 2005]
+  \item Joerg Jaspert and Jeroen van Wolffelaar join as Assistants [March 2005]
+  \item Kalle Kivimaa and Thomas Viehmann join as Assistants [February 2008]
+  \item Joerg Jaspert promoted to FTP Master [April 2008]
+  \item Anthony Towns and James Troup leave [April/May 2008]
+  \item Mark Hymers joins as Assistant [July 2008]
+  \item Frank Lichtenheld joins as Assistant [December 2008]
+  \item Thomas Viehmann resigned [December 2008]
+  \item Mike O'Connor joins as Assistant [January 2009]
+  \item Ryan Murray becomes inactive and leaves [March 2009]
+  \item Mark Hymers promoted to FTP Master [March 2009]
+  \item Kalle Kivimaa leaves [July 2009]
+  \end{itemize}
+\end{frame}
+
+\subsection{Present}
+\begin{frame}{The Team}{\alert{Me fail English? That's unpossible.}}
+  FTP Masters
+  \begin{itemize}
+  \item Joerg Jaspert - Ganneff
+  \item Mark Hymers - mhy
+  \end{itemize}
+
+  FTP Assistants
+  \begin{itemize}
+  \item Frank Lichtenheld - djpig
+  \item Mike O'Connor - stew
+  \end{itemize}
+
+  FTP Trainee
+  \begin{itemize}
+  \item Barry deFreese - bdefreese
+  \item You?!
+  \end{itemize}
+
+  Since April 2008 we have almost always had people in the FTP Trainee role.
+\end{frame}
+
+\section{The roles}
+\subsection{FTP Master}
+\begin{frame}{FTP Master}{\alert{Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head.}}
+The FTP Master role, unix group \alert{debadmin}, is ...
+ \begin{block}{(Let's cite Matthew Garrett)}
+   ... responsible for maintaining the infrastructure
+   required to support the archive. This takes the form of the scripts used
+   for processing uploaded packages, but also the flow of packages between
+   distributions.
+ \end{block}
+ \begin{itemize}
+ \item Keep the archive running
+ \item Keep the archive legal
+ \item Support the teams that depend on it (Release, Security, soon backports/volatile)
+ \item Keep the archive up to date with the requirements of the project
+ \end{itemize}
+\end{frame}
+
+\subsection{FTP Assistant}
+\begin{frame}{FTP Assistant}{\alert{Ganneff - see topic. our motto. yeah.}}
+The FTP Assistant role, group \alert{ftpteam}, created in 2005, allows
+people to be added to the FTP Team without having to hand out full FTP
+Master rights.
+
+It allows
+\begin{itemize}
+\item to process NEW,
+\item to handle overrides,
+\item to remove packages.
+\end{itemize}
+
+Thus the most common actions can be performed, while the damage that can
+happen by accident is limited.
+\end{frame}
+
+\subsection{FTP Trainee}
+\begin{frame}{FTP Trainee}{\alert{Kids, you tried your best and you failed miserably. The lesson is, never try. \tiny \\ Hello i18n cabal. :)}}
+The FTP Trainee role, group \alert{ftptrainee}, was created in 2008 to
+allow easy training and testing of future team members.
+
+\begin{itemize}
+\item Initially called ``slave\_NEW''
+
+\pause
+
+\item 6 people trained so far
+\item 2 promoted to Assistants
+\end{itemize}
+
+Trainees can look at NEW and do the usual package checks, but they
+cannot actually accept or reject a package. Instead they leave a note,
+which an Assistant or Master reads and acts on.
+\end{frame}
+
+\section{The job}
+\subsection{Archives}
+\begin{frame}{Archives}{\alert{You can't keep blaming yourself. Just blame yourself once, and move on.}}
+  \begin{itemize}
+  \item The main archive, ftp.debian.org
+  \item The security archive, security.debian.org
+  \item (Soon) The volatile archive, volatile.debian.org (integrated
+    into main archive)
+  \item (Soon) The backports.org archive, www.backports.org (-> backports.debian.org)
+  \end{itemize}
+\end{frame}
+
+\subsection{Removals and Overrides}
+\begin{frame}{Removals and Overrides}{\alert{mhy - I was upset to discover (via grepping my logs) that I actually said that quote in one of the subtitles verbatim}}
+  \begin{itemize}
+  \item Removals
+    \begin{itemize}
+    \item (Semi)-Automagic via cruft-report
+    \item Manual, with help from a nice html page
+    \end{itemize}
+  \item Overrides
+    \begin{itemize}
+    \item Priority / Section
+    \item Override disparity
+    \item Currently by mail, should be changed to using BTS
+    \end{itemize}
+  \end{itemize}
+\end{frame}
+
+\subsection{NEW}
+\begin{frame}{NEW}{\alert{mhy - Ganneff airlines: departing from a window near you}}
+
+NEW checking is about three things. In order of priority:
+\begin{itemize}
+\item trying to keep the archive legal,
+\item trying to keep the package namespace sane,
+\item trying to reduce the number of bugs in Debian.
+\end{itemize}
+
+A short list of the most common checks:
+\begin{itemize}
+\item Check the source for license problems
+  \begin{itemize}
+  \item incompatibility
+  \item non-free license terms
+  \end{itemize}
+\item (sane) package renames / splits
+\item Policy violation
+\item source for everything included?
+\item ...
+\end{itemize}
+\end{frame}
+
+\subsection{Code}
+\begin{frame}{Code}{\alert{Oh, so they have Internet on computers now!}}
+git clone https://ftp-master.debian.org/git/dak.git
+
+\begin{itemize}
+\item Python
+\item and some Shell
+\item and some Perl
+\item at least 9 years old
+\item at minimum 40 different authors
+\end{itemize}
+\end{frame}
+
+\section{How to help / join}
+
+\subsection{QA Team}
+\begin{frame}{Join the Army, err, QA Team}{\alert{mhy - oh god. Ganneff has been on TV? I'm surprised the sets didn't all explode}}
+  \begin{itemize}
+  \item Do QA work
+  \item File removal bugs for unmaintained, outdated or otherwise no longer needed packages
+  \end{itemize}
+\end{frame}
+
+\subsection{Bug triage}
+\begin{frame}{Bug triage}{\alert{I am so smart! I am so smart! S-M-R-T! I mean S-M-A-R-T...}}
+  \begin{itemize}
+  \item Retitle bugs to fit our removal format
+  \item Close bugs that don't apply (anymore)
+  \item Make sure removals can happen (rev-deps anyone?)
+  \end{itemize}
+\end{frame}
+
+\subsection{Write Code}
+\begin{frame}{Write Code}{\alert{I'm normally not a praying man, but if you're up there, please save me Superman.}}
+
+\end{frame}
+
+\subsection{Join the team}
+\begin{frame}{Join the team}{\alert{Hi, my name's Moe. Or as the ladies like to refer to me, 'hey you in the bushes'}}
+Join us. Mail \href{mailto:ftpmaster@debian.org}{ftpmaster@debian.org}.
+
+Join us. Mail \href{mailto:ftpmaster@debian.org}{ftpmaster@debian.org}.
+
+Join us. Mail \href{mailto:ftpmaster@debian.org}{ftpmaster@debian.org}.
+
+Join us. Mail \href{mailto:ftpmaster@debian.org}{ftpmaster@debian.org} NOW.
+\end{frame}
+
+\section{Final}
+
+\subsection{The future}
+\begin{frame}{The future}{\alert{Facts are meaningless. You could use facts to prove anything that's even remotely true!}}
+  \begin{itemize}
+  \item Replace database code in dak with sqlalchemy
+  \item source v3
+  \item Split the long description out of the Packages files, making it
+    nothing more than another translation
+  \item debtags integration, death of sections
+  \item tdebs
+  \item ddebs
+  \item lintian autorejects
+  \item autosigning
+  \item binary throw away
+  \item ...
+  \end{itemize}
+\end{frame}
+
+
+\begin{frame}{Thank you for sleeping through this talk}{\alert{Ganneff - (trust full, when had i been that drunk?)}}
+  \begin{center}
+  Joerg Jaspert \\
+  \href{mailto:ftpmaster@debian.org}{ftpmaster@debian.org}
+
+  \href{http://ftp-master.debian.org/}{http://ftp-master.debian.org/}
+
+  \href{https://ftp-master.debian.org/git/dak.git}{https://ftp-master.debian.org/git/dak.git}
+
+
+
+  \vfill
+
+
+  \end{center}
+\end{frame}
+
+
+\end{document}
diff --git a/scripts/debian/byhand-dm b/scripts/debian/byhand-dm
deleted file mode 100755 (executable)
index da7ddd4..0000000
+++ /dev/null
@@ -1,70 +0,0 @@
-#!/bin/sh -e
-
-BYHAND="$1"
-VERSION="$2"
-ARCH="$3"
-CHANGES="$4"
-
-KEYRING=/srv/keyring.debian.org/keyrings/debian-keyring.gpg
-
-DESTKR=/srv/ftp.debian.org/keyrings/debian-maintainers.gpg
-
-get_id () {
-  echo "SELECT U.name, U.uid FROM fingerprint F JOIN uid U ON (F.uid = U.id) WHERE F.fingerprint = '$1';" |
-    psql projectb -At |
-    sed 's/|\(.*\)/ <\1@debian.org>/'
-}
-
-is_allowed () {
-  echo "SELECT M.name from src_uploaders U join source S on (U.source = S.id) join maintainer M on (U.maintainer = M.id) WHERE S.source = 'debian-maintainers';" |
-    psql projectb -At | 
-    while read ALLOWED; do
-      if [ "$1" = "$ALLOWED" ]; then
-        echo yes
-       break
-      fi
-    done
-}
-
-FPRINT=$(gpgv --keyring "$KEYRING" --status-fd 3 3>&1 >/dev/null 2>&1 "$CHANGES" |
-    cut -d\  -f2,3 | grep ^VALIDSIG | head -n1 | cut -d\  -f2)
-
-ID="$(get_id "$FPRINT")"
-
-if [ "$(is_allowed "$ID")" != "yes" ]; then
-  echo "Unauthorised upload by $ID"
-  exit 1
-fi
-
-echo "Authorised upload by $ID, copying into place"
-
-OUT=$(mktemp)
-
-cp "$BYHAND" "$DESTKR"
-dak import-keyring --generate-users "%s" "$DESTKR" >$OUT
-
-if [ -s "$OUT" ]; then
-  /usr/sbin/sendmail -odq -oi -t <<EOF
-From: $ID
-To: <debian-project@lists.debian.org>
-Subject: Updated Debian Maintainers Keyring
-Content-Type: text/plain; charset=utf-8
-MIME-Version: 1.0
-
-With the upload of debian-maintainers version $VERSION, the following
-changes to the keyring have been made:
-
-$(cat $OUT)
-
-A summary of all the changes in this upload follows.
-
-Debian distribution maintenance software,
-on behalf of,
-$ID
-
-$(cat $CHANGES)
-EOF
-fi
-rm -f "$OUT"
-
-exit 0
diff --git a/scripts/debian/ftpstats.R b/scripts/debian/ftpstats.R
new file mode 100644 (file)
index 0000000..62846c7
--- /dev/null
@@ -0,0 +1,20 @@
+arch <- c("source", "all", "amd64", "i386", "alpha", "arm", "armel", "hppa", "hurd-i386", "ia64",
+       "mips", "mipsel", "powerpc", "s390", "sparc")
+palette(c("midnightblue", "gold", "turquoise", "cyan", "black", "red", "OrangeRed", "green3", "blue",
+       "magenta", "tomato4",
+        "violetred2","thistle4", "steelblue2", "springgreen4",
+       "salmon","gray"))
+cname <- c("date",arch)
+plotsize <- function(file,title,p,height=11.8,width=16.9) {
+	# honour the function's width/height arguments instead of hardcoding them
+	bitmap(file=file,type="png16m",width=width,height=height)
+	barplot(t(p),col = 1:15, main=title,
+		xlab="date", ylab="size (MiB)")
+	# label the legend from the plotted data, not the global table
+	legend(par("usr")[1]+xinch(5),par("usr")[4]-yinch(0.1),legend=colnames(p),
+		ncol=3,fill=1:15,xjust=1,yjust=1)
+}
+t <- (read.table("/org/ftp.debian.org/misc/ftpstats.data",sep=",",header=0,row.names=1,col.names=cname))/1024/1024
+v <- t[(length(t$all)-90):(length(t$all)),1:15]
+
+#plotsize("/org/ftp.debian.org/web/size.png","Daily dinstall run size by arch",t)
+plotsize("/org/ftp.debian.org/web/size-quarter.png","Daily dinstall run size by arch (past quarter)",v)
+
index 725938d3e370b3f89ea227b9def5b9386da9920c..7e521e8afb35e2b734e2b2511e30688b3f5ad487 100644 (file)
@@ -21,10 +21,11 @@ master archive (__MASTER_ARCHIVE__) and will not propagate to any
 mirrors (__PRIMARY_MIRROR__ included) until the next cron.daily run at the
 earliest.
 
-Packages are never removed from testing by hand.  Testing tracks
+Packages are usually not removed from testing by hand. Testing tracks
 unstable and will automatically remove packages which were removed
 from unstable when removing them from testing causes no dependency
-problems.
+problems. The release team can force a removal from testing if it is
+really needed; please contact them if you believe this is the case.
 
 Bugs which have been reported against this package are not automatically
 removed from the Bug Tracking System.  Please check all open bugs and
index 6de4931a23f48cd270489fc2d731b5ce3eda7d25..e6ba85aec7c06767d7f734459414dbee0b3d58e1 100644 (file)
@@ -65,7 +65,7 @@ $statusdelay = 30;
 # names of the keyring files
 @keyrings = ( "/srv/keyring.debian.org/keyrings/debian-keyring.gpg",
               "/srv/keyring.debian.org/keyrings/debian-keyring.pgp",
-              "/srv/ftp.debian.org/keyrings/debian-maintainers.gpg" );
+              "/srv/keyring.debian.org/keyrings/debian-maintainers.gpg" );
 
 # our log file
 $logfile = "$queued_dir/log";
index 564fa5983b50a319d49b3f2e9e6e6a741456447b..633972fa860ee5e968d99c1b8c0222290c45eb32 100644 (file)
@@ -64,7 +64,8 @@ $statusdelay = 30;
 
 # names of the keyring files
 @keyrings = ( "/srv/keyring.debian.org/keyrings/debian-keyring.gpg",
-              "/srv/keyring.debian.org/keyrings/debian-keyring.pgp");
+              "/srv/keyring.debian.org/keyrings/debian-keyring.pgp",
+              "/srv/keyring.debian.org/keyrings/debian-maintainers.gpg" );
 
 # our log file
 $logfile = "$queued_dir/log";
index 256561a7d479be6695681060c54ad903a9e9caf3..af8d70daef1a1cb14e497e350bad386234380937 100755 (executable)
@@ -21,6 +21,9 @@ use Net::Ping;
 use Net::FTP;
 use Socket qw( PF_INET AF_INET SOCK_STREAM );
 use Config;
+use Sys::Hostname;
+
+setlocale(&POSIX::LC_ALL, "C");
 
 # ---------------------------------------------------------------------------
 #                                                              configuration
@@ -61,6 +64,8 @@ package main;
 
 ( $main::progname = $0 ) =~ s,.*/,,;
 
+($main::hostname, undef, undef, undef, undef) = gethostbyname(hostname());
+
 my %packages = ();
 
 # extract -r and -k args
@@ -135,7 +140,7 @@ if ( !@ARGV ) {
     POSIX::sigsuspend($sigset);
     waitpid( $pid, WNOHANG );
     if ( kill( 0, $pid ) ) {
-      print "Daemon started in background (pid $pid)\n";
+      print "Daemon (on $main::hostname) started in background (pid $pid)\n";
       exit 0;
     } else {
       exit 1;
@@ -165,9 +170,7 @@ my $parent_pid = $ARGV[1];
 
 do {
   my $version;
-  ( $version =
-'Release: 0.9 $Revision: 1.51 $ $Date: 1999/07/08 09:43:21 $ $Author: ftplinux $'
-  ) =~ s/\$ ?//g;
+  # no RCS keywords left to strip, so assign the version string directly
+  $version = 'Release: 0.95';
   print "debianqueued $version\n";
 };
 
@@ -329,7 +332,7 @@ open( STDERR, ">&LOG" )
   or die "$main::progname: Can't redirect stderr to $conf::logfile: $!\n";
 
 # ok, from this point usually no "die" anymore, stderr is gone!
-msg( "log", "daemon (pid $$) started\n" );
+msg( "log", "daemon (pid $$) (on $main::hostname) started\n" );
 
 # initialize variables used by send_status before launching the status daemon
 $main::dstat = "i";
@@ -1208,7 +1211,9 @@ outer_loop: while (<COMMANDS>) {
           $selecteddelayed = $1;
           s,^DELAYED/[0-9]+-day/,,;
         }
-        if ( $origword eq "--searchdirs" ) {
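+        # Refuse any filename component that starts with a wildcard before
+        # doing anything else with it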
+        if (m,(^|/)\*,) {
+          msg("mail,log", "$_: filename component cannot start with a wildcard\n");
+        } elsif ( $origword eq "--searchdirs" ) {
           $selecteddelayed = -2;
         } elsif (m,/,) {
           msg(
@@ -2339,7 +2344,7 @@ __MESSAGE__
   }
 
   $message .= "\n$text";
-  $message .= "\nGreetings,\n\n\tYour Debian queue daemon\n";
+  $message .= "\nGreetings,\n\n\tYour Debian queue daemon (running on host $main::hostname)\n";
 
   my $mail = Email::Send->new;
   for (qw[Sendmail SMTP]) {
diff --git a/web/ftpmaster.pdf b/web/ftpmaster.pdf
new file mode 120000 (symlink)
index 0000000..73688e1
--- /dev/null
@@ -0,0 +1 @@
+../docs/talks/DebConf9/ftpmaster.pdf
\ No newline at end of file
index 1597e126583eb5ebd9088b0bf5b8719698e125d1..1b684159c7cd2a2ea6595cb7dc4b9d8e686a811e 100644 (file)
@@ -46,6 +46,7 @@
                        <li><a href="#rejections">Rejections</a></li>
 <!--                   <li><a href="#dependencies">Unmet Dependencies</a></li>-->
                        <li><a href="#archivecriteria">Archive Criteria</a></li>
+                       <li><a href="#talks">Talks</a></li>
                  </ul>
                </div>
 
@@ -65,7 +66,7 @@
                <div id="ftpteam">
             <h1>The ftpmaster team</h1>
 
-            <p>The members of ftpmaster currently are divided into two groups, FTP Master and FTP Assistants.</p>
+           <p>The members of ftpmaster are currently divided into three groups: FTP Masters, FTP Assistants and FTP Trainees.</p>
             <p>Members of FTP Master are:</p>
             <ul>
                 <li>Joerg Jaspert</li>
             </ul>
             <p>The FTP Assistants are:</p>
             <ul>
-                <li>Kalle Kivimaa</li>
                                <li>Frank Lichtenheld</li>
                                <li>Mike O'Connor</li>
+                               <li>Alexander Reichle-Schmehl</li>
+                               <li>Chris Lamb</li>
+                               <li>Barry deFreese</li>
+                               <li>Torsten Werner</li>
             </ul>
             
             <p>This information (and more like it) is available from
             <a href="http://www.debian.org/intro/organization">Debian's Organizational Structure</a>.</p>
+
+           <p>The FTP Master role, unix group <tt>debadmin</tt>, is responsible for:</p>
+           <ul>
+                   <li>Keeping the archive running</li>
+                   <li>Keeping the archive legal</li>
+                   <li>Supporting the teams that depend on it (Release, Security, soon backports/volatile)</li>
+                   <li>Keeping the archive up to date with the requirements of the project</li>
+           </ul>
+
+           <p>The FTP Assistant role, unix group <tt>ftpteam</tt>, created in 2005,
+           allows the addition of people to the FTP Team without having to hand out
+           full FTP Master rights. It allows</p>
+           <ul>
+                   <li>to process NEW,</li>
+                   <li>to handle overrides,</li>
+                   <li>to remove packages.</li>
+           </ul>
+
+           <p>The FTP Trainee role, group <tt>ftptrainee</tt>, was created in 2008 to allow easy training and testing of future team members.</p>
+            <p>Trainees can look at NEW and do the usual package checks, but they cannot actually accept or reject a package. Instead they leave a note, which an Assistant or Master reads and acts on.</p>
         </div>
 
         <div id="dak">
             <a href="/archive-criteria.html">Criteria</a> for inclusion in the archive
             of new architectures.
         </div>
+        <div id="talks">
+            <h1>Talks</h1>
+
+            <a href="ftpmaster.pdf">DebConf 2009</a>, Cáceres, Spain.
         </div>
+
+               </div>
         </div>
         </div>
     <hr />
index 80cb926b2fe88c547b02611f49fc22c6894f01bd..c7be05be94a7519982969d542800d5702366e5c3 100644 (file)
@@ -113,7 +113,7 @@ Rejected: file 'bind9_9.1.0-2_i386.deb' has unknown component 'non-US/main'.
       <dd>Specify a valid component and reupload.</dd>
 
       <dt>Notes</dt>
-      <dd>The component is specified as a prefix to the section information in 'debian/control'.  Valid components for uploads to ftp-master.debian.org are: 'main', 'contrib' and 'non-free'.  Valid components for uploads to non-US.debian.org are: 'non-US/main', 'non-US/contrib' and 'non-US/non-free'.</dd>
+      <dd>The component is specified as a prefix to the section information in 'debian/control'.  Valid components for uploads to ftp-master.debian.org are: 'main', 'contrib' and 'non-free'.</dd>
     </dl>
 
     <hr>
@@ -123,8 +123,6 @@ Rejected: file 'bind9_9.1.0-2_i386.deb' has unknown component 'non-US/main'.
     <hr>
     <address><a href="mailto:ftpmaster@ftp-master.debian.org">Archive maintenance team</a></address>
 <!-- Created: Tue Mar  6 00:41:39 GMT 2001 -->
-<!-- hhmts start -->
-Last modified: Sat May 26 21:44:34 BST 2001
-<!-- hhmts end -->
+<!-- hhmts start -->Last modified: Mon Sep  7 23:45:58 CEST 2009 <!-- hhmts end -->
   </body>
 </html>