git.decadent.org.uk Git - dak.git/commitdiff
Merge branch 'master' into bpo
author Joerg Jaspert <joerg@debian.org>
Tue, 7 Sep 2010 22:21:13 +0000 (00:21 +0200)
committer Joerg Jaspert <joerg@debian.org>
Tue, 7 Sep 2010 22:21:13 +0000 (00:21 +0200)
* master:
  Call with the configfile as argument
  Add (C) for uli
  Use the right path
  add config file for removalsrss
  move removals.pl magic constants into config file

Signed-off-by: Joerg Jaspert <joerg@debian.org>
41 files changed:
config/backports.org/Contents.top [deleted file]
config/backports.org/apt.conf [deleted file]
config/backports.org/bpo-copy-packages [deleted file]
config/backports.org/cron.buildd [deleted file]
config/backports.org/cron.daily [deleted file]
config/backports.org/cron.hourly [deleted file]
config/backports.org/cron.monthly [deleted file]
config/backports.org/cron.unchecked [deleted file]
config/backports.org/cron.weekly [deleted file]
config/backports.org/dak.conf [deleted file]
config/backports.org/vars [deleted file]
config/backports/Contents.top [new file with mode: 0644]
config/backports/apt.conf [new file with mode: 0644]
config/backports/bpo-copy-packages [new file with mode: 0755]
config/backports/common [new file with mode: 0644]
config/backports/cron.daily [new file with mode: 0755]
config/backports/cron.dinstall [new file with mode: 0755]
config/backports/cron.monthly [new file with mode: 0755]
config/backports/cron.reboot [new file with mode: 0755]
config/backports/cron.unchecked [new file with mode: 0755]
config/backports/cron.weekly [new file with mode: 0755]
config/backports/dak.conf [new file with mode: 0644]
config/backports/dak.conf-etc [new file with mode: 0644]
config/backports/dinstall.functions [new file with mode: 0644]
config/backports/dinstall.variables [new file with mode: 0644]
config/backports/vars [new file with mode: 0644]
config/debian/dak.conf
daklib/dbconn.py
scripts/backports.org/copyoverrides [deleted file]
scripts/backports.org/mkchecksums [deleted file]
scripts/backports.org/mklslar [deleted file]
scripts/backports.org/mkmaintainers [deleted file]
scripts/backports/copyoverrides [new file with mode: 0755]
scripts/backports/expire_dumps [new symlink]
scripts/backports/mkchecksums [new file with mode: 0755]
scripts/backports/mklslar [new file with mode: 0755]
scripts/backports/mkmaintainers [new file with mode: 0755]
tools/debianqueued-0.9/config
tools/debianqueued-0.9/config-backports [new file with mode: 0644]
tools/debianqueued-0.9/config-upload
tools/debianqueued-0.9/debianqueued

diff --git a/config/backports.org/Contents.top b/config/backports.org/Contents.top
deleted file mode 100644 (file)
index ee791eb..0000000
+++ /dev/null
@@ -1,32 +0,0 @@
-This file maps each file available in the backports.org archive system to
-the package from which it originates.  It includes packages from the
-DIST distribution for the ARCH architecture.
-
-You can use this list to determine which package contains a specific
-file, or whether or not a specific file is available.  The list is
-updated weekly, each architecture on a different day.
-
-When a file is contained in more than one package, all packages are
-listed.  When a directory is contained in more than one package, only
-the first is listed.
-
-The best way to search quickly for a file is with the Unix `grep'
-utility, as in `grep <regular expression> CONTENTS':
-
- $ grep nose Contents
- etc/nosendfile                                          net/sendfile
- usr/X11R6/bin/noseguy                                   x11/xscreensaver
- usr/X11R6/man/man1/noseguy.1x.gz                        x11/xscreensaver
- usr/doc/examples/ucbmpeg/mpeg_encode/nosearch.param     graphics/ucbmpeg
- usr/lib/cfengine/bin/noseyparker                        admin/cfengine
-
-This list contains files in all packages, even though not all of the
-packages are installed on an actual system at once.  If you want to
-find out which packages on an installed Debian system provide a
-particular file, you can use `dpkg --search <filename>':
-
- $ dpkg --search /usr/bin/dselect
- dpkg: /usr/bin/dselect
-
-
-FILE                                                    LOCATION
diff --git a/config/backports.org/apt.conf b/config/backports.org/apt.conf
deleted file mode 100644 (file)
index 1fe25ae..0000000
+++ /dev/null
@@ -1,71 +0,0 @@
-Dir
-{
-   ArchiveDir "/org/backports.org/ftp/";
-   OverrideDir "/org/backports.org/scripts/override/";
-   CacheDir "/org/backports.org/database/";
-};
-
-Default
-{
-   Packages::Compress ". gzip bzip2";
-   Sources::Compress ". gzip bzip2";
-   DeLinkLimit 0;
-   FileMode 0664;
-   Contents::Compress "gzip";
-   MaxContentsChange 12000;
-};
-
-TreeDefault
-{
-   Contents::Header "/org/backports.org/dak-config/Contents.top";
-};
-
-tree "dists/lenny-backports"
-{
-   FileList "/org/backports.org/database/dists/lenny-backports_$(SECTION)_binary-$(ARCH).list";
-   SourceFileList "/org/backports.org/database/dists/lenny-backports_$(SECTION)_source.list";
-   Sections "main contrib non-free";
-   Architectures "alpha amd64 arm armel hppa hurd-i386 i386 ia64 mips mipsel powerpc s390 sparc source";
-   BinOverride "override.lenny-backports.$(SECTION)";
-   ExtraOverride "override.lenny-backports.extra.$(SECTION)";
-   SrcOverride "override.lenny-backports.$(SECTION).src";
-   Packages::Compress ". gzip bzip2";
-   Sources::Compress ". gzip bzip2";
-};
-
-tree "dists/lenny-backports/main"
-{
-   FileList "/org/backports.org/database/dists/lenny-backports_main_$(SECTION)_binary-$(ARCH).list";
-   Sections "debian-installer";
-   Architectures "alpha amd64 arm armel hppa hurd-i386 i386 ia64 mips mipsel powerpc s390 sparc source";
-   BinOverride "override.lenny-backports.main.$(SECTION)";
-   SrcOverride "override.lenny-backports.main.src";
-   BinCacheDB "packages-debian-installer-$(ARCH).db";
-   Packages::Extensions ".udeb";
-   Contents "$(DIST)/../Contents-udeb";
-};
-
-tree "dists/etch-backports"
-{
-   FileList "/org/backports.org/database/dists/etch-backports_$(SECTION)_binary-$(ARCH).list";
-   SourceFileList "/org/backports.org/database/dists/etch-backports_$(SECTION)_source.list";
-   Sections "main contrib non-free";
-   Architectures "alpha amd64 arm hppa hurd-i386 i386 ia64 mips mipsel m68k powerpc s390 sh sparc source";
-   BinOverride "override.etch-backports.$(SECTION)";
-   ExtraOverride "override.etch-backports.extra.$(SECTION)";
-   SrcOverride "override.etch-backports.$(SECTION).src";
-   Packages::Compress ". gzip bzip2";
-   Sources::Compress ". gzip bzip2";
-};
-
-tree "dists/etch-backports/main"
-{
-   FileList "/org/backports.org/database/dists/etch-backports_main_$(SECTION)_binary-$(ARCH).list";
-   Sections "debian-installer";
-   Architectures "alpha amd64 arm hppa hurd-i386 i386 ia64 mips mipsel m68k powerpc s390 sh sparc source";
-   BinOverride "override.etch-backports.main.$(SECTION)";
-   SrcOverride "override.etch-backports.main.src";
-   BinCacheDB "packages-debian-installer-$(ARCH).db";
-   Packages::Extensions ".udeb";
-   Contents "$(DIST)/../Contents-udeb";
-};
diff --git a/config/backports.org/bpo-copy-packages b/config/backports.org/bpo-copy-packages
deleted file mode 100755 (executable)
index 6753705..0000000
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/bin/bash
-
-# Copyright (c) 2005 Peter Palfrader <peter@palfrader.org>
-
-# WARNING: spaces in filenames/paths considered harmful.
-
-export SCRIPTVARS=/org/backports.org/dak/config/backports.org/vars
-. $SCRIPTVARS
-
-cd ${configdir}
-
-for suite in etch lenny; do
-    source="${ftpdir}/dists/${suite}-backports"
-    target="${base}/buildd/dists/${suite}-backports"
-
-    if ! [ -d "$source" ]; then
-           echo "Source '$source' does not exist or is not a directory or we can't acess it." >&2
-           exit 1;
-    fi
-    if ! [ -d "$target" ]; then
-           echo "Target '$target' does not exist or is not a directory or we can't acess it." >&2
-           exit 1;
-    fi
-
-    for file in $( cd "$source" && find . -name 'Packages.gz' -o -name 'Packages' -o -name 'Sources.gz' -o -name 'Sources' -o -name 'Release' ); do
-           basedir=$(dirname "$file")
-           basename=$(basename "$file")
-           targetdir="$target/$basedir"
-           [ -d "$targetdir" ] || mkdir -p "$targetdir"
-           if [ "$basename" = "Release" ]; then
-                   cp -a "$source/$file" "$target/$file"
-                   echo 'NotAutomatic: yes' >> "$target/$file"
-           else
-                   cp -a "$source/$file" "$target/$file"
-           fi
-    done
-
-# postprocess top level Release file
-    if ! [ -e "$target/Release" ]; then
-           echo "Did not find $target/Release after copying stuff.  something's fishy" >&2
-           exit 1;
-    fi
-
-    cd "$target"
-
-    perl -a -p -i -e '
-       if (substr($_,0,1) eq " ") {
-               if ($in_sha1 || $in_md5) {
-                       ($hash, $size, $file) = @F;
-                       $_="",next unless -f $file;
-
-                       (undef,undef,undef,undef,undef,undef,undef,$filesize,
-                        undef,undef,undef,undef,undef) = stat($file);
-                       if ($size != $filesize) {
-                               if ($in_sha1) {
-                                       $hash = `sha1sum "$file" | cut -d " " -f 1`
-                               } else {
-                                       $hash = `md5sum "$file" | cut -d " " -f 1`
-                               };
-                               chomp $hash;
-                               $_ = sprintf(" %s %16d %s\n", $hash, $filesize, $file);
-                       }
-               }
-       } else {
-               $in_sha1 = ($F[0] eq "SHA1:") ? 1 : 0;
-               $in_md5  = ($F[0] eq "MD5Sum:") ? 1 : 0;
-       }
-' Release
-
-    rm -f ${base}/buildd/dists/${suite}-backports/Release.gpg
-    gpg --no-options --batch --no-tty --secret-keyring ${base}/s3kr1t/dot-gnupg/secring.gpg --output "Release.gpg" --armor --detach-sign "Release"
-done
diff --git a/config/backports.org/cron.buildd b/config/backports.org/cron.buildd
deleted file mode 100755 (executable)
index f48f98f..0000000
+++ /dev/null
@@ -1,10 +0,0 @@
-#! /bin/bash -e
-
-# Executed hourly via cron, out of katie's crontab.
-# stolen from newraff and adjusted by aba on 2005-04-30
-#exit 0
-
-export SCRIPTVARS=/org/backports.org/dak-config/vars
-. $SCRIPTVARS
-ssh -i $base/s3kr1t/dot-ssh/id_rsa wanna-build@wanna-build.farm.ftbfs.de echo broken
-exit 0
diff --git a/config/backports.org/cron.daily b/config/backports.org/cron.daily
deleted file mode 100755 (executable)
index b84d801..0000000
+++ /dev/null
@@ -1,19 +0,0 @@
-#! /bin/sh
-#
-# Executed daily via cron, out of katie's crontab.
-
-set -e
-export SCRIPTVARS=/org/backports.org/dak-config/vars
-. $SCRIPTVARS
-
-################################################################################
-# Clean out old packages
-dak clean-suites
-dak clean-queues
-
-# Send a report on NEW/BYHAND packages
-dak queue-report | mail -e -s "NEW and BYHAND on $(date +%D)" ftpmaster@backports.org
-# and one on crufty packages
-dak cruft-report | tee $webdir/cruft-report-daily.txt | mail -e -s "Debian archive cruft report for $(date +%D)" ftpmaster@backports.org
-
-echo Daily cron scripts successful.
diff --git a/config/backports.org/cron.hourly b/config/backports.org/cron.hourly
deleted file mode 100755 (executable)
index b5e0646..0000000
+++ /dev/null
@@ -1,117 +0,0 @@
-#! /bin/sh
-#
-# Executed daily via cron, out of katie's crontab.
-set -e
-export SCRIPTVARS=/org/backports.org/dak-config/vars
-. $SCRIPTVARS
-
-################################################################################
-cd $accepted
-
-changes=$(find . -maxdepth 1 -mindepth 1 -type f -name \*.changes | sed -e "s,./,," | xargs)
-
-if [ -z "$changes" ]; then
- exit 0;
-fi
-
-echo Archive maintenance started at $(date +%X)
-
-NOTICE="$ftpdir/Archive_Maintenance_In_Progress"
-LOCKCU="$lockdir/daily.lock"
-LOCKAC="$lockdir/unchecked.lock"
-
-cleanup() {
-  rm -f "$NOTICE"
-  rm -f "$LOCKCU"
-}
-trap cleanup 0
-
-rm -f "$NOTICE"
-lockfile -l 3600 $LOCKCU
-cat > "$NOTICE" <<EOF
-Packages are currently being installed and indices rebuilt.
-Maintenance is automatic, starting hourly at 5 minutes past the hour.
-Most of the times it is finished after about 10 til 15 minutes.
-
-You should not mirror the archive during this period.
-EOF
-
-################################################################################
-
-cd $accepted
-rm -f REPORT
-dak process-accepted -pa *.changes | tee REPORT | \
-     mail -s "Install for $(date +%d.%m.%Y)" ftpmaster@backports.org
-chgrp debadmin REPORT
-chmod 664 REPORT
-
-cd $masterdir
-
-rm -f $LOCKAC
-
-symlinks -d -r $ftpdir
-
-cd $masterdir
-dak make-suite-file-list
-dak generate-filelist
-
-# Generate override files
-cd $overridedir
-dak make-overrides
-
-# Generate Packages and Sources files
-cd $configdir
-apt-ftparchive generate apt.conf
-# Generate *.diff/ incremental updates
-dak generate-index-diffs
-# Generate Release files
-dak generate-releases
-
-# Clean out old packages
-# Now in cron.daily. JJ[03.05.2005.]
-#rhona
-#shania
-
-cd $scriptsdir
-./mkmaintainers
-./copyoverrides
-./mklslar
-./mkchecksums
-
-rm -f $NOTICE
-rm -f $LOCKCU
-echo Archive maintenance finished at $(date +%X)
-
-################################################################################
-
-echo "Creating post-hourly-cron-job backup of projectb database..."
-POSTDUMP=/org/backports.org/backup/dump_$(date +%Y.%m.%d-%H:%M:%S)
-pg_dump projectb > $POSTDUMP
-(cd /org/backports.org/backup; ln -sf $POSTDUMP current)
-
-################################################################################
-
-# Vacuum the database
-echo "VACUUM; VACUUM ANALYZE;" | psql projectb 2>&1 | grep -v "^NOTICE:  Skipping.*only table owner can VACUUM it$"
-
-################################################################################
-
-# Now in cron.daily JJ[03.05.2005]
-# Send a report on NEW/BYHAND packages
-#helena | mail -e -s "NEW and BYHAND on $(date +%D)" ftpmaster@amd64.debian.net
-# and one on crufty package
-#rene | mail -e -s "rene run for $(date +%D)" ftpmaster@amd64.debian.net
-
-################################################################################
-
-(cd /org/backports.org/stats; rm -f master.list; ./dmc.pl get >/dev/null 2>&1; \
-./mirror.pl>$ftpdir/README.mirrors.html; cd $ftpdir; /usr/bin/links -dump README.mirrors.html >README.mirrors.txt)
-
-
-################################################################################
-
-ulimit -m 90000 -d 90000 -s 10000 -v 90000
-
-run-parts --report /org/backports.org/scripts/distmnt
-
-echo Daily cron scripts successful.
diff --git a/config/backports.org/cron.monthly b/config/backports.org/cron.monthly
deleted file mode 100755 (executable)
index f604936..0000000
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/bin/sh
-#
-# Run at the beginning of the month via cron, out of katie's crontab.
-
-set -e
-export SCRIPTVARS=/org/backports.org/dak-config/vars
-. $SCRIPTVARS
-
-################################################################################
-
-DATE=`date -d yesterday +%y%m`
-
-cd ${basedir}/mail/archive
-for m in mail import; do
-    if [ -f $m ]; then
-        mv $m ${m}-$DATE
-        sleep 20
-        gzip -9 ${m}-$DATE
-        chgrp debadmin ${m}-$DATE.gz
-        chmod 660 ${m}-$DATE.gz
-    fi;
-done
-
-DATE=`date +%Y-%m`
-cd ${basedir}/log
-touch $DATE
-rm current
-ln -s $DATE current
-chmod g+w $DATE
-chown dak:debadmin $DATE
-
-dak split-done
-################################################################################
diff --git a/config/backports.org/cron.unchecked b/config/backports.org/cron.unchecked
deleted file mode 100755 (executable)
index bb2337e..0000000
+++ /dev/null
@@ -1,34 +0,0 @@
-#! /bin/sh
-set -e
-export SCRIPTVARS=/org/backports.org/dak-config/vars
-. $SCRIPTVARS
-
-LOCKFILE="$lockdir/unchecked.lock"
-NOTICE="$lockdir/daily.lock"
-
-cleanup() {
-  rm -f "$LOCKFILE"
-  if [ ! -z $LOCKDAILY ]; then
-         rm -f "$NOTICE"
-  fi
-}
-trap cleanup 0
-
-# only run one cron.unchecked
-if lockfile -r3 $LOCKFILE; then
-       cd $unchecked
-
-       changes=$(find . -maxdepth 1 -mindepth 1 -type f -name \*.changes | sed -e "s,./,," | xargs)
-       report=$queuedir/REPORT
-       timestamp=$(date "+%Y-%m-%d %H:%M")
-
-       if [ ! -z "$changes" ]; then
-               echo "$timestamp": "$changes"  >> $report
-               dak process-unchecked -a $changes >> $report
-               echo "--" >> $report
-       else
-               echo "$timestamp": Nothing to do >> $report
-       fi
-fi
-
-rm -f "$LOCKFILE"
diff --git a/config/backports.org/cron.weekly b/config/backports.org/cron.weekly
deleted file mode 100755 (executable)
index 0ab9afd..0000000
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/bin/sh
-#
-# Run once a week via cron, out of katie's crontab.
-
-set -e
-export SCRIPTVARS=/org/backports.org/dak-config/vars
-. $SCRIPTVARS
-
-################################################################################
-
-# Purge empty directories
-
-if [ ! -z "$(find $ftpdir/pool/ -type d -empty)" ]; then
-   find $ftpdir/pool/ -type d -empty | xargs rmdir;
-fi
-
-# Clean up apt-ftparchive's databases
-
-cd $configdir
-apt-ftparchive -q clean apt.conf
-
-################################################################################
diff --git a/config/backports.org/dak.conf b/config/backports.org/dak.conf
deleted file mode 100644 (file)
index 31ef697..0000000
+++ /dev/null
@@ -1,407 +0,0 @@
-Dinstall
-{
-   // Both need to be defined at the moment, but they can point to the
-   // same file.
-   GPGKeyring {
-      "/org/backports.org/keyrings/keyring.gpg";
-   };
-   // To sign the release files. Adjust the keyid!
-   // Note: Key must be without a passphrase or it wont work automagically!
-   SigningKeyring "/org/backports.org/s3kr1t/dot-gnupg/secring.gpg";
-   SigningPubKeyring "/org/backports.org/s3kr1t/dot-gnupg/pubring.gpg";
-   SigningKeyIds "16BA136C";
-   SendmailCommand "/usr/sbin/sendmail -odq -oi -t";
-   MyEmailAddress "Backports.org archive Installer <installer@backports.org>";
-   MyAdminAddress "ftpmaster@backports.org";
-   MyHost "backports.org";  // used for generating user@my_host addresses in e.g. manual_reject()
-   MyDistribution "Backports.org archive"; // Used in emails
-   // Alicia and melanie can use it
-   BugServer "bugs.backports.org";
-   // melanie uses the packages server.
-   // PackagesServer "packages.test.backports.org";
-   // If defined then the package@this.server gets a copy of most of the
-   // actions related to the package. For an example look at
-   // packages.qa.debian.org
-   // TrackingServer "packages.qa.test.backports.org";
-   LockFile "/org/backports.org/lock/dinstall.lock";
-   // If defined this address gets a bcc of all mails.
-   // FIXME: Einrichten wenn das hier produktiv geht!
-   Bcc "backports-archive@lists.backports.org";
-   GroupOverrideFilename "override.group-maint";
-   FutureTimeTravelGrace 28800; // 8 hours
-   PastCutoffYear "1984";
-   SkipTime 300;
-   // If defined then mails to close bugs are sent to the bugserver.
-   CloseBugs "false";
-   OverrideDisparityCheck "true";
-   DefaultSuite "etch-backports";
-   Reject
-   {
-     NoSourceOnly "true";
-     ReleaseTransitions "/org/backports.org/hints/transitions.yaml";
-   };
-   // If set, only send mails to addresses listed there.
-   MailWhiteList "/org/backports.org/dak/config/backports.org/mail-whitelist";
-};
-
-Generate-Index-Diffs
-{
-   Options
-   {
-     TempDir "/org/backports.org/tiffani";
-     MaxDiffs { Default 50; };
-   };
-};
-
-Override
-{
-   MyEmailAddress "Backports.org archive FTP Masters <ftpmaster@backports.org>";
-};
-
-Add-User
-{
-// Should we sent a mail to newly added users?
-  SendEmail "true";
-
-// Should we create an account so they can login?
-// Account will be created with the defaults from adduser, so adjust
-// it's configuration to fit your needs.
-// NOTE: This requires that your dak user has a sudo entry, allowing
-// to run /usr/sbin/useradd!
-  CreateAccount "false";
-
-// Note: This is a comma separated list of additional groupnames to
-// which uma should add the user. NO spaces between the groupnames or
-// useradd will die.
-// Disable it if you dont want or need that feature.
-  GID "debuser";
-
-};
-
-Check-Overrides
-{
-  OverrideSuites
-  {
-    lenny-backports
-    {
-      Process "1";
-//      OriginSuite "Unstable";
-    };
-
-    etch-backports
-    {
-      Process "1";
-//      OriginSuite "Unstable";
-    };
-
-//    Unstable
-//    {
-//    Process "0";
-//  };
-  };
-};
-
-
-Import-Users-From-Passwd
-{
-  // The Primary GID of your users. Using uma it is the gid from group users.
-  ValidGID "1001";
-  // Comma separated list of users who are in Postgres but not the passwd file
-  KnownPostgres "postgres,katie";
-};
-
-Clean-Queues
-{
-  Options
-  {
-    Days 14;
-   };
- MorgueSubDir "queues";
-};
-
-Control-Overrides
-{
-  Options
-  {
-    Component "main";
-    Suite "etch-backports";
-    Type "deb";
-   };
-
- ComponentPosition "prefix"; // Whether the component is prepended or appended to the section name
-};
-
-Rm
-{
-  Options
-  {
-    Suite "etch-backports";
-   };
-
-   MyEmailAddress "Backports.org archive Maintenance <ftpmaster@backports.org>";
-   LogFile "/org/backports.org/ftp/removals.txt";
-};
-
-Import-Archive
-{
-  ExportDir "/org/backports.org/dak/import-archive-files/";
-};
-
-Clean-Suites
-{
-  // How long (in seconds) dead packages are left before being killed
-  StayOfExecution 1209600; // 14 days
-  AcceptedAutoBuildStayOfExecution 86400; // 24 hours
-  MorgueSubDir "pool";
-};
-
-Process-New
-{
-  AcceptedLockFile "/org/backports.org/lock/unchecked.lock";
-};
-
-Suite
-{
-  lenny-backports
-  {
-       Components
-       {
-         main;
-         contrib;
-         non-free;
-       };
-
-       Announce "backports-changes@lists.backports.org";
-       Origin "Backports.org archive";
-       Description "Backports for the Lenny Distribution";
-       CodeName "lenny-backports";
-       OverrideCodeName "lenny-backports";
-       Priority "7";
-       NotAutomatic "yes";
-  };
-
-  etch-backports
-  {
-       Components
-       {
-         main;
-         contrib;
-         non-free;
-       };
-       Announce "backports-changes@lists.backports.org";
-       Origin "Backports.org archive";
-       Description "Backports for the Etch Distribution";
-       CodeName "etch-backports";
-       OverrideCodeName "etch-backports";
-       Priority "7";
-       NotAutomatic "yes";
-  };
-
-};
-
-Dir
-{
-  Root "/org/backports.org/ftp/";
-  Pool "/org/backports.org/ftp/pool/";
-  Templates "/org/backports.org/dak/templates/";
-  PoolRoot "pool/";
-  Lists "/org/backports.org/database/dists/";
-  Log "/org/backports.org/log/";
-  Morgue "/org/backports.org/morgue/";
-  MorgueReject "reject";
-  Lock "/org/backports.org/lock";
-  Override "/org/backports.org/scripts/override/";
-  UrgencyLog "/org/backports.org/testing/urgencies/";
-  Queue
-  {
-    Accepted "/org/backports.org/queue/accepted/";
-    Byhand "/org/backports.org/queue/byhand/";
-    Done "/org/backports.org/queue/done/";
-    Holding "/org/backports.org/queue/holding/";
-    New "/org/backports.org/queue/new/";
-    ProposedUpdates "/org/backports.org/queue/p-u-new/";
-    Reject "/org/backports.org/queue/reject/";
-    Unchecked "/org/backports.org/queue/unchecked/";
-    BTSVersionTrack "/org/backports.org/queue/bts_version_track/";
-    Embargoed "/org/backports.org/queue/Embargoed/";
-    Unembargoed "/org/backports.org/queue/Unembargoed/";
-    OldProposedUpdates "/org/backports.org/queue/Unembargoed/";
-  };
-};
-
-DB
-{
-  Name "projectb";
-  Host "";
-  Port -1;
-};
-
-SuiteMappings
-{
- "propup-version stable-security testing";
- "propup-version testing-security unstable";
-// "map stable proposed-updates";
- "map lenny lenny-backports";
- "map lenny-bpo lenny-backports";
- "map etch etch-backports";
-// formi mag des nit
-// "map stable etch-backports";
- "map etch-bpo etch-backports";
-// "map stable-security proposed-updates";
-// "map-unreleased stable unstable";
-// "map-unreleased proposed-updates unstable";
-// "map testing testing-proposed-updates";
-// "map testing-security testing-proposed-updates";
-// "map-unreleased testing unstable";
-// "map-unreleased testing-proposed-updates unstable";
-};
-
-Architectures
-{
-  source "Source";
-  all "Architecture Independent";
-  alpha "DEC Alpha";
-  amd64 "AMD x86_64 (AMD64)";
-  hurd-i386 "Intel ia32 running the HURD";
-  hppa "HP PA RISC";
-  arm "ARM";
-  armel "ARM EABI";
-  i386 "Intel ia32";
-  ia64 "Intel ia64";
-  m68k "Motorola Mc680x0";
-  mips "MIPS (Big Endian)";
-  mipsel "MIPS (Little Endian)";
-  powerpc "PowerPC";
-  s390 "IBM S/390";
-  sh "Hitatchi SuperH";
-  sparc "Sun SPARC/UltraSPARC";
-};
-
-Archive
-{
-  backports
-  {
-    OriginServer "backports.org";
-    PrimaryMirror "backports.org";
-    Description "Master Archive for Backports.org archive";
-  };
-};
-
-Component
-{
-  main
-  {
-       Description "Main";
-       MeetsDFSG "true";
-  };
-
-  contrib
-  {
-       Description "Contrib";
-       MeetsDFSG "true";
-  };
-
-  non-free
-  {
-        Description "Software that fails to meet the DFSG";
-        MeetsDFSG "false";
-  };
-
-};
-
-Section
-{
-  admin;
-  cli-mono;
-  comm;
-  database;
-  debian-installer;
-  debug;
-  devel;
-  doc;
-  editors;
-  embedded;
-  electronics;
-  fonts;
-  games;
-  gnome;
-  graphics;
-  gnu-r;
-  gnustep;
-  hamradio;
-  haskell;
-  httpd;
-  interpreters;
-  java;
-  kde;
-  kernel;
-  libdevel;
-  libs;
-  lisp;
-  localization;
-  mail;
-  math;
-  misc;
-  net;
-  news;
-  ocaml;
-  oldlibs;
-  otherosfs;
-  perl;
-  php;
-  python;
-  ruby;
-  science;
-  shells;
-  sound;
-  tex;
-  text;
-  utils;
-  web;
-  vcs;
-  video;
-  x11;
-  xfce;
-  zope;
-};
-
-Priority
-{
-  required 1;
-  important 2;
-  standard 3;
-  optional 4;
-  extra 5;
-  source 0; // i.e. unused
-};
-
-OverrideType
-{
-  deb;
-  udeb;
-  dsc;
-};
-
-Location
-{
-  // Pool locations on backports.org
-  /org/backports.org/ftp/pool/
-    {
-      Archive "backports";
-      Type "pool";
-    };
-
-};
-
-Urgency
-{
-  Default "low";
-  Valid
-  {
-    low;
-    medium;
-    high;
-    emergency;
-    critical;
-  };
-};
diff --git a/config/backports.org/vars b/config/backports.org/vars
deleted file mode 100644 (file)
index e61a11b..0000000
+++ /dev/null
@@ -1,45 +0,0 @@
-# locations used by many scripts
-
-base=/org/backports.org
-ftpdir=$base/ftp/
-webdir=$base/web
-
-archs="alpha amd64 arm armel hppa hurd-i386 i386 ia64 m68k mips mipsel powerpc s390 sh sparc"
-
-masterdir=$base/dak/
-overridedir=$base/scripts/override
-extoverridedir=$scriptdir/external-overrides
-configdir=$base/dak/config/backports.org/
-scriptsdir=$base/dak/scripts/backports.org/
-
-queuedir=$base/queue
-unchecked=$queuedir/unchecked/
-accepted=$queuedir/accepted/
-done=$queuedir/done/
-over=$base/over/
-lockdir=$base/lock/
-incoming=$base/incoming
-
-dbdir=$base/database/
-indices=$ftpdir/indices
-
-ftpgroup=debadmin
-
-copyoverrides="lenny-backports.contrib lenny-backports.contrib.src lenny-backports.main lenny-backports.main.debian-installer lenny-backports.main.src lenny-backports.extra.contrib lenny-backports.extra.main"
-
-# Change this to your hostname
-uploadhost=localhost
-uploaddir=/pub/UploadQueue/
-
-# What components to support
-components="main contrib non-free"
-suites="lenny-backports"
-override_types="deb dsc udeb"
-
-# temporary fix only!
-# export TMP=/org/backports.org/tmp
-# export TEMP=/org/backports.org/tmp
-# export TMPDIR==/org/backports.org/tmp
-
-PATH=$masterdir:$PATH
-umask 022
diff --git a/config/backports/Contents.top b/config/backports/Contents.top
new file mode 100644 (file)
index 0000000..e03f7a6
--- /dev/null
@@ -0,0 +1,32 @@
+This file maps each file available in the backports.debian.org archive
+system to the package from which it originates.  It includes packages
+from the DIST distribution for the ARCH architecture.
+
+You can use this list to determine which package contains a specific
+file, or whether or not a specific file is available.  The list is
+updated weekly, each architecture on a different day.
+
+When a file is contained in more than one package, all packages are
+listed.  When a directory is contained in more than one package, only
+the first is listed.
+
+The best way to search quickly for a file is with the Unix `grep'
+utility, as in `grep <regular expression> CONTENTS':
+
+ $ grep nose Contents
+ etc/nosendfile                                          net/sendfile
+ usr/X11R6/bin/noseguy                                   x11/xscreensaver
+ usr/X11R6/man/man1/noseguy.1x.gz                        x11/xscreensaver
+ usr/doc/examples/ucbmpeg/mpeg_encode/nosearch.param     graphics/ucbmpeg
+ usr/lib/cfengine/bin/noseyparker                        admin/cfengine
+
+This list contains files in all packages, even though not all of the
+packages are installed on an actual system at once.  If you want to
+find out which packages on an installed Debian system provide a
+particular file, you can use `dpkg --search <filename>':
+
+ $ dpkg --search /usr/bin/dselect
+ dpkg: /usr/bin/dselect
+
+
+FILE                                                    LOCATION
diff --git a/config/backports/apt.conf b/config/backports/apt.conf
new file mode 100644 (file)
index 0000000..43c4e5d
--- /dev/null
@@ -0,0 +1,97 @@
+Dir
+{
+   ArchiveDir "/srv/backports-master.debian.org/ftp/";
+   OverrideDir "/srv/backports-master.debian.org/scripts/override/";
+   CacheDir "/srv/backports-master.debian.org/database/";
+};
+
+Default
+{
+   Packages::Compress ". gzip bzip2";
+   Sources::Compress ". gzip bzip2";
+   DeLinkLimit 0;
+   FileMode 0664;
+   Contents::Compress "gzip";
+   MaxContentsChange 12000;
+};
+
+TreeDefault
+{
+   Contents::Header "/srv/backports-master.debian.org/dak/config/backports/Contents.top";
+};
+
+tree "dists/lenny-backports"
+{
+   FileList "/srv/backports-master.debian.org/database/dists/lenny-backports_$(SECTION)_binary-$(ARCH).list";
+   SourceFileList "/srv/backports-master.debian.org/database/dists/lenny-backports_$(SECTION)_source.list";
+   Sections "main contrib non-free";
+   Architectures "alpha amd64 arm armel hppa hurd-i386 i386 ia64 mips mipsel powerpc s390 sparc source";
+   BinOverride "override.lenny-backports.$(SECTION)";
+   ExtraOverride "override.lenny-backports.extra.$(SECTION)";
+   SrcOverride "override.lenny-backports.$(SECTION).src";
+   Packages::Compress ". gzip bzip2";
+   Sources::Compress ". gzip bzip2";
+};
+
+tree "dists/lenny-backports/main"
+{
+   FileList "/srv/backports-master.debian.org/database/dists/lenny-backports_main_$(SECTION)_binary-$(ARCH).list";
+   Sections "debian-installer";
+   Architectures "alpha amd64 arm armel hppa hurd-i386 i386 ia64 mips mipsel powerpc s390 sparc source";
+   BinOverride "override.lenny-backports.main.$(SECTION)";
+   SrcOverride "override.lenny-backports.main.src";
+   BinCacheDB "packages-debian-installer-$(ARCH).db";
+   Packages::Extensions ".udeb";
+   Contents "$(DIST)/../Contents-udeb";
+};
+
+tree "dists/squeeze-backports"
+{
+   FileList "/srv/backports-master.debian.org/database/dists/squeeze-backports_$(SECTION)_binary-$(ARCH).list";
+   SourceFileList "/srv/backports-master.debian.org/database/dists/squeeze-backports_$(SECTION)_source.list";
+   Sections "main contrib non-free";
+   Architectures "alpha amd64 arm armel hppa hurd-i386 i386 ia64 mips mipsel powerpc s390 sparc source";
+   BinOverride "override.squeeze-backports.$(SECTION)";
+   ExtraOverride "override.squeeze-backports.extra.$(SECTION)";
+   SrcOverride "override.squeeze-backports.$(SECTION).src";
+   Packages::Compress ". gzip bzip2";
+   Sources::Compress ". gzip bzip2";
+};
+
+tree "dists/squeeze-backports/main"
+{
+   FileList "/srv/backports-master.debian.org/database/dists/squeeze-backports_main_$(SECTION)_binary-$(ARCH).list";
+   Sections "debian-installer";
+   Architectures "alpha amd64 arm armel hppa hurd-i386 i386 ia64 mips mipsel powerpc s390 sparc source";
+   BinOverride "override.squeeze-backports.main.$(SECTION)";
+   SrcOverride "override.squeeze-backports.main.src";
+   BinCacheDB "packages-debian-installer-$(ARCH).db";
+   Packages::Extensions ".udeb";
+   Contents "$(DIST)/../Contents-udeb";
+};
+
+
+tree "dists/etch-backports"
+{
+   FileList "/srv/backports-master.debian.org/database/dists/etch-backports_$(SECTION)_binary-$(ARCH).list";
+   SourceFileList "/srv/backports-master.debian.org/database/dists/etch-backports_$(SECTION)_source.list";
+   Sections "main contrib non-free";
+   Architectures "alpha amd64 arm hppa hurd-i386 i386 ia64 mips mipsel m68k powerpc s390 sh sparc source";
+   BinOverride "override.etch-backports.$(SECTION)";
+   ExtraOverride "override.etch-backports.extra.$(SECTION)";
+   SrcOverride "override.etch-backports.$(SECTION).src";
+   Packages::Compress ". gzip bzip2";
+   Sources::Compress ". gzip bzip2";
+};
+
+tree "dists/etch-backports/main"
+{
+   FileList "/srv/backports-master.debian.org/database/dists/etch-backports_main_$(SECTION)_binary-$(ARCH).list";
+   Sections "debian-installer";
+   Architectures "alpha amd64 arm hppa hurd-i386 i386 ia64 mips mipsel m68k powerpc s390 sh sparc source";
+   BinOverride "override.etch-backports.main.$(SECTION)";
+   SrcOverride "override.etch-backports.main.src";
+   BinCacheDB "packages-debian-installer-$(ARCH).db";
+   Packages::Extensions ".udeb";
+   Contents "$(DIST)/../Contents-udeb";
+};
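
This apt.conf is read by apt-ftparchive, not by apt itself; the cron scripts added in this commit run it from ${configdir}. The clean call shows up in cron.daily and cron.weekly below, and the "packages" stage of cron.dinstall (its timestamp label is "apt-ftparchive") presumably runs the same generate call the old cron.hourly used:

 $ cd $configdir
 $ apt-ftparchive generate apt.conf   # build Packages/Sources/Contents for every "tree" stanza
 $ apt-ftparchive -q clean apt.conf   # prune stale entries from the cache databases (as in cron.daily/cron.weekly)
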
diff --git a/config/backports/bpo-copy-packages b/config/backports/bpo-copy-packages
new file mode 100755 (executable)
index 0000000..7bbeb3c
--- /dev/null
@@ -0,0 +1,72 @@
+#!/bin/bash
+
+# Copyright (c) 2005 Peter Palfrader <peter@palfrader.org>
+
+# WARNING: spaces in filenames/paths considered harmful.
+
+export SCRIPTVARS=/srv/backports-master.debian.org/dak/config/backports.org/vars
+. $SCRIPTVARS
+
+cd ${configdir}
+
+for suite in etch lenny; do
+    source="${ftpdir}/dists/${suite}-backports"
+    target="${base}/buildd/dists/${suite}-backports"
+
+    if ! [ -d "$source" ]; then
+           echo "Source '$source' does not exist or is not a directory or we can't access it." >&2
+           exit 1;
+    fi
+    if ! [ -d "$target" ]; then
+           echo "Target '$target' does not exist or is not a directory or we can't access it." >&2
+           exit 1;
+    fi
+
+    for file in $( cd "$source" && find . -name 'Packages.gz' -o -name 'Packages' -o -name 'Sources.gz' -o -name 'Sources' -o -name 'Release' ); do
+           basedir=$(dirname "$file")
+           basename=$(basename "$file")
+           targetdir="$target/$basedir"
+           [ -d "$targetdir" ] || mkdir -p "$targetdir"
+           if [ "$basename" = "Release" ]; then
+                   cp -a "$source/$file" "$target/$file"
+                   echo 'NotAutomatic: yes' >> "$target/$file"
+           else
+                   cp -a "$source/$file" "$target/$file"
+           fi
+    done
+
+# postprocess top level Release file
+    if ! [ -e "$target/Release" ]; then
+           echo "Did not find $target/Release after copying stuff.  something's fishy" >&2
+           exit 1;
+    fi
+
+    cd "$target"
+
+    perl -a -p -i -e '
+       if (substr($_,0,1) eq " ") {
+               if ($in_sha1 || $in_md5) {
+                       ($hash, $size, $file) = @F;
+                       $_="",next unless -f $file;
+
+                       (undef,undef,undef,undef,undef,undef,undef,$filesize,
+                        undef,undef,undef,undef,undef) = stat($file);
+                       if ($size != $filesize) {
+                               if ($in_sha1) {
+                                       $hash = `sha1sum "$file" | cut -d " " -f 1`
+                               } else {
+                                       $hash = `md5sum "$file" | cut -d " " -f 1`
+                               };
+                               chomp $hash;
+                               $_ = sprintf(" %s %16d %s\n", $hash, $filesize, $file);
+                       }
+               }
+       } else {
+               $in_sha1 = ($F[0] eq "SHA1:") ? 1 : 0;
+               $in_md5  = ($F[0] eq "MD5Sum:") ? 1 : 0;
+       }
+' Release
+
+    rm -f ${base}/buildd/dists/${suite}-backports/Release.gpg
+    gpg --no-options --batch --no-tty --secret-keyring ${base}/s3kr1t/dot-gnupg/secring.gpg --output "Release.gpg" --armor --detach-sign "Release"
+done
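
bpo-copy-packages copies the Packages, Sources and Release indices into ${base}/buildd, appends "NotAutomatic: yes" to every copied Release, lets the perl one-liner recompute the size and hash lines in the top-level Release for the nested Release files that just grew, and finally re-signs it. A quick manual check of the result could look like this (run inside the copied dists/<suite>-backports directory; verification assumes the matching public key is in your GnuPG keyring):

 $ grep '^NotAutomatic:' Release
 NotAutomatic: yes
 $ gpg --verify Release.gpg Release   # detached, armored signature written by the final gpg call
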
diff --git a/config/backports/common b/config/backports/common
new file mode 100644 (file)
index 0000000..b21a392
--- /dev/null
@@ -0,0 +1,91 @@
+# -*- mode:sh -*-
+# log something (basically echo it together with a timestamp)
+#
+# Set $PROGRAM to a string to have it added to the output.
+function log () {
+        if [ -z "${PROGRAM}" ]; then
+                echo "$(date +"%b %d %H:%M:%S") $(hostname -s) [$$] $@"
+        else
+                echo "$(date +"%b %d %H:%M:%S") $(hostname -s) ${PROGRAM}[$$]: $@"
+        fi
+}
+
+# log the message using log() but then also send a mail
+# to the address configured in MAILTO (if non-empty)
+function log_error () {
+        log "$@"
+        if [ -n "${MAILTO}" ]; then
+                echo "$@" | mail -e -s "[$PROGRAM@$(hostname -s)] ERROR [$$]" ${MAILTO}
+        fi
+}
+
+# debug log, only output when DEBUG=1
+function debug () {
+    if [ $DEBUG -eq 1 ]; then
+        log "$*"
+    fi
+}
+
+function wbtrigger() {
+    SSHOPT="-o BatchMode=yes -o ConnectTimeout=30 -o SetupTimeout=240"
+    if lockfile -r 3 -l 3600 "${LOCK_BUILDD}"; then
+        ssh -q -q ${SSHOPT} wbadm@buildd /org/wanna-build/trigger.often
+    fi
+    rm -f "${LOCK_BUILDD}"
+}
+
+# used by cron.dinstall *and* cron.unchecked.
+function make_buildd_dir () {
+    dak manage-build-queues -a
+
+    for dist in $(ls -1 ${incoming}/dists/); do
+        cd ${incoming}/dists/${dist}
+        mkdir -p tree/${STAMP}
+        cp -al ${incoming}/dists/${dist}/buildd/. tree/${STAMP}/
+        ln -sfT tree/${STAMP} ${incoming}/dists/${dist}/current
+        find ./tree -mindepth 1 -maxdepth 1 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
+    done
+
+}
+
+# Do the unchecked processing, in case we have files.
+function do_unchecked () {
+    cd $unchecked
+
+    changes=$(find . -maxdepth 1 -mindepth 1 -type f -name \*.changes | sed -e "s,./,," | xargs)
+    report=$queuedir/REPORT
+    timestamp=$(date "+%Y-%m-%d %H:%M")
+    UNCHECKED_WITHOUT_LOCK=${UNCHECKED_WITHOUT_LOCK:-""}
+
+    echo "$timestamp": ${changes:-"Nothing to do"}  >> $report
+    dak process-upload -a ${UNCHECKED_WITHOUT_LOCK} -d "$unchecked" >> $report
+}
+
+# Do the newstage processing, in case we have files.
+function do_newstage () {
+    cd $newstage
+
+    changes=$(find . -maxdepth 1 -mindepth 1 -type f -name \*.changes | sed -e "s,./,," | xargs)
+    report=$queuedir/REPORT
+    timestamp=$(date "+%Y-%m-%d %H:%M")
+    UNCHECKED_WITHOUT_LOCK=${UNCHECKED_WITHOUT_LOCK:-""}
+
+    echo "$timestamp": ${changes:-"Nothing to do in newstage"}  >> $report
+    dak process-upload -a ${UNCHECKED_WITHOUT_LOCK} -d "$newstage" >> $report
+}
+
+function sync_debbugs () {
+    # sync with debbugs
+    echo "--" >> $report
+    timestamp=$(date "+%Y-%m-%d-%H:%M")
+    mkdir -p $queuedir/${timestamp}
+    rsync -aq $queuedir/bts_version_track/ $queuedir/bts_version_track_archive/${timestamp}
+    rmdir --ignore-fail-on-non-empty $queuedir/${timestamp} # remove if empty.
+#    rsync -aq -e "ssh -o Batchmode=yes -o ConnectTimeout=30 -o SetupTimeout=30" --remove-source-files  $queuedir/bts_version_track/ bugs-sync:/org/bugs.debian.org/versions/queue/ftp-master/ 2>/dev/null && touch $lockdir/synced_bts_version || true
+    NOW=$(date +%s)
+    TSTAMP=$(stat -c %Y $lockdir/synced_bts_version)
+    DIFF=$(( NOW - TSTAMP ))
+    if [ $DIFF -ge 259200 ]; then
+        log "Kids, you tried your best and you failed miserably. The lesson is, never try. (Homer Simpson)"
+    fi
+}
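
common is a shell library rather than a standalone script: cron.dinstall, cron.reboot and cron.unchecked below load it with ". ${configdir}/common" after sourcing the vars file, then call helpers such as log, do_newstage and do_unchecked. A minimal sketch of that pattern (the PROGRAM value "example" is made up; PROGRAM, DEBUG and MAILTO are expected to be set by the caller, as cron.reboot does):

 #! /bin/bash
 export SCRIPTVARS=/srv/backports-master.debian.org/dak/config/backports/vars
 . $SCRIPTVARS                # provides $configdir and the other archive paths
 . "${configdir}/common"
 PROGRAM="example"            # tag prepended to every log() line
 DEBUG=0                      # set to 1 to make debug() print
 MAILTO="root"                # recipient used by log_error()
 log "starting example run"   # prints timestamp, hostname and PROGRAM[pid] before the message
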
diff --git a/config/backports/cron.daily b/config/backports/cron.daily
new file mode 100755 (executable)
index 0000000..1b9a059
--- /dev/null
@@ -0,0 +1,25 @@
+#! /bin/bash
+#
+
+set -e
+set -u
+
+export SCRIPTVARS=/srv/backports-master.debian.org/dak/config/backports/vars
+. $SCRIPTVARS
+
+################################################################################
+# Clean out old packages
+dak clean-suites -m 10000
+dak clean-queues
+
+# Send a report on NEW/BYHAND packages
+dak queue-report | mail -e -s "NEW and BYHAND on $(date +%D)" ftpmaster@backports.debian.org
+# and one on crufty packages
+
+dak cruft-report > $webdir/cruft-report-daily.txt
+cat $webdir/cruft-report-daily.txt | mail -e -s "Debian backports archive cruft report for $(date +%D)" ftpmaster@backports.debian.org
+
+cd $configdir
+apt-ftparchive -q clean apt.conf
+
+echo Daily cron scripts successful.
diff --git a/config/backports/cron.dinstall b/config/backports/cron.dinstall
new file mode 100755 (executable)
index 0000000..ab171f4
--- /dev/null
@@ -0,0 +1,433 @@
+#!/bin/bash
+# No way I try to deal with a crippled sh just for POSIX foo.
+
+# Copyright (C) 2009, 2010 Joerg Jaspert <joerg@debian.org>
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as
+# published by the Free Software Foundation; version 2.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+
+# Homer: Are you saying you're never going to eat any animal again? What
+#        about bacon?
+# Lisa: No.
+# Homer: Ham?
+# Lisa: No.
+# Homer: Pork chops?
+# Lisa: Dad, those all come from the same animal.
+# Homer: Heh heh heh. Ooh, yeah, right, Lisa. A wonderful, magical animal.
+
+# exit on errors
+set -e
+# make sure to only use defined variables
+set -u
+# ERR traps should be inherited from functions too. (And command
+# substitutions and subshells and whatnot, but for us the functions is
+# the important part here)
+set -E
+
+# import the general variable set.
+export SCRIPTVARS=/srv/backports-master.debian.org/dak/config/backports/vars
+. $SCRIPTVARS
+
+########################################################################
+# Functions                                                            #
+########################################################################
+# common functions are "outsourced"
+. "${configdir}/common"
+
+# source the dinstall functions
+. "${configdir}/dinstall.functions"
+
+########################################################################
+########################################################################
+
+# Function to save which stage we are in, so we can restart an interrupted
+# dinstall. Or even run actions in parallel, if we dare to, by simply
+# backgrounding the call to this function. But that should only really be
+# done for things we don't care much about.
+#
+# This should be called with the first argument being an array, with the
+# members
+#  - FUNC - the function name to call
+#  - ARGS - Possible arguments to hand to the function. Can be the empty string
+#  - TIME - The timestamp name. Can be the empty string
+#  - ERR  - if this is the string false, then the call will be surrounded by
+#           set +e ... set -e calls, so errors in the function do not exit
+#           dinstall. Can be the empty string, meaning true.
+#
+# MAKE SURE TO KEEP THIS THE LAST FUNCTION, AFTER ALL THE VARIOUS ONES
+# ADDED FOR DINSTALL FEATURES!
+function stage() {
+    ARGS='GO[@]'
+    local "${!ARGS}"
+
+    error=${ERR:-"true"}
+
+    STAGEFILE="${stagedir}/${FUNC}"
+    if [ -f "${STAGEFILE}" ]; then
+        stamptime=$(/usr/bin/stat -c %Z "${STAGEFILE}")
+        unixtime=$(date +%s)
+        difference=$(( $unixtime - $stamptime ))
+        if [ ${difference} -ge 14400 ]; then
+            log_error "Did already run ${FUNC}, stagefile exists, but that was ${difference} seconds ago. Please check."
+        else
+            log "Did already run ${FUNC}, not calling again..."
+        fi
+        return
+    fi
+
+    debug "Now calling function ${FUNC}. Arguments: ${ARGS}. Timestamp: ${TIME}"
+
+    # Make sure we are always at the same place. If a function wants to be elsewhere,
+    # it has to cd first!
+    cd ${configdir}
+
+    # Now redirect the output into $STAGEFILE.log. In case it errors out somewhere our
+    # errorhandler trap can then mail the contents of $STAGEFILE.log only, instead of a whole
+    # dinstall logfile. Short error mails ftw!
+    exec >> "${STAGEFILE}.log" 2>&1
+
+    if [ -f "${LOCK_STOP}" ]; then
+        log "${LOCK_STOP} exists, exiting immediately"
+        exit 42
+    fi
+
+    if [ "${error}" = "false" ]; then
+        set +e
+    fi
+    ${FUNC} ${ARGS}
+
+    # No matter what happened in the function, we make sure we have set -e default state back
+    set -e
+
+    # Make sure we are always at the same place.
+    cd ${configdir}
+
+    # We always use the same umask. If a function wants to do different, fine, but we reset.
+    umask 022
+
+    touch "${STAGEFILE}"
+
+    if [ -n "${TIME}" ]; then
+        ts "${TIME}"
+    fi
+
+    # And the output goes back to the normal logfile
+    exec >> "$LOGFILE" 2>&1
+
+    # Now we should make sure that we have a usable dinstall.log, so append the $STAGEFILE.log
+    # to it.
+    cat "${STAGEFILE}.log" >> "${LOGFILE}"
+    rm -f "${STAGEFILE}.log"
+
+    if [ -f "${LOCK_STOP}" ]; then
+        log "${LOCK_STOP} exists, exiting immediately"
+        exit 42
+    fi
+}
+
+########################################################################
+
+# We need logs.
+LOGFILE="$logdir/dinstall.log"
+
+exec >> "$LOGFILE" 2>&1
+
+# And now source our default config
+. "${configdir}/dinstall.variables"
+
+# Make sure we start out with a sane umask setting
+umask 022
+
+# And use one locale, no matter what the caller has set
+export LANG=C
+export LC_ALL=C
+
+# If we did not install new packages, we don't want to run.
+if ! [ -f "${DINSTALLPACKAGES}" ]; then
+    log "nothing to do"
+    exit 0
+fi
+rm -f "${DINSTALLPACKAGES}"
+
+touch "${DINSTALLSTART}"
+ts "startup"
+DINSTALLBEGIN="$(date -u +"%a %b %d %T %Z %Y (%s)")"
+state "Startup"
+
+lockfile -l 3600 "${LOCK_DAILY}"
+trap onerror ERR
+trap cleanup EXIT TERM HUP INT QUIT
+
+touch "${LOCK_BRITNEY}"
+
+GO=(
+    FUNC="savetimestamp"
+    TIME=""
+    ARGS=""
+    ERR="false"
+)
+stage $GO
+
+GO=(
+    FUNC="pgdump_pre"
+    TIME="pg_dump1"
+    ARGS=""
+    ERR=""
+)
+stage $GO
+
+lockfile "$LOCK_ACCEPTED"
+lockfile "$LOCK_NEW"
+
+GO=(
+    FUNC="newstage"
+    TIME="newstage"
+    ARGS=""
+    ERR=""
+)
+stage $GO
+
+GO=(
+    FUNC="cruft"
+    TIME="cruft"
+    ARGS=""
+    ERR=""
+)
+stage $GO
+
+state "indices"
+
+GO=(
+    FUNC="dominate"
+    TIME="dominate"
+    ARGS=""
+    ERR=""
+)
+stage $GO
+
+GO=(
+    FUNC="filelist"
+    TIME="generate-filelist"
+    ARGS=""
+    ERR=""
+)
+stage $GO
+
+# GO=(
+#     FUNC="fingerprints"
+#     TIME="import-keyring"
+#     ARGS=""
+#     ERR="false"
+# )
+# stage $GO
+
+GO=(
+    FUNC="overrides"
+    TIME="overrides"
+    ARGS=""
+    ERR=""
+)
+stage $GO
+
+GO=(
+    FUNC="mpfm"
+    TIME="pkg-file-mapping"
+    ARGS=""
+    ERR="false"
+)
+stage $GO
+
+state "packages/contents"
+GO=(
+    FUNC="packages"
+    TIME="apt-ftparchive"
+    ARGS=""
+    ERR=""
+)
+# Careful: When we ever go and remove this monster-long thing, we have to check the backgrounded
+# functions before it. We no longer have a 1.5hour sync point then.
+stage $GO
+
+state "dists/"
+GO=(
+    FUNC="pdiff"
+    TIME="pdiff"
+    ARGS=""
+    ERR=""
+)
+stage $GO
+
+GO=(
+    FUNC="release"
+    TIME="release files"
+    ARGS=""
+    ERR=""
+)
+stage $GO
+
+GO=(
+    FUNC="dakcleanup"
+    TIME="cleanup"
+    ARGS=""
+    ERR=""
+)
+stage $GO
+
+GO=(
+    FUNC="buildd_dir"
+    TIME="buildd_dir"
+    ARGS=""
+    ERR=""
+)
+stage $GO
+
+state "scripts"
+GO=(
+    FUNC="mkmaintainers"
+    TIME="mkmaintainers"
+    ARGS=""
+    ERR=""
+)
+stage $GO
+
+GO=(
+    FUNC="mkuploaders"
+    TIME="mkuploaders"
+    ARGS=""
+    ERR=""
+)
+stage $GO
+
+GO=(
+    FUNC="copyoverrides"
+    TIME="copyoverrides"
+    ARGS=""
+    ERR=""
+)
+stage $GO
+
+GO=(
+    FUNC="mklslar"
+    TIME="mklslar"
+    ARGS=""
+    ERR=""
+)
+stage $GO
+
+GO=(
+    FUNC="mkchecksums"
+    TIME="mkchecksums"
+    ARGS=""
+    ERR=""
+)
+stage $GO
+
+GO=(
+    FUNC="mirror"
+    TIME="mirror hardlinks"
+    ARGS=""
+    ERR=""
+)
+stage $GO
+
+rm -f "$LOCK_ACCEPTED"
+rm -f "$LOCK_NEW"
+rm -f "${LOCK_DAILY}"
+
+ts "locked part finished"
+state "postlock"
+
+GO=(
+    FUNC="pgdump_post"
+    TIME="pg_dump2"
+    ARGS=""
+    ERR=""
+)
+stage $GO &
+
+GO=(
+    FUNC="expire"
+    TIME="expire_dumps"
+    ARGS=""
+    ERR=""
+)
+stage $GO &
+
+GO=(
+    FUNC="reports"
+    TIME="reports"
+    ARGS=""
+    ERR=""
+)
+stage $GO &
+
+# GO=(
+#     FUNC="dm"
+#     TIME=""
+#     ARGS=""
+#     ERR=""
+# )
+# stage $GO &
+
+GO=(
+    FUNC="mirrorpush"
+    TIME="mirrorpush"
+    ARGS=""
+    ERR="false"
+)
+stage $GO &
+
+GO=(
+    FUNC="stats"
+    TIME="stats"
+    ARGS=""
+    ERR="false"
+)
+stage $GO &
+
+rm -f "${LOCK_BRITNEY}"
+
+GO=(
+    FUNC="compress"
+    TIME="compress"
+    ARGS=""
+    ERR=""
+)
+stage $GO &
+
+# GO=(
+#     FUNC="aptftpcleanup"
+#     TIME="apt-ftparchive cleanup"
+#     ARGS=""
+#     ERR="false"
+# )
+# stage $GO
+
+log "Daily cron scripts successful, all done"
+
+exec > "$logdir/afterdinstall.log" 2>&1
+
+GO=(
+    FUNC="renamelogfile"
+    TIME=""
+    ARGS=""
+    ERR="false"
+)
+stage $GO
+state "all done"
+
+
+# Now, at the very (successful) end of dinstall, make sure we remove
+# our stage files, so the next dinstall run will do it all again.
+rm -f ${stagedir}/*
+touch "${DINSTALLEND}"
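
Every step of cron.dinstall goes through the stage() helper defined at the top: the caller fills the GO array, and stage() skips the step if its stampfile in ${stagedir} already exists, logs into a per-step file and bails out early when ${LOCK_STOP} appears. Adding a further step is therefore just one more GO block; a sketch, where the function name "mynewstep" is hypothetical and would have to be defined in dinstall.functions:

 GO=(
     FUNC="mynewstep"   # hypothetical function from dinstall.functions
     TIME="mynewstep"   # timestamp label handed to ts()
     ARGS=""            # no extra arguments
     ERR="false"        # a failure here should not abort dinstall
 )
 stage $GO              # append " &" to background it, as the post-lock stages above do
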
diff --git a/config/backports/cron.monthly b/config/backports/cron.monthly
new file mode 100755 (executable)
index 0000000..d2f403f
--- /dev/null
@@ -0,0 +1,32 @@
+#!/bin/bash
+#
+
+set -e
+set -u
+
+export SCRIPTVARS=/srv/backports-master.debian.org/dak/config/backports/vars
+. $SCRIPTVARS
+
+################################################################################
+
+DATE=`date -d yesterday +%y%m`
+
+cd ${base}/mail/archive
+for m in mail import; do
+    if [ -f $m ]; then
+        mv $m ${m}-$DATE
+        sleep 20
+        gzip -9 ${m}-$DATE
+        chgrp backports ${m}-$DATE.gz
+        chmod 660 ${m}-$DATE.gz
+    fi;
+done
+
+DATE=`date +%Y-%m`
+cd ${base}/log
+touch $DATE
+ln -sf $DATE current
+chmod g+w $DATE
+chown dak:backports $DATE
+
+################################################################################
diff --git a/config/backports/cron.reboot b/config/backports/cron.reboot
new file mode 100755 (executable)
index 0000000..a5a132e
--- /dev/null
@@ -0,0 +1,76 @@
+#!/bin/bash
+# No way I try to deal with a crippled sh just for POSIX foo.
+
+# Copyright (C) 2009 Joerg Jaspert <joerg@debian.org>
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as
+# published by the Free Software Foundation; version 2.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+
+# exit on errors
+set -e
+# make sure to only use defined variables
+set -u
+# ERR traps should be inherited from functions too. (And command
+# substitutions and subshells and whatnot, but for us the functions is
+# the important part here)
+set -E
+
+# import the general variable set.
+export SCRIPTVARS=/srv/backports-master.debian.org/dak/config/backports/vars
+. $SCRIPTVARS
+
+# common functions are "outsourced"
+. "${configdir}/common"
+
+# usually we are not using debug logs. Set to 1 if you want them.
+DEBUG=0
+
+# our name
+PROGRAM="dinstall_reboot"
+
+# where do we want mails to go? For example log entries made with error()
+if [ "x$(hostname -s)x" != "xfranckx" ]; then
+    # Not our ftpmaster host
+    MAILTO=${MAILTO:-"root"}
+else
+    # Yay, ftpmaster
+    MAILTO=${MAILTO:-"ftpmaster@backports.debian.org"}
+fi
+
+# Marker for dinstall start
+DINSTALLSTART="${lockdir}/dinstallstart"
+# Marker for dinstall end
+DINSTALLEND="${lockdir}/dinstallend"
+
+set +e
+starttime=$(/usr/bin/stat -c %Z "${DINSTALLSTART}")
+endtime=$(/usr/bin/stat -c %Z "${DINSTALLEND}")
+set -e
+
+if [ ${endtime} -gt ${starttime} ]; then
+       # Great, last dinstall run did seem to end without trouble, no need to rerun
+       log "Last dinstall run did end without trouble, not rerunning"
+       exit 0
+else
+       # Hrm, it looks like we did not successfully end the last run.
+       # This either means dinstall did abort due to an error, or we had a reboot
+       # No way to tell, so lets restart and see what happens.
+
+       # Make sure we are not fooled by some random touching of the files, only
+       # really restart if we have the first stage stampfile there, indicating that
+       # dinstall got started
+       if [ -f "${stagedir}/savetimestamp" ]; then
+               log "Seems we have to restart a dinstall run after reboot"
+               ${configdir}/cron.dinstall
+       fi
+fi
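
cron.reboot only restarts dinstall when the end marker is older than the start marker and the first stage stampfile still exists; the same check can be made by hand (lockdir and stagedir come from the vars file):

 $ stat -c '%Z %n' "${lockdir}/dinstallstart" "${lockdir}/dinstallend"
 $ ls "${stagedir}/savetimestamp" 2>/dev/null   # present only between the first stage and a successful end of dinstall
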
diff --git a/config/backports/cron.unchecked b/config/backports/cron.unchecked
new file mode 100755 (executable)
index 0000000..08fcbf3
--- /dev/null
@@ -0,0 +1,108 @@
+#! /bin/bash
+
+# Copyright (C) 2009 Joerg Jaspert <joerg@debian.org>
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as
+# published by the Free Software Foundation; version 2.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+
+# exit on errors
+set -e
+# make sure to only use defined variables
+set -u
+# ERR traps should be inherited from functions too. (And command
+# substitutions and subshells and whatnot, but for us the functions is
+# the important part here)
+set -E
+
+export SCRIPTVARS=/srv/backports-master.debian.org/dak/config/backports/vars
+. $SCRIPTVARS
+
+
+LOCKDAILY=""
+LOCKFILE="$lockdir/unchecked.lock"
+LOCK_NEW="$lockdir/processnew.lock"
+NOTICE="$lockdir/daily.lock"
+LOCK_BUILDD="$lockdir/buildd.lock"
+# The state file telling us we have something new to do
+DINSTALLPACKAGES="${lockdir}/dinstall.packages"
+
+# our name
+PROGRAM="unchecked"
+
+if [ -e $NOTICE ]; then
+    exit 0;
+fi
+
+########################################################################
+# Functions                                                            #
+########################################################################
+# common functions are "outsourced"
+. "${configdir}/common"
+
+STAMP=$(date "+%Y%m%d%H%M")
+
+cleanup() {
+    rm -f "$LOCKFILE"
+    if [ ! -z "$LOCKDAILY" ]; then
+        rm -f "$NOTICE"
+    fi
+}
+
+function do_buildd () {
+    if lockfile -r3 $NOTICE; then
+        LOCKDAILY="YES"
+        cd $overridedir
+        dak make-overrides &>/dev/null
+        rm -f override.sid.all3 override.sid.all3.src
+        for i in main contrib non-free main.debian-installer; do
+            cat override.lenny-backports.$i >> override.sid.all3
+            if [ "$i" != "main.debian-installer" ]; then
+                cat override.lenny-backports.$i.src >> override.sid.all3.src
+            fi
+        done
+        make_buildd_dir
+        wbtrigger
+    fi
+}
+
+function do_dists () {
+    cd $configdir
+    dak generate-filelist
+    GZIP='--rsyncable' ; export GZIP
+    dak generate-packages-sources
+}
+
+########################################################################
+# the actual unchecked functions follow                                #
+########################################################################
+
+# And use one locale, no matter what the caller has set
+export LANG=C
+export LC_ALL=C
+
+# only run one cron.unchecked
+lockfile -r3 $LOCKFILE || exit 0
+trap cleanup 0
+
+do_newstage
+do_unchecked
+
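+# $changes is presumably set by do_unchecked (from ${configdir}/common); only
+# kick off the follow-up work if something was actually processed.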
+if [ ! -z "$changes" ]; then
+    touch ${DINSTALLPACKAGES}
+    sync_debbugs
+    do_buildd
+
+#    echo "Starting g-p-s: $(date +%H:%M:%S)"
+#    do_dists
+#    echo "Done with g-p-s: $(date +%H:%M:%S)"
+fi
diff --git a/config/backports/cron.weekly b/config/backports/cron.weekly
new file mode 100755 (executable)
index 0000000..fcdffb0
--- /dev/null
@@ -0,0 +1,51 @@
+#!/bin/bash
+#
+
+set -e
+set -u
+# ERR traps should be inherited from functions too. (And command
+# substitutions and subshells and whatnot, but for us functions are
+# the important part here)
+set -E
+export SCRIPTVARS=/srv/backports-master.debian.org/dak/config/backports/vars
+. $SCRIPTVARS
+
+# Start logging
+NOW=`date "+%Y.%m.%d-%H:%M:%S"`
+LOGFILE="$logdir/weekly_${NOW}.log"
+exec > "$LOGFILE" 2>&1
+
+cleanup() {
+  echo "Cleanup"
+  rm -f "$LOGFILE"
+}
+trap cleanup 0
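+# Note: on a successful run the plain-text log is replaced by ${LOGFILE}.bz2
+# below before the trap fires, so cleanup() only removes an uncompressed log
+# left behind by an aborted run.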
+
+################################################################################
+
+# Purge empty directories
+echo "Purging empty directories in $ftpdir/pool/"
+if [ ! -z "$(find $ftpdir/pool/ -type d -empty)" ]; then
+   find $ftpdir/pool/ -type d -empty | xargs rmdir;
+fi
+
+echo "Splitting queue/done"
+dak split-done
+
+# Clean up apt-ftparchive's databases
+cd $configdir
+echo "Cleanup apt-ftparchive's database"
+apt-ftparchive -q clean apt.conf
+
+echo "Fixing symlinks in $ftpdir"
+symlinks -d -r $ftpdir
+
+echo "Finally, all is done, compressing logfile"
+exec > /dev/null 2>&1
+
+bzip2 -9 "$LOGFILE"
+
+
+################################################################################
diff --git a/config/backports/dak.conf b/config/backports/dak.conf
new file mode 100644 (file)
index 0000000..422dce6
--- /dev/null
@@ -0,0 +1,462 @@
+Dinstall
+{
+   // Both need to be defined at the moment, but they can point to the
+   // same file.
+   GPGKeyring {
+      "/srv/backports-master.debian.org/keyrings/keyring.gpg";
+   };
+   // To sign the release files. Adjust the keyid!
+   // Note: Key must be without a passphrase or it won't work automagically!
+   SigningKeyring "/srv/backports-master.debian.org/s3kr1t/dot-gnupg/secring.gpg";
+   SigningPubKeyring "/srv/backports-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg";
+   SigningKeyIds "55BE302B";
+   SendmailCommand "/usr/sbin/sendmail -oi -t -f envelope@ftp-master.debian.org";
+   MyEmailAddress "backports.debian.org archive Installer <installer@backports.debian.org>";
+   MyAdminAddress "ftpmaster@backports.debian.org";
+   MyHost "backports.debian.org";  // used for generating user@my_host addresses in e.g. manual_reject()
+   MyDistribution "backports.debian.org archive"; // Used in emails
+   // Alicia and melanie can use it
+   BugServer "bugs.debian.org";
+   // melanie uses the packages server.
+   // PackagesServer "packages.test.backports.org";
+   // If defined then the package@this.server gets a copy of most of the
+   // actions related to the package. For an example look at
+   // packages.qa.debian.org
+   // TrackingServer "packages.qa.test.backports.org";
+   LockFile "/srv/backports-master.debian.org/lock/dinstall.lock";
+   // If defined this address gets a bcc of all mails.
+   // FIXME: Einrichten wenn das hier produktiv geht!
+   Bcc "debian-backports-changes@lists.debian.org";
+   GroupOverrideFilename "override.group-maint";
+   FutureTimeTravelGrace 28800; // 8 hours
+   PastCutoffYear "1984";
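+   // SkipTime: presumably the number of seconds an upload must be left untouched before it is processed, so files still being uploaded are skipped.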
+   SkipTime 300;
+   // If defined then mails to close bugs are sent to the bugserver.
+   CloseBugs "false";
+   OverrideDisparityCheck "true";
+   DefaultSuite "lenny-backports";
+   Reject
+   {
+     NoSourceOnly "true";
+     ReleaseTransitions "/srv/backports-master.debian.org/hints/transitions.yaml";
+   };
+   // If set, only send mails to addresses listed there.
+   // format of entries: one entry per line. Either an email address directly, or a regular expression,
+   // prefixed by "RE:". Examples: "jane.doe@domain.com" or "RE:jane[^@]@domain.com", where the first will
+   // only allow mail to jane.doe@domain.com while the second will mail all of jane*@domain.com
+   MailWhiteList "/srv/backports-master.debian.org/dak/config/backports/mail-whitelist";
+};
+
+Generate-Index-Diffs
+{
+   Options
+   {
+     TempDir "/srv/backports-master.debian.org/tiffani";
+     MaxDiffs { Default 50; };
+   };
+};
+
+Override
+{
+   MyEmailAddress "backports.debian.org archive FTP Masters <ftpmaster@backports.debian.org>";
+};
+
+Add-User
+{
+// Should we send a mail to newly added users?
+  SendEmail "true";
+
+// Should we create an account so they can login?
+// Account will be created with the defaults from adduser, so adjust
+// its configuration to fit your needs.
+// NOTE: This requires that your dak user has a sudo entry allowing it
+// to run /usr/sbin/useradd!
+  CreateAccount "false";
+
+// Note: This is a comma-separated list of additional group names to
+// which uma should add the user. NO spaces between the group names or
+// useradd will die.
+// Disable it if you don't want or need that feature.
+//  GID "debuser";
+
+};
+
+Show-New
+{
+  HTMLPath "/srv/backports-web.debian.org/underlay/new/";
+}
+
+Check-Overrides
+{
+  OverrideSuites
+  {
+    lenny-backports
+    {
+      Process "1";
+//      OriginSuite "Unstable";
+    };
+
+    squeeze-backports
+    {
+      Process "1";
+//      OriginSuite "Unstable";
+    };
+
+    etch-backports
+    {
+      Process "1";
+//      OriginSuite "Unstable";
+    };
+
+//    Unstable
+//    {
+//    Process "0";
+//  };
+  };
+};
+
+
+Import-Users-From-Passwd
+{
+  // The Primary GID of your users. Using uma it is the gid from group users.
+  ValidGID "800";
+  // Comma separated list of users who are in Postgres but not the passwd file
+  KnownPostgres "postgres,katie,dak,www-data,qa,guest";
+};
+
+Queue-Report
+{
+  ReportLocations
+  {
+    822Location "/srv/backports-web.debian.org/underlay/new.822";
+  };
+};
+
+Clean-Queues
+{
+  Options
+  {
+    Days 14;
+   };
+ MorgueSubDir "queues";
+};
+
+Control-Overrides
+{
+  Options
+  {
+    Component "main";
+    Suite "lenny-backports";
+    Type "deb";
+   };
+
+ ComponentPosition "prefix"; // Whether the component is prepended or appended to the section name
+};
+
+Rm
+{
+  Options
+  {
+    Suite "lenny-backports";
+   };
+
+   MyEmailAddress "backports.debian.org archive Maintenance <ftpmaster@backports.debian.org>";
+   LogFile "/srv/backports-master.debian.org/ftp/removals.txt";
+   LogFile822 "/srv/backports-master.debian.org/ftp/removals.822";
+};
+
+Import-Archive
+{
+  ExportDir "/srv/backports-master.debian.org/dak/import-archive-files/";
+};
+
+Clean-Suites
+{
+  // How long (in seconds) dead packages are left before being killed
+  StayOfExecution 1209600; // 14 days
+  AcceptedAutoBuildStayOfExecution 86400; // 24 hours
+  MorgueSubDir "pool";
+};
+
+Process-New
+{
+  DinstallLockFile "/srv/backports-master.debian.org/lock/processnew.lock";
+  LockDir "/srv/backports-master.debian.org/lock/new/";
+};
+
+Suite
+{
+  lenny-backports
+  {
+       Components
+       {
+         main;
+         contrib;
+         non-free;
+       };
+
+       Announce "debian-backports-changes@lists.debian.org";
+       Origin "Debian Backports";
+       Description "Backports for the Lenny Distribution";
+       CodeName "lenny-backports";
+       OverrideCodeName "lenny-backports";
+       Priority "7";
+       NotAutomatic "yes";
+  };
+
+
+  squeeze-backports
+  {
+       Components
+       {
+         main;
+         contrib;
+         non-free;
+       };
+
+       Announce "debian-backports-changes@lists.debian.org";
+       Origin "Debian Backports";
+       Description "Backports for the Squeeze Distribution";
+       CodeName "squeeze-backports";
+       OverrideCodeName "squeeze-backports";
+       Priority "7";
+       NotAutomatic "yes";
+  };
+
+  etch-backports
+  {
+       Components
+       {
+         main;
+         contrib;
+         non-free;
+       };
+       Announce "debian-backports-changes@lists.debian.org";
+       Origin "backports.debian.org archive";
+       Description "Backports for the Etch Distribution";
+       CodeName "etch-backports";
+       OverrideCodeName "etch-backports";
+       Priority "7";
+       NotAutomatic "yes";
+  };
+
+};
+
+Dir
+{
+  Root "/srv/backports-master.debian.org/ftp/";
+  Pool "/srv/backports-master.debian.org/ftp/pool/";
+  Templates "/srv/backports-master.debian.org/dak/templates/";
+  PoolRoot "pool/";
+  Lists "/srv/backports-master.debian.org/database/dists/";
+  Cache "/srv/backports-master.debian.org/database/";
+  Log "/srv/backports-master.debian.org/log/";
+  Lock "/srv/backports-master.debian.org/lock";
+  Morgue "/srv/backports-master.debian.org/morgue/";
+  MorgueReject "reject";
+  Override "/srv/backports-master.debian.org/scripts/override/";
+  QueueBuild "/srv/backports-master.debian.org/buildd/";
+  UrgencyLog "/srv/backports-master.debian.org/testing/urgencies/";
+  TempPath "/srv/backports-master.debian.org/tmp";
+  Queue
+  {
+    Byhand "/srv/backports-master.debian.org/queue/byhand/";
+    Done "/srv/backports-master.debian.org/queue/done/";
+    Holding "/srv/backports-master.debian.org/queue/holding/";
+    New "/srv/backports-master.debian.org/queue/new/";
+    Reject "/srv/backports-master.debian.org/queue/reject/";
+    Unchecked "/srv/backports-master.debian.org/queue/unchecked/";
+    BTSVersionTrack "/srv/backports-master.debian.org/queue/bts_version_track/";
+    Newstage "/srv/backports-master.debian.org/queue/newstage/";
+    Embargoed "/srv/backports-master.debian.org/queue/Embargoed/";
+    Unembargoed "/srv/backports-master.debian.org/queue/Unembargoed/";
+    ProposedUpdates "/srv/backports-master.debian.org/queue/p-u-new/";
+    OldProposedUpdates "/srv/backports-master.debian.org/queue/Unembargoed/";
+  };
+};
+
+DB
+{
+  Name "projectb";
+  Host "";
+  Port -1;
+};
+
+SuiteMappings
+{
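+ // Map the distribution named in an upload onto the real suite, e.g. uploads
+ // targeted at "lenny" or "lenny-bpo" land in lenny-backports.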
+ "map lenny lenny-backports";
+ "map lenny-bpo lenny-backports";
+ "map squeeze squeeze-backports";
+ "map squeeze-bpo squeeze-backports";
+};
+
+Architectures
+{
+  source "Source";
+  all "Architecture Independent";
+  alpha "DEC Alpha";
+  amd64 "AMD x86_64 (AMD64)";
+  hurd-i386 "Intel ia32 running the HURD";
+  hppa "HP PA RISC";
+  arm "ARM";
+  armel "ARM EABI";
+  i386 "Intel ia32";
+  ia64 "Intel ia64";
+  m68k "Motorola Mc680x0";
+  mips "MIPS (Big Endian)";
+  mipsel "MIPS (Little Endian)";
+  powerpc "PowerPC";
+  s390 "IBM S/390";
+  sh "Hitachi SuperH";
+  sparc "Sun SPARC/UltraSPARC";
+  kfreebsd-i386 "GNU/kFreeBSD i386";
+  kfreebsd-amd64 "GNU/kFreeBSD amd64";
+};
+
+Archive
+{
+  backports
+  {
+    OriginServer "backports.debian.org";
+    PrimaryMirror "backports.debian.org";
+    Description "Master Archive for backports.debian.org archive";
+  };
+};
+
+Component
+{
+  main
+  {
+       Description "Main";
+       MeetsDFSG "true";
+  };
+
+  contrib
+  {
+       Description "Contrib";
+       MeetsDFSG "true";
+  };
+
+  non-free
+  {
+     Description "Software that fails to meet the DFSG";
+     MeetsDFSG "false";
+  };
+
+};
+
+Section
+{
+  admin;
+  cli-mono;
+  comm;
+  database;
+  debian-installer;
+  debug;
+  devel;
+  doc;
+  editors;
+  embedded;
+  electronics;
+  fonts;
+  games;
+  gnome;
+  graphics;
+  gnu-r;
+  gnustep;
+  hamradio;
+  haskell;
+  httpd;
+  interpreters;
+  java;
+  kde;
+  kernel;
+  libdevel;
+  libs;
+  lisp;
+  localization;
+  mail;
+  math;
+  misc;
+  net;
+  news;
+  ocaml;
+  oldlibs;
+  otherosfs;
+  perl;
+  php;
+  python;
+  ruby;
+  science;
+  shells;
+  sound;
+  tex;
+  text;
+  utils;
+  web;
+  vcs;
+  video;
+  x11;
+  xfce;
+  zope;
+};
+
+Priority
+{
+  required 1;
+  important 2;
+  standard 3;
+  optional 4;
+  extra 5;
+  source 0; // i.e. unused
+};
+
+OverrideType
+{
+  deb;
+  udeb;
+  dsc;
+};
+
+Location
+{
+  // Pool locations on backports.debian.org
+  /srv/backports-master.debian.org/ftp/pool/
+    {
+      Archive "backports";
+      Type "pool";
+    };
+
+};
+
+Urgency
+{
+  Default "low";
+  Valid
+  {
+    low;
+    medium;
+    high;
+    emergency;
+    critical;
+  };
+};
+
+Common
+{
+  // The default number of threads for multithreading parts of dak:
+  ThreadCount 16;
+}
+
+Import-LDAP-Fingerprints
+{
+  LDAPDn "ou=users,dc=debian,dc=org";
+  LDAPServer "db.debian.org";
+  ExtraKeyrings
+  {
+    "/srv/keyring.debian.org/keyrings/removed-keys.pgp";
+    "/srv/keyring.debian.org/keyrings/removed-keys.gpg";
+    "/srv/keyring.debian.org/keyrings/extra-keys.pgp";
+  };
+  KeyServer "wwwkeys.eu.pgp.net";
+};
diff --git a/config/backports/dak.conf-etc b/config/backports/dak.conf-etc
new file mode 100644 (file)
index 0000000..bf7d26c
--- /dev/null
@@ -0,0 +1,22 @@
+Config
+{
+  // FQDN hostname
+  morricone.debian.org
+  {
+
+    // Optional hostname as it appears in the database (if it differs
+    // from the FQDN hostname).
+    DatabaseHostname     "backports";
+
+    // Optional filename of katie's config file; if not present, this
+    // file is assumed to contain katie config info.
+    DakConfig          "/srv/backports-master.debian.org/dak/config/backports/dak.conf";
+
+    // Optional filename of apt-ftparchive's config file; if not
+    // present, the file is assumed to be 'apt.conf' in the same
+    // directory as this file.
+    AptConfig          "/srv/backports-master.debian.org/dak/config/backports/apt.conf";
+  }
+
+}
+
diff --git a/config/backports/dinstall.functions b/config/backports/dinstall.functions
new file mode 100644 (file)
index 0000000..0ae8172
--- /dev/null
@@ -0,0 +1,606 @@
+# -*- mode:sh -*-
+# Timestamp. Used for dinstall stat graphs
+function ts() {
+        echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
+}
+
+# Cleanup actions
+function cleanup() {
+       rm -f ${LOCK_DAILY}
+       rm -f ${LOCK_ACCEPTED}
+}
+
+# If we error out this one is called, *FOLLOWED* by cleanup above
+function onerror() {
+    ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
+
+    subject="ATTENTION ATTENTION!"
+    if [ "${error}" = "false" ]; then
+        subject="${subject} (continued)"
+    else
+        subject="${subject} (interrupted)"
+    fi
+    subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
+
+    cat "${STAGEFILE}.log" | mail -s "${subject}" -a "X-Debian: DAK" cron@backports-master.debian.org
+}
+
+########################################################################
+# the actual dinstall functions follow                                 #
+########################################################################
+
+# pushing merkel's QA user, part one
+function merkel1() {
+    log "Telling merkel's QA user that we start dinstall"
+    ssh -2 -i ~dak/.ssh/push_merkel_qa  -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@merkel.debian.org sleep 1
+}
+
+# Create the postgres dump files
+function pgdump_pre() {
+    log "Creating pre-daily-cron-job backup of $PGDATABASE database..."
+    pg_dump > $base/backup/dump_pre_$(date +%Y.%m.%d-%H:%M:%S)
+}
+
+function pgdump_post() {
+    log "Creating post-daily-cron-job backup of $PGDATABASE database..."
+    cd $base/backup
+    POSTDUMP=$(date +%Y.%m.%d-%H:%M:%S)
+    pg_dump > $base/backup/dump_$POSTDUMP
+    #pg_dumpall --globals-only > $base/backup/dumpall_$POSTDUMP
+    ln -sf $base/backup/dump_$POSTDUMP current
+    #ln -sf $base/backup/dumpall_$POSTDUMP currentall
+}
+
+# Load the dak-dev projectb
+function pgdakdev() {
+    # Make sure to unset any possible psql variables so we don't drop the wrong
+    # f****** database by accident
+    local PGDATABASE
+    unset PGDATABASE
+    local PGHOST
+    unset PGHOST
+    local PGPORT
+    unset PGPORT
+    local PGUSER
+    unset PGUSER
+    cd $base/backup
+    echo "drop database projectb" | psql -p 5434 template1
+       #cat currentall | psql -p 5433 template1
+    createdb -p 5434 -T template1 projectb
+    fgrep -v '\connect' current | psql -p 5434 projectb
+}
+
+# Updating various files
+function updates() {
+    log "Updating Bugs docu, Mirror list and mailing-lists.txt"
+    cd $configdir
+    $scriptsdir/update-bugdoctxt
+    $scriptsdir/update-mirrorlists
+    $scriptsdir/update-mailingliststxt
+    $scriptsdir/update-pseudopackages.sh
+}
+
+# Process (oldstable)-proposed-updates "NEW" queue
+function punew_do() {
+    date -u -R >> REPORT
+    dak process-policy $1 | tee -a REPORT | mail -e -s "NEW changes in $1" debian-release@lists.debian.org
+    echo >> REPORT
+}
+function punew() {
+    log "Doing automated p-u-new processing"
+    cd "${queuedir}/p-u-new"
+    punew_do "$1"
+}
+function opunew() {
+    log "Doing automated o-p-u-new processing"
+    cd "${queuedir}/o-p-u-new"
+    punew_do "$1"
+}
+
+# The first i18n one, syncing new descriptions
+function i18n1() {
+    log "Synchronizing i18n package descriptions"
+    # First sync their newest data
+    cd ${scriptdir}/i18nsync
+    rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
+
+    # Now check if we still know about the packages for which they created the files
+    # is the timestamp signed by us?
+    if $(gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
+        # now read it. As it's signed by us we are sure the content is what we expect, no need
+        # to do more here. And we only test -d a directory on it anyway.
+        TSTAMP=$(cat timestamp)
+        # do we have the dir still?
+        if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
+            # Let's check!
+            if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
+                # Yay, worked, lets copy around
+                for dir in squeeze sid; do
+                    if [ -d dists/${dir}/ ]; then
+                        cd dists/${dir}/main/i18n
+                        rsync -aq --delete --delete-after  . ${ftpdir}/dists/${dir}/main/i18n/.
+                    fi
+                    cd ${scriptdir}/i18nsync
+                done
+            else
+                echo "ARRRR, bad guys, wrong files, ARRR"
+                echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." debian-l10n-devel@lists.alioth.debian.org
+            fi
+        else
+            echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
+            echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." debian-l10n-devel@lists.alioth.debian.org
+        fi
+    else
+        echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
+        echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -s "You can't keep blaming yourself. Just blame yourself once, and move on." debian-l10n-devel@lists.alioth.debian.org
+    fi
+}
+
+function cruft() {
+    log "Checking for cruft in overrides"
+    dak check-overrides
+}
+
+function dominate() {
+    log "Removing obsolete source and binary associations"
+    dak dominate
+}
+
+function filelist() {
+    log "Generating file lists for apt-ftparchive"
+    dak generate-filelist
+}
+
+function fingerprints() {
+    log "Not updating fingerprints - scripts need checking"
+
+    log "Updating fingerprints"
+    dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
+
+    OUTFILE=$(mktemp)
+    dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
+
+    if [ -s "${OUTFILE}" ]; then
+        /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
+From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
+To: <debian-project@lists.debian.org>
+Subject: Debian Maintainers Keyring changes
+Content-Type: text/plain; charset=utf-8
+MIME-Version: 1.0
+
+The following changes to the debian-maintainers keyring have just been activated:
+
+$(cat $OUTFILE)
+
+Debian distribution maintenance software,
+on behalf of the Keyring maintainers
+
+EOF
+    fi
+    rm -f "$OUTFILE"
+}
+
+function overrides() {
+    log "Writing overrides into text files"
+    cd $overridedir
+    dak make-overrides
+
+    # FIXME
+    rm -f override.sid.all3
+    for i in main contrib non-free main.debian-installer; do cat override.lenny-backports.$i >> override.sid.all3; done
+}
+
+function mpfm() {
+    log "Generating package / file mapping"
+    dak make-pkg-file-mapping | bzip2 -9 > $base/ftp/indices/package-file.map.bz2
+}
+
+function packages() {
+    log "Generating Packages and Sources files"
+    cd $configdir
+    GZIP='--rsyncable' ; export GZIP
+    apt-ftparchive generate apt.conf
+    #dak generate-packages-sources
+}
+
+function pdiff() {
+    log "Generating pdiff files"
+    dak generate-index-diffs
+}
+
+function release() {
+    log "Generating Release files"
+    dak generate-releases
+}
+
+function dakcleanup() {
+    log "Cleanup old packages/files"
+    dak clean-suites -m 10000
+    dak clean-queues
+}
+
+function buildd_dir() {
+    # Rebuild the buildd dir to avoid long periods of 403s
+    log "Regenerating the buildd incoming dir"
+    STAMP=$(date "+%Y%m%d%H%M")
+    make_buildd_dir
+}
+
+function mklslar() {
+    cd $ftpdir
+
+    FILENAME=ls-lR
+
+    log "Removing any core files ..."
+    find -type f -name core -print0 | xargs -0r rm -v
+
+    log "Checking permissions on files in the FTP tree ..."
+    find -type f \( \! -perm -444 -o -perm +002 \) -ls
+    find -type d \( \! -perm -555 -o -perm +002 \) -ls
+
+    log "Checking symlinks ..."
+    symlinks -rd .
+
+    log "Creating recursive directory listing ... "
+    rm -f .${FILENAME}.new
+    TZ=UTC ls -lR > .${FILENAME}.new
+
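+    # If a previous listing exists, also publish a compressed diff against it
+    # so mirrors only need to fetch the delta.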
+    if [ -r ${FILENAME}.gz ] ; then
+        mv -f ${FILENAME}.gz ${FILENAME}.old.gz
+        mv -f .${FILENAME}.new ${FILENAME}
+        rm -f ${FILENAME}.patch.gz
+        zcat ${FILENAME}.old.gz | diff -u - ${FILENAME} | gzip --rsyncable -9cfn - >${FILENAME}.patch.gz
+        rm -f ${FILENAME}.old.gz
+    else
+        mv -f .${FILENAME}.new ${FILENAME}
+    fi
+
+    gzip --rsyncable -9cfN ${FILENAME} >${FILENAME}.gz
+    rm -f ${FILENAME}
+}
+
+function mkmaintainers() {
+    log 'Creating Maintainers index ... '
+
+    cd $indices
+    dak make-maintainers ${scriptdir}/masterfiles/pseudo-packages.maintainers | \
+        sed -e "s/~[^  ]*\([   ]\)/\1/"  | \
+        awk '{printf "%-20s ", $1; for (i=2; i<=NF; i++) printf "%s ", $i; printf "\n";}' > .new-maintainers
+
+    if ! cmp -s .new-maintainers Maintainers || [ ! -f Maintainers ]; then
+           log "installing Maintainers ... "
+           mv -f .new-maintainers Maintainers
+           gzip --rsyncable -9v <Maintainers >.new-maintainers.gz
+           mv -f .new-maintainers.gz Maintainers.gz
+    else
+        rm -f .new-maintainers
+    fi
+}
+
+function mkuploaders() {
+    log 'Creating Uploaders index ... '
+
+    cd $indices
+    dak make-maintainers -u ${scriptdir}/masterfiles/pseudo-packages.maintainers | \
+        sed -e "s/~[^  ]*\([   ]\)/\1/"  | \
+        awk '{printf "%-20s ", $1; for (i=2; i<=NF; i++) printf "%s ", $i; printf "\n";}' > .new-uploaders
+
+    if ! cmp -s .new-uploaders Uploaders || [ ! -f Uploaders ]; then
+           log "installing Uploaders ... "
+           mv -f .new-uploaders Uploaders
+           gzip --rsyncable -9v <Uploaders >.new-uploaders.gz
+           mv -f .new-uploaders.gz Uploaders.gz
+    else
+        rm -f .new-uploaders
+    fi
+}
+
+function copyoverrides() {
+    log 'Copying override files into public view ...'
+
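+    # $copyoverrides (set in vars) lists the override.<suite>.<component> names;
+    # each is gzipped into $indices and only replaced if its content changed.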
+    for ofile in $copyoverrides ; do
+           cd $overridedir
+           chmod g+w override.$ofile
+
+           cd $indices
+
+           newofile=override.$ofile.gz
+           rm -f .newover-$ofile.gz
+           pc="`gzip 2>&1 -9nv <$overridedir/override.$ofile >.newover-$ofile.gz`"
+        if ! cmp -s .newover-$ofile.gz $newofile || [ ! -f $newofile ]; then
+                   log "   installing new $newofile $pc"
+                   mv -f .newover-$ofile.gz $newofile
+                   chmod g+w $newofile
+        else
+                   rm -f .newover-$ofile.gz
+           fi
+    done
+}
+
+function mkfilesindices() {
+    umask 002
+    cd $base/ftp/indices/files/components
+
+    ARCHLIST=$(tempfile)
+
+    log "Querying $PGDATABASE..."
+    echo 'SELECT l.path, f.filename, a.arch_string FROM location l JOIN files f ON (f.location = l.id) LEFT OUTER JOIN (binaries b JOIN architecture a ON (b.architecture = a.id)) ON (f.id = b.file)' | psql -At | sed 's/|//;s,^/srv/ftp-master.debian.org/ftp,.,' | sort >$ARCHLIST
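+    # ARCHLIST now holds one "<relative filename>|<arch>" line per file known to
+    # projectb (arch empty for source and other non-binary files), with absolute
+    # paths rewritten relative to the archive root.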
+
+    includedirs () {
+        perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
+    }
+    poolfirst () {
+        perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
+    }
+
+    log "Generating sources list"
+    (
+        sed -n 's/|$//p' $ARCHLIST
+        cd $base/ftp
+        find ./dists -maxdepth 1 \! -type d
+        find ./dists \! -type d | grep "/source/"
+    ) | sort -u | gzip --rsyncable -9 > source.list.gz
+
+    log "Generating arch lists"
+
+    ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
+    for a in $ARCHES; do
+        (sed -n "s/|$a$//p" $ARCHLIST
+            sed -n 's/|all$//p' $ARCHLIST
+
+            cd $base/ftp
+            find ./dists -maxdepth 1 \! -type d
+            find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
+        ) | sort -u | gzip --rsyncable -9 > arch-$a.list.gz
+    done
+
+    log "Generating suite lists"
+
+    suite_list () {
+        printf 'SELECT DISTINCT l.path, f.filename FROM (SELECT sa.source AS source FROM src_associations sa WHERE sa.suite = %d UNION SELECT b.source AS source FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) WHERE ba.suite = %d) s JOIN dsc_files df ON (s.source = df.source) JOIN files f ON (df.file = f.id) JOIN location l ON (f.location = l.id)\n' $1 $1 | psql -F' ' -A -t
+
+        printf 'SELECT l.path, f.filename FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) JOIN files f ON (b.file = f.id) JOIN location l ON (f.location = l.id) WHERE ba.suite = %d\n' $1 | psql -F' ' -A -t
+    }
+
+    printf 'SELECT id, suite_name FROM suite\n' | psql -F' ' -At |
+    while read id suite; do
+        [ -e $base/ftp/dists/$suite ] || continue
+        (
+            (cd $base/ftp
+                distname=$(cd dists; readlink $suite || echo $suite)
+                find ./dists/$distname \! -type d
+                for distdir in ./dists/*; do
+                    [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
+                done
+            )
+            suite_list $id | tr -d ' ' | sed 's,^/srv/ftp-master.debian.org/ftp,.,'
+        ) | sort -u | gzip --rsyncable -9 > suite-${suite}.list.gz
+    done
+
+    log "Finding everything on the ftp site to generate sundries"
+    (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
+
+    rm -f sundries.list
+    zcat *.list.gz | cat - *.list | sort -u |
+    diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
+
+    log "Generating files list"
+
+    for a in $ARCHES; do
+        (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
+        cat - sundries.list dists.list project.list docs.list indices.list |
+        sort -u | poolfirst > ../arch-$a.files
+    done
+
+    (cd $base/ftp/
+           for dist in sid squeeze; do
+                   find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip --rsyncable -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
+           done
+    )
+
+    (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-squeeze.list.gz) |
+    sort -u | poolfirst > ../typical.files
+
+    rm -f $ARCHLIST
+    log "Done!"
+}
+
+function mkchecksums() {
+    dsynclist=$dbdir/dsync.list
+    md5list=$indices/md5sums
+
+    log -n "Creating md5 / dsync index file ... "
+
+    cd "$ftpdir"
+    ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
+    ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n --rsyncable > ${md5list}.gz
+    ${bindir}/dsync-flist -q link-dups $dsynclist || true
+}
+
+function mirror() {
+    log "Regenerating \"public\" mirror/ hardlink fun"
+    DATE_SERIAL=$(date +"%Y%m%d01")
+    FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} )
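+    # Use whichever is larger: today's date-based serial or the previous trace
+    # serial plus one, so the serial keeps increasing across multiple runs per day.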
+    if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
+        SERIAL="$DATE_SERIAL"
+    else
+        SERIAL="$FILESOAPLUS1"
+    fi
+    date -u > ${TRACEFILE}
+    echo "Using dak v1" >> ${TRACEFILE}
+    echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
+    echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
+    cd ${mirrordir}
+    rsync -aH --link-dest ${ftpdir} --delete --delete-after --ignore-errors ${ftpdir}/. .
+}
+
+function expire() {
+    log "Expiring old database dumps..."
+    cd $base/backup
+    $scriptsdir/expire_dumps -d . -p -f "dump_*"
+}
+
+function transitionsclean() {
+    log "Removing out of date transitions..."
+    cd $base
+    dak transitions -c -a
+}
+
+function reports() {
+    # Send a report on NEW/BYHAND packages
+    log "Nagging ftpteam about NEW/BYHAND packages"
+    dak queue-report | mail -e -s "NEW and BYHAND on $(date +%D)" ftpmaster@backports-master.debian.org
+    # and one on crufty packages
+    log "Sending information about crufty packages"
+    dak cruft-report > $webdir/cruft-report-daily.txt
+#    dak cruft-report -s experimental >> $webdir/cruft-report-daily.txt
+    cat $webdir/cruft-report-daily.txt | mail -e -s "Debian archive cruft report for $(date +%D)" ftpmaster@backports-master.debian.org
+}
+
+function dm() {
+    log "Updating DM html page"
+    $scriptsdir/dm-monitor >$webdir/dm-uploaders.html
+}
+
+function bts() {
+    log "Categorizing uncategorized bugs filed against ftp.debian.org"
+    dak bts-categorize
+}
+
+function merkel2() {
+    # Push dak@merkel so it syncs the projectb there. Returns immediately, the sync runs detached
+    log "Trigger merkel/flotow's $PGDATABASE sync"
+    ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_projectb dak@merkel.debian.org sleep 1
+    # Also trigger flotow, the ftpmaster test box
+    ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_flotow_projectb dak@flotow.debconf.org sleep 1
+}
+
+function merkel3() {
+    # Push dak@merkel to tell it to sync the dd-accessible parts. Returns immediately, the sync runs detached
+    log "Trigger merkel's dd-accessible parts sync"
+    ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_ddaccess dak@merkel.debian.org sleep 1
+}
+
+function mirrorpush() {
+    log "Starting the mirrorpush"
+    date -u > /srv/backports-web.debian.org/underlay/mirrorstart
+    echo "Using dak v1" >> /srv/backports-web.debian.org/underlay/mirrorstart
+    echo "Running on host $(hostname -f)" >> /srv/backports-web.debian.org/underlay/mirrorstart
+    sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
+}
+
+function i18n2() {
+    log "Exporting package data foo for i18n project"
+    STAMP=$(date "+%Y%m%d%H%M")
+    mkdir -p ${scriptdir}/i18n/${STAMP}
+    cd ${scriptdir}/i18n/${STAMP}
+    dak control-suite -l stable > lenny
+    dak control-suite -l testing > squeeze
+    dak control-suite -l unstable > sid
+    echo "${STAMP}" > timestamp
+    gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 55BE302B --detach-sign -o timestamp.gpg timestamp
+    rm -f md5sum
+    md5sum * > md5sum
+    cd ${webdir}/
+    ln -sfT ${scriptdir}/i18n/${STAMP} i18n
+
+    cd ${scriptdir}
+    find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
+}
+
+function stats() {
+    log "Updating stats data"
+    cd $configdir
+    #$scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
+    #R --slave --vanilla < $base/misc/ftpstats.R
+    dak stats arch-space > $webdir/arch-space
+    dak stats pkg-nums > $webdir/pkg-nums
+}
+
+function aptftpcleanup() {
+    log "Clean up apt-ftparchive's databases"
+    cd $configdir
+    apt-ftparchive -q clean apt.conf
+}
+
+function compress() {
+    log "Compress old psql backups"
+    cd $base/backup/
+    find -maxdepth 1 -mindepth 1 -type f -name 'dump_pre_*' -mtime +2 -print0 | xargs -0 --no-run-if-empty rm
+
+    find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
+    while read dumpname; do
+        echo "Compressing $dumpname"
+        bzip2 -9fv "$dumpname"
+    done
+    find -maxdepth 1 -mindepth 1 -type f -name "dumpall_*" \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
+    while read dumpname; do
+        echo "Compressing $dumpname"
+        bzip2 -9fv "$dumpname"
+    done
+    finddup -l -d $base/backup
+}
+
+function logstats() {
+    $masterdir/tools/logs.py "$1"
+}
+
+# save timestamp when we start
+function savetimestamp() {
+       NOW=`date "+%Y.%m.%d-%H:%M:%S"`
+       echo ${NOW} > "${dbdir}/dinstallstart"
+}
+
+function maillogfile() {
+    cat "$LOGFILE" | mail -s "Log for dinstall run of ${NOW}" cron@ftp-master.debian.org
+}
+
+function renamelogfile() {
+    if [ -f "${dbdir}/dinstallstart" ]; then
+        NOW=$(cat "${dbdir}/dinstallstart")
+#        maillogfile
+        mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
+#        logstats "$logdir/dinstall_${NOW}.log"
+        bzip2 -9 "$logdir/dinstall_${NOW}.log"
+    else
+        error "Problem, I don't know when dinstall started, unable to do log statistics."
+        NOW=`date "+%Y.%m.%d-%H:%M:%S"`
+#        maillogfile
+        mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
+        bzip2 -9 "$logdir/dinstall_${NOW}.log"
+    fi
+}
+
+function testingsourcelist() {
+    dak ls -s testing -f heidi -r . | egrep 'source$' > ${webdir}/testing.list
+}
+
+# do a last run of process-unchecked before dinstall is on.
+function process_unchecked() {
+    log "Processing the unchecked queue"
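+    # "-p" is presumably picked up by the common do_unchecked helper so it skips
+    # taking the unchecked lock that dinstall already holds.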
+    UNCHECKED_WITHOUT_LOCK="-p"
+    do_unchecked
+    sync_debbugs
+}
+
+# do a run of newstage only before dinstall is on.
+function newstage() {
+    log "Processing the newstage queue"
+    UNCHECKED_WITHOUT_LOCK="-p"
+    do_newstage
+}
+
+# Function to update a "statefile" telling people what we are doing
+# (more or less).
+#
+# This should be called with one argument:
+#  - the status name we want to show.
+#
+function state() {
+    RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
+    cat >"${DINSTALLSTATE}" <<EOF
+Dinstall start: ${DINSTALLBEGIN}
+Current action: ${1}
+Action start: ${RIGHTNOW}
+EOF
+}
diff --git a/config/backports/dinstall.variables b/config/backports/dinstall.variables
new file mode 100644 (file)
index 0000000..d6d7bb3
--- /dev/null
@@ -0,0 +1,53 @@
+# -*- mode:sh -*-
+# usually we are not using debug logs. Set to 1 if you want them.
+DEBUG=0
+
+# our name
+PROGRAM="dinstall"
+
+# where do we want mails to go? For example log entries made with error()
+if [ "x$(hostname -s)x" != "xmorriconex" ]; then
+    # Not our ftpmaster host
+    MAILTO=${MAILTO:-"root"}
+else
+    # Yay, ftpmaster
+    MAILTO=${MAILTO:-"ftpmaster@backports.debian.org"}
+fi
+
+# How many logfiles to keep
+LOGROTATE=${LOGROTATE:-400}
+
+# Marker for dinstall start
+DINSTALLSTART="${lockdir}/dinstallstart"
+# Marker for dinstall end
+DINSTALLEND="${lockdir}/dinstallend"
+
+# lock cron.unchecked (it immediately exits when this exists)
+LOCK_DAILY="$lockdir/daily.lock"
+
+# Lock cron.unchecked from doing work
+LOCK_ACCEPTED="$lockdir/unchecked.lock"
+
+# Lock process-new from doing work
+LOCK_NEW="$lockdir/processnew.lock"
+
+# This file is simply used to indicate to britney whether or not
+# the Packages file updates completed successfully.  It's not a lock
+# from our point of view
+LOCK_BRITNEY="$lockdir/britney.lock"
+
+# If this file exists we exit immediately after the currently running
+# function is done
+LOCK_STOP="$lockdir/archive.stop"
+
+# Lock buildd updates
+LOCK_BUILDD="$lockdir/buildd.lock"
+
+# Statefile for the users
+DINSTALLSTATE="${webdir}/dinstall.status"
+
+# The mirror trace file
+TRACEFILE="${ftpdir}/project/trace/backports-master.debian.org"
+
+# The state file telling us we have something new to do
+DINSTALLPACKAGES="${lockdir}/dinstall.packages"
diff --git a/config/backports/vars b/config/backports/vars
new file mode 100644 (file)
index 0000000..7db419b
--- /dev/null
@@ -0,0 +1,54 @@
+# locations used by many scripts
+
+base=/srv/backports-master.debian.org
+bindir=$base/bin
+ftpdir=$base/ftp/
+indices=$ftpdir/indices
+webdir=/srv/backports-web.debian.org/underlay/
+scriptdir=$base/scripts
+
+archs="alpha amd64 arm armel hppa hurd-i386 i386 ia64 mips mipsel powerpc s390 sparc kfreebsd-i386 kfreebsd-amd64 "
+
+masterdir=$base/dak/
+overridedir=$base/scripts/override
+extoverridedir=$scriptdir/external-overrides
+configdir=$base/dak/config/backports/
+scriptsdir=$base/dak/scripts/backports/
+dbdir=$base/database/
+
+queuedir=$base/queue
+unchecked=$queuedir/unchecked/
+accepted=$queuedir/accepted/
+done=$queuedir/done/
+over=$base/over/
+lockdir=$base/lock/
+stagedir=$lockdir/stages
+incoming=$base/incoming
+logdir=$base/log/cron/
+mirrordir=$base/mirror/
+newstage=$queuedir/newstage/
+
+
+ftpgroup=debadmin
+
+copyoverrides="lenny-backports.contrib lenny-backports.contrib.src lenny-backports.main lenny-backports.main.debian-installer lenny-backports.main.src lenny-backports.extra.contrib lenny-backports.extra.main"
+
+# Change this to your hostname
+uploadhost=localhost
+uploaddir=/pub/UploadQueue/
+
+# What components to support
+components="main contrib non-free"
+suites="lenny-backports"
+override_types="deb dsc udeb"
+
+# temporary fix only!
+# export TMP=/srv/backports-master.debian.org/tmp
+# export TEMP=/srv/backports-master.debian.org/tmp
+# export TMPDIR=/srv/backports-master.debian.org/tmp
+
+PATH=$masterdir:$PATH
+umask 022
+
+# Set the database variables
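+# (dak admin config db-shell presumably prints export statements for PGDATABASE,
+# PGHOST, PGPORT and friends, which the psql/pg_dump calls in the cron scripts rely on)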
+eval $(dak admin config db-shell)
index 1dd52a4e8e594c48b4977cc79d18e166c3524e43..2ceefa8d4ff05fb1a16d04e2ebee571cf71e6171 100644 (file)
@@ -569,6 +569,7 @@ Dir
   Templates "/srv/ftp-master.debian.org/dak/templates/";
   PoolRoot "pool/";
   Lists "/srv/ftp-master.debian.org/database/dists/";
+  Cache "/srv/ftp-master.debian.org/database/";
   Log "/srv/ftp-master.debian.org/log/";
   Lock "/srv/ftp-master.debian.org/lock";
   Morgue "/srv/ftp-master.debian.org/morgue/";
index 636b8e35b3931ae384c3b819e5ffb5514ef77713..65e1410444e16811de335ec25e24ad74434af39a 100755 (executable)
@@ -442,8 +442,8 @@ MINIMAL_APT_CONF="""
 Dir
 {
    ArchiveDir "%(archivepath)s";
-   OverrideDir "/srv/ftp-master.debian.org/scripts/override/";
-   CacheDir "/srv/ftp-master.debian.org/database/";
+   OverrideDir "%(overridedir)s";
+   CacheDir "%(cachedir)s";
 };
 
 Default
@@ -505,11 +505,16 @@ class BuildQueue(object):
                 os.write(fl_fd, '%s\n' % n.fullpath)
             os.close(fl_fd)
 
+            cnf = Config()
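+            # the minimal apt.conf below now takes its cache and override dirs
+            # from dak.conf (Dir::Cache, Dir::Override) instead of hardcoded
+            # ftp-master paths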
+
             # Write minimal apt.conf
             # TODO: Remove hardcoding from template
             (ac_fd, ac_name) = mkstemp()
             os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
-                                                'filelist': fl_name})
+                                                'filelist': fl_name,
+                                                'cachedir': cnf["Dir::Cache"],
+                                                'overridedir': cnf["Dir::Override"],
+                                                })
             os.close(ac_fd)
 
             # Run apt-ftparchive generate
@@ -539,7 +544,6 @@ class BuildQueue(object):
 
             # Sign if necessary
             if self.signingkey:
-                cnf = Config()
                 keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
                 if cnf.has_key("Dinstall::SigningPubKeyring"):
                     keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
diff --git a/scripts/backports.org/copyoverrides b/scripts/backports.org/copyoverrides
deleted file mode 100755 (executable)
index a90db62..0000000
+++ /dev/null
@@ -1,29 +0,0 @@
-#! /bin/sh
-
-set -e
-. $SCRIPTVARS
-echo 'Copying override files into public view ...'
-
-for f in $copyoverrides ; do
-       cd $overridedir
-       chmod g+w override.$f
-
-       cd $indices
-       rm -f .newover-$f.gz
-       pc="`gzip 2>&1 -9nv <$overridedir/override.$f >.newover-$f.gz`"
-       set +e
-       nf=override.$f.gz
-       cmp -s .newover-$f.gz $nf
-       rc=$?
-       set -e
-        if [ $rc = 0 ]; then
-               rm -f .newover-$f.gz
-       elif [ $rc = 1 -o ! -f $nf ]; then
-               echo "   installing new $nf $pc"
-               mv -f .newover-$f.gz $nf
-               chmod g+w $nf
-       else
-               echo $? $pc
-               exit 1
-       fi
-done
diff --git a/scripts/backports.org/mkchecksums b/scripts/backports.org/mkchecksums
deleted file mode 100755 (executable)
index 575d55c..0000000
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/sh
-# Update the md5sums file
-
-set -e
-. $SCRIPTVARS
-
-dsynclist=$dbdir/dsync.list
-md5list=$indices/md5sums
-
-echo -n "Creating md5 / dsync index file ... "
-
-cd "$ftpdir"
-dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
-dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
-dsync-flist -q link-dups $dsynclist || true
diff --git a/scripts/backports.org/mklslar b/scripts/backports.org/mklslar
deleted file mode 100755 (executable)
index 19363f1..0000000
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/bin/sh
-# Update the ls-lR.
-
-set -e
-. $SCRIPTVARS
-
-cd $ftpdir
-
-filename=ls-lR
-
-echo "Removing any core files ..."
-find -type f -name core -print0 | xargs -0r rm -v
-
-echo "Checking permissions on files in the FTP tree ..."
-find -type f \( \! -perm -444 -o -perm +002 \) -ls
-find -type d \( \! -perm -555 -o -perm +002 \) -ls
-
-echo "Checking symlinks ..."
-symlinks -rd .
-
-echo "Creating recursive directory listing ... "
-rm -f .$filename.new
-TZ=UTC ls -lR | grep -v Archive_Maintenance_In_Progress > .$filename.new
-
-if [ -r ${filename}.gz ] ; then
-  mv -f ${filename}.gz $filename.old.gz
-  mv -f .$filename.new $filename
-  rm -f $filename.patch.gz
-  zcat $filename.old.gz | diff -u - $filename | gzip -9cfn - >$filename.patch.gz
-  rm -f $filename.old.gz
-else
-  mv -f .$filename.new $filename
-fi
-
-gzip -9cfN $filename >$filename.gz
-rm -f $filename
diff --git a/scripts/backports.org/mkmaintainers b/scripts/backports.org/mkmaintainers
deleted file mode 100755 (executable)
index edb0f20..0000000
+++ /dev/null
@@ -1,31 +0,0 @@
-#! /bin/sh
-
-echo
-echo -n 'Creating Maintainers index ... '
-
-set -e
-. $SCRIPTVARS
-cd $base/misc/
-
-nonusmaint="$base/misc/Maintainers_Versions-non-US"
-
-
-cd $indices
-dak make-maintainers | sed -e "s/~[^  ]*\([   ]\)/\1/"  | awk '{printf "%-20s ", $1; for (i=2; i<=NF; i++) printf "%s ", $i; printf "\n";}' > .new-maintainers
-
-set +e
-cmp .new-maintainers Maintainers >/dev/null
-rc=$?
-set -e
-if [ $rc = 1 ] || [ ! -f Maintainers ] ; then
-       echo -n "installing Maintainers ... "
-       mv -f .new-maintainers Maintainers
-       gzip -9v <Maintainers >.new-maintainers.gz
-       mv -f .new-maintainers.gz Maintainers.gz
-elif [ $rc = 0 ] ; then
-       echo '(same as before)'
-       rm -f .new-maintainers
-else
-       echo cmp returned $rc
-       false
-fi
diff --git a/scripts/backports/copyoverrides b/scripts/backports/copyoverrides
new file mode 100755 (executable)
index 0000000..a90db62
--- /dev/null
@@ -0,0 +1,29 @@
+#! /bin/sh
+
+set -e
+. $SCRIPTVARS
+echo 'Copying override files into public view ...'
+
+for f in $copyoverrides ; do
+       cd $overridedir
+       chmod g+w override.$f
+
+       cd $indices
+       rm -f .newover-$f.gz
+       pc="`gzip 2>&1 -9nv <$overridedir/override.$f >.newover-$f.gz`"
+       set +e
+       nf=override.$f.gz
+       cmp -s .newover-$f.gz $nf
+       rc=$?
+       set -e
+        if [ $rc = 0 ]; then
+               rm -f .newover-$f.gz
+       elif [ $rc = 1 -o ! -f $nf ]; then
+               echo "   installing new $nf $pc"
+               mv -f .newover-$f.gz $nf
+               chmod g+w $nf
+       else
+               echo $? $pc
+               exit 1
+       fi
+done
diff --git a/scripts/backports/expire_dumps b/scripts/backports/expire_dumps
new file mode 120000 (symlink)
index 0000000..649cd12
--- /dev/null
@@ -0,0 +1 @@
+../debian/expire_dumps
\ No newline at end of file
diff --git a/scripts/backports/mkchecksums b/scripts/backports/mkchecksums
new file mode 100755 (executable)
index 0000000..575d55c
--- /dev/null
@@ -0,0 +1,15 @@
+#!/bin/sh
+# Update the md5sums file
+
+set -e
+. $SCRIPTVARS
+
+dsynclist=$dbdir/dsync.list
+md5list=$indices/md5sums
+
+echo -n "Creating md5 / dsync index file ... "
+
+cd "$ftpdir"
+dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
+dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
+dsync-flist -q link-dups $dsynclist || true
diff --git a/scripts/backports/mklslar b/scripts/backports/mklslar
new file mode 100755 (executable)
index 0000000..19363f1
--- /dev/null
@@ -0,0 +1,36 @@
+#!/bin/sh
+# Update the ls-lR.
+
+set -e
+. $SCRIPTVARS
+
+cd $ftpdir
+
+filename=ls-lR
+
+echo "Removing any core files ..."
+find -type f -name core -print0 | xargs -0r rm -v
+
+echo "Checking permissions on files in the FTP tree ..."
+find -type f \( \! -perm -444 -o -perm +002 \) -ls
+find -type d \( \! -perm -555 -o -perm +002 \) -ls
+
+echo "Checking symlinks ..."
+symlinks -rd .
+
+echo "Creating recursive directory listing ... "
+rm -f .$filename.new
+TZ=UTC ls -lR | grep -v Archive_Maintenance_In_Progress > .$filename.new
+
+if [ -r ${filename}.gz ] ; then
+  mv -f ${filename}.gz $filename.old.gz
+  mv -f .$filename.new $filename
+  rm -f $filename.patch.gz
+  zcat $filename.old.gz | diff -u - $filename | gzip -9cfn - >$filename.patch.gz
+  rm -f $filename.old.gz
+else
+  mv -f .$filename.new $filename
+fi
+
+gzip -9cfN $filename >$filename.gz
+rm -f $filename
diff --git a/scripts/backports/mkmaintainers b/scripts/backports/mkmaintainers
new file mode 100755 (executable)
index 0000000..edb0f20
--- /dev/null
@@ -0,0 +1,31 @@
+#! /bin/sh
+
+echo
+echo -n 'Creating Maintainers index ... '
+
+set -e
+. $SCRIPTVARS
+cd $base/misc/
+
+nonusmaint="$base/misc/Maintainers_Versions-non-US"
+
+
+cd $indices
+dak make-maintainers | sed -e "s/~[^  ]*\([   ]\)/\1/"  | awk '{printf "%-20s ", $1; for (i=2; i<=NF; i++) printf "%s ", $i; printf "\n";}' > .new-maintainers
+
+set +e
+cmp .new-maintainers Maintainers >/dev/null
+rc=$?
+set -e
+if [ $rc = 1 ] || [ ! -f Maintainers ] ; then
+       echo -n "installing Maintainers ... "
+       mv -f .new-maintainers Maintainers
+       gzip -9v <Maintainers >.new-maintainers.gz
+       mv -f .new-maintainers.gz Maintainers.gz
+elif [ $rc = 0 ] ; then
+       echo '(same as before)'
+       rm -f .new-maintainers
+else
+       echo cmp returned $rc
+       false
+fi
index 2e6f41f3b3a7c296eb284c1af95fb605834f6348..16c482a0ef7e26653b80d64618a9c8c4d3f7f687 100644 (file)
@@ -125,6 +125,8 @@ $remote_timeout = 3*60*60; # 3 hours
 # mail address of maintainer
 $maintainer_mail = "ftpmaster\@debian.org";
 
+# to override the TO address of ALL outgoing mail, set this value.
+$overridemail = "";
 
 # logfile rotating:
 # -----------------
diff --git a/tools/debianqueued-0.9/config-backports b/tools/debianqueued-0.9/config-backports
new file mode 100644 (file)
index 0000000..9144acc
--- /dev/null
@@ -0,0 +1,143 @@
+#
+# example configuration file for debianqueued
+#
+
+# set to != 0 for debugging output (to log file)
+$debug = 0;
+
+# various programs:
+# -----------------
+$gpg       = "/usr/bin/gpg";
+$ssh       = "/usr/bin/ssh";
+$scp       = "/usr/bin/scp";
+$ssh_agent = "/usr/bin/ssh-agent";
+$ssh_add   = "/usr/bin/ssh-add";
+$md5sum    = "/usr/bin/md5sum";
+$mail      = "/usr/sbin/sendmail";
+$mkfifo    = "/usr/bin/mkfifo";
+$tar       = "/bin/tar"; # must be GNU tar!
+$gzip      = "/bin/gzip";
+$ar        = "/usr/bin/ar"; # must support p option, optional
+$ls        = "/bin/ls";
+$cp        = "/bin/cp";
+$chmod     = "/bin/chmod";
+
+# binaries whose existence should be tested before each queue run
+#@test_binaries = ();
+
+# general options to ssh/scp
+$ssh_options = "-o'BatchMode yes' -o'FallBackToRsh no' ".
+               "-o'ForwardAgent no' -o'ForwardX11 no' ".
+               "-o'PasswordAuthentication no' -o'StrictHostKeyChecking yes'";
+
+# ssh key file to use for connects to master (empty: default ~/.ssh/identity)
+$ssh_key_file = "";
+
+# the incoming dir we live in
+$incoming = "/srv/backports-upload/pub/UploadQueue";
+
+# the delayed incoming directories
+$incoming_delayed = "/srv/upload.debian.org/ftp/pub/UploadQueue/DELAYED/%d-day";
+
+# maximum delay directory, -1 for no delayed directory,
+# incoming_delayed and target_delayed need to exist.
+$max_delayed = -1;
+
+# files not to delete in $incoming (regexp)
+$keep_files = '(status|\.message|README)$';
+
+# file patterns that aren't deleted right away
+$valid_files = '(\.changes|\.tar\.(?:gz|bz2)|\.dsc|\.u?deb|diff\.gz|\.sh)$';
+
+# Change files to mode 644 locally (after md5 check) or only on master?
+$chmod_on_target = 0;
+
+# Do an md5sum check after upload?
+$check_md5sum = 1;
+
+# name of the status file or named pipe in the incoming dir
+$statusfile = "$incoming/status";
+
+# if 0, status file implemented as FIFO; if > 0, status file is plain
+# file and updated with a delay of this many seconds
+$statusdelay = 30;
+
+# names of the keyring files
+@keyrings = ( "/srv/keyring.debian.org/keyrings/debian-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/debian-maintainers.gpg" );
+
+# our log file
+$logfile = "$queued_dir/log";
+
+# our pid file
+$pidfile = "$queued_dir/pid";
+
+# upload method (ssh, copy, ftp)
+$upload_method = "copy";
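+# (with the copy method debianqueued presumably just moves verified uploads into
+# $targetdir below on the local filesystem; $target and $targetlogin are ignored)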
+
+# name of target host (ignored on copy method)
+$target = "localhost";
+
+# login name on target host (for ssh, always 'ftp' for ftp, ignored for copy)
+$targetlogin = "queue";
+
+# incoming on target host
+$targetdir = "/srv/backports-master.debian.org/queue/unchecked/";
+
+# incoming/delayed on target host
+$targetdir_delayed = "/srv/upload.debian.org/DEFERRED/%d-day";
+
+# select FTP debugging
+$ftpdebug = 0;
+
+# FTP timeout
+$ftptimeout = 900;
+
+# max. number of tries to upload
+$max_upload_retries = 8;
+
+# delay after first failed upload
+$upload_delay_1 = 30*60; # 30 min.
+
+# delay between successive failed uploads
+$upload_delay_2 = 4*60*60; # 4 hours
+
+# packages that must go to nonus.debian.org and thus are rejected here
+#@nonus_packages = qw(gpg-rsaidea);
+
+# timings:
+# --------
+#   time between two queue checks
+$queue_delay = 5*60; # 5 min.
+#   when are stray files deleted?
+$stray_remove_timeout = 24*60*60; # 1 day
+#   delay before reporting problems with a .changes file (not
+#   immediately for to-be-continued uploads)
+$problem_report_timeout = 30*60; # 30 min.
+#   delay before reporting that a .changes file is missing (not
+#   immediately for to-be-continued uploads)
+$no_changes_timeout = 30*60; # 30 min.
+#   when are .changes with persistent problems removed?
+$bad_changes_timeout = 2*24*60*60; # 2 days
+#   how long may a remote operation (ssh/scp) take?
+$remote_timeout = 3*60*60; # 3 hours
+
+# mail address of maintainer
+$maintainer_mail = "ftpmaster\@debian.org";
+
+# to override the TO address of ALL outgoing mail, set this value.
+$overridemail = "debian-backports-changes@lists.debian.org";
+
+# logfile rotating:
+# -----------------
+#    how often to rotate (in days)
+$log_age = 7;
+#    how many old logs to keep
+$log_keep = 4;
+#    send summary mail when rotating logs?
+$mail_summary = 1;
+#    write summary to file when rotating logs? (no if name empty)
+$summary_file = "$queued_dir/summary";
+
+# don't remove this, Perl needs it!
+1;
index a8743e53c533ad2798fb66268f5b3e494f86cb51..77cc90d7a264cd0cfea475782ee655dbe73f7990 100644 (file)
@@ -125,6 +125,8 @@ $remote_timeout = 3*60*60; # 3 hours
 # mail address of maintainer
 $maintainer_mail = "ftpmaster\@debian.org";
 
+# to override the TO address of ALL outgoing mail, set this value.
+$overridemail = "";
 
 # logfile rotating:
 # -----------------
index 8bdfe55563c3a15f34cdbee3c8992679e67c264d..1eb502dcdeba46f4899547407df266750ed88698 100755 (executable)
@@ -59,6 +59,7 @@ $junk         = @conf::test_binaries;
 $junk         = @conf::maintainer_mail;
 $junk         = @conf::targetdir_delayed;
 $junk         = $conf::mail ||= '/usr/sbin/sendmail';
+$junk         = $conf::overridemail;
 $conf::target = "localhost" if $conf::upload_method eq "copy";
 
 package main;
@@ -2330,6 +2331,10 @@ sub send_mail($$$) {
     $Email::Send::Sendmail::SENDMAIL = $conf::mail;
   }
 
+  if ($conf::overridemail) {
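+       # redirect ALL outgoing mail to the override address; config-backports
+       # points this at the backports -changes list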
+       $addr = $conf::overridemail;
+  }
+
   my $date = sprintf "%s",
     strftime( "%a, %d %b %Y %T %z", ( localtime(time) ) );
   my $message = <<__MESSAGE__;