also add the config and scripts from it.
Most, if not all, should possibly be made cleaner/merged into the debian part
of it, but right now I just want to merge it and get it working with latest
code...
Signed-off-by: Joerg Jaspert <joerg@debian.org>
--- /dev/null
+This file maps each file available in the backports.org archive system to
+the package from which it originates. It includes packages from the
+DIST distribution for the ARCH architecture.
+
+You can use this list to determine which package contains a specific
+file, or whether or not a specific file is available. The list is
+updated weekly, each architecture on a different day.
+
+When a file is contained in more than one package, all packages are
+listed. When a directory is contained in more than one package, only
+the first is listed.
+
+The best way to search quickly for a file is with the Unix `grep'
+utility, as in `grep <regular expression> Contents':
+
+ $ grep nose Contents
+ etc/nosendfile net/sendfile
+ usr/X11R6/bin/noseguy x11/xscreensaver
+ usr/X11R6/man/man1/noseguy.1x.gz x11/xscreensaver
+ usr/doc/examples/ucbmpeg/mpeg_encode/nosearch.param graphics/ucbmpeg
+ usr/lib/cfengine/bin/noseyparker admin/cfengine
+
+This list contains files in all packages, even though not all of the
+packages are installed on an actual system at once. If you want to
+find out which packages on an installed Debian system provide a
+particular file, you can use `dpkg --search <filename>':
+
+ $ dpkg --search /usr/bin/dselect
+ dpkg: /usr/bin/dselect
+
+
+FILE LOCATION
--- /dev/null
+Dir
+{
+ ArchiveDir "/org/backports.org/ftp/";
+ OverrideDir "/org/backports.org/scripts/override/";
+ CacheDir "/org/backports.org/database/";
+};
+
+Default
+{
+ Packages::Compress ". gzip bzip2";
+ Sources::Compress ". gzip bzip2";
+ DeLinkLimit 0;
+ FileMode 0664;
+ Contents::Compress "gzip";
+ MaxContentsChange 12000;
+};
+
+TreeDefault
+{
+ Contents::Header "/org/backports.org/dak-config/Contents.top";
+};
+
+tree "dists/lenny-backports"
+{
+ FileList "/org/backports.org/database/dists/lenny-backports_$(SECTION)_binary-$(ARCH).list";
+ SourceFileList "/org/backports.org/database/dists/lenny-backports_$(SECTION)_source.list";
+ Sections "main contrib non-free";
+ Architectures "alpha amd64 arm armel hppa hurd-i386 i386 ia64 mips mipsel powerpc s390 sparc source";
+ BinOverride "override.lenny-backports.$(SECTION)";
+ ExtraOverride "override.lenny-backports.extra.$(SECTION)";
+ SrcOverride "override.lenny-backports.$(SECTION).src";
+ Packages::Compress ". gzip bzip2";
+ Sources::Compress ". gzip bzip2";
+};
+
+tree "dists/lenny-backports/main"
+{
+ FileList "/org/backports.org/database/dists/lenny-backports_main_$(SECTION)_binary-$(ARCH).list";
+ Sections "debian-installer";
+ Architectures "alpha amd64 arm armel hppa hurd-i386 i386 ia64 mips mipsel powerpc s390 sparc source";
+ BinOverride "override.lenny-backports.main.$(SECTION)";
+ SrcOverride "override.lenny-backports.main.src";
+ BinCacheDB "packages-debian-installer-$(ARCH).db";
+ Packages::Extensions ".udeb";
+ Contents "$(DIST)/../Contents-udeb";
+};
+
+tree "dists/etch-backports"
+{
+ FileList "/org/backports.org/database/dists/etch-backports_$(SECTION)_binary-$(ARCH).list";
+ SourceFileList "/org/backports.org/database/dists/etch-backports_$(SECTION)_source.list";
+ Sections "main contrib non-free";
+ Architectures "alpha amd64 arm hppa hurd-i386 i386 ia64 mips mipsel m68k powerpc s390 sh sparc source";
+ BinOverride "override.etch-backports.$(SECTION)";
+ ExtraOverride "override.etch-backports.extra.$(SECTION)";
+ SrcOverride "override.etch-backports.$(SECTION).src";
+ Packages::Compress ". gzip bzip2";
+ Sources::Compress ". gzip bzip2";
+};
+
+tree "dists/etch-backports/main"
+{
+ FileList "/org/backports.org/database/dists/etch-backports_main_$(SECTION)_binary-$(ARCH).list";
+ Sections "debian-installer";
+ Architectures "alpha amd64 arm hppa hurd-i386 i386 ia64 mips mipsel m68k powerpc s390 sh sparc source";
+ BinOverride "override.etch-backports.main.$(SECTION)";
+ SrcOverride "override.etch-backports.main.src";
+ BinCacheDB "packages-debian-installer-$(ARCH).db";
+ Packages::Extensions ".udeb";
+ Contents "$(DIST)/../Contents-udeb";
+};
--- /dev/null
+#!/bin/bash
+
+# Copyright (c) 2005 Peter Palfrader <peter@palfrader.org>
+
+# Copy the Packages/Sources/Release indices of each backports suite into
+# the buildd tree, append "NotAutomatic: yes" to the copied Release files,
+# fix up sizes/hashes in the top-level Release file and re-sign it.
+#
+# WARNING: spaces in filenames/paths considered harmful.
+
+export SCRIPTVARS=/org/backports.org/dak/config/backports.org/vars
+. $SCRIPTVARS
+
+cd ${configdir}
+
+for suite in etch lenny; do
+ source="${ftpdir}/dists/${suite}-backports"
+ target="${basedir}/buildd/dists/${suite}-backports"
+
+ if ! [ -d "$source" ]; then
+  echo "Source '$source' does not exist or is not a directory or we can't access it." >&2
+  exit 1;
+ fi
+ if ! [ -d "$target" ]; then
+  echo "Target '$target' does not exist or is not a directory or we can't access it." >&2
+  exit 1;
+ fi
+
+ # NOTE: the per-file variables must NOT be named "basedir" -- the original
+ # clobbered $basedir from $SCRIPTVARS here, which broke "$target" on the
+ # second loop iteration and the Release.gpg path after the loop.
+ for file in $( cd "$source" && find . -name 'Packages.gz' -o -name 'Packages' -o -name 'Sources.gz' -o -name 'Sources' -o -name 'Release' ); do
+  filedir=$(dirname "$file")
+  filebase=$(basename "$file")
+  targetdir="$target/$filedir"
+  [ -d "$targetdir" ] || mkdir -p "$targetdir"
+  cp -a "$source/$file" "$target/$file"
+  if [ "$filebase" = "Release" ]; then
+   # buildds must never pick up backports automatically
+   echo 'NotAutomatic: yes' >> "$target/$file"
+  fi
+ done
+
+# postprocess top level Release file
+ if ! [ -e "$target/Release" ]; then
+  echo "Did not find $target/Release after copying stuff. something's fishy" >&2
+  exit 1;
+ fi
+
+ cd "$target"
+
+ # Rewrite the SHA1/MD5 sections of Release: drop entries for files we did
+ # not copy, and recompute size+hash for the Release files we appended to.
+ perl -a -p -i -e '
+ if (substr($_,0,1) eq " ") {
+  if ($in_sha1 || $in_md5) {
+   ($hash, $size, $file) = @F;
+   $_="",next unless -f $file;
+
+   (undef,undef,undef,undef,undef,undef,undef,$filesize,
+    undef,undef,undef,undef,undef) = stat($file);
+   if ($size != $filesize) {
+    if ($in_sha1) {
+     $hash = `sha1sum "$file" | cut -d " " -f 1`
+    } else {
+     $hash = `md5sum "$file" | cut -d " " -f 1`
+    };
+    chomp $hash;
+    $_ = sprintf(" %s %16d %s\n", $hash, $filesize, $file);
+   }
+  }
+ } else {
+  $in_sha1 = ($F[0] eq "SHA1:") ? 1 : 0;
+  $in_md5 = ($F[0] eq "MD5Sum:") ? 1 : 0;
+ }
+' Release
+
+ rm -f "$target/Release.gpg"
+ gpg --no-options --batch --no-tty --secret-keyring ${basedir}/s3kr1t/dot-gnupg/secring.gpg --output "Release.gpg" --armor --detach-sign "Release"
+done
--- /dev/null
+#! /bin/bash -e
+
+# Executed hourly via cron, out of katie's crontab.
+# stolen from newraff and adjusted by aba on 2005-04-30
+#exit 0
+
+export SCRIPTVARS=/org/backports.org/dak-config/vars
+. $SCRIPTVARS
+# NOTE(review): deliberately neutered -- instead of triggering a real
+# wanna-build update it just pokes the host with "echo broken" over ssh
+# and then exits successfully. Presumably a placeholder until the
+# wanna-build integration is set up; confirm before relying on it.
+ssh -i $base/s3kr1t/dot-ssh/id_rsa wanna-build@wanna-build.farm.ftbfs.de echo broken
+exit 0
--- /dev/null
+#! /bin/sh
+#
+# Executed daily via cron, out of katie's crontab.
+# Expires old packages from the suites and queues, then mails the
+# NEW/BYHAND queue report and the cruft report to ftpmaster.
+
+set -e
+export SCRIPTVARS=/org/backports.org/dak-config/vars
+. $SCRIPTVARS
+
+################################################################################
+# Clean out old packages
+dak clean-suites
+dak clean-queues
+
+# Send a report on NEW/BYHAND packages
+# (mail -e: only send the mail if the report produced any output)
+dak queue-report | mail -e -s "NEW and BYHAND on $(date +%D)" ftpmaster@backports.org
+# and one on crufty packages
+dak cruft-report | tee $webdir/cruft-report-daily.txt | mail -e -s "Debian archive cruft report for $(date +%D)" ftpmaster@backports.org
+
+echo Daily cron scripts successful.
--- /dev/null
+#! /bin/sh
+#
+# Executed daily via cron, out of katie's crontab.
+# Installs accepted packages, regenerates indices and Release files,
+# refreshes the auxiliary index scripts, dumps and vacuums projectb,
+# and rebuilds the mirror list. Exits early if nothing was accepted.
+set -e
+export SCRIPTVARS=/org/backports.org/dak-config/vars
+. $SCRIPTVARS
+
+################################################################################
+cd $accepted
+
+# Only used as an "is there anything to do?" probe below.
+changes=$(find . -maxdepth 1 -mindepth 1 -type f -name \*.changes | sed -e "s,./,," | xargs)
+
+if [ -z "$changes" ]; then
+ exit 0;
+fi
+
+echo Archive maintenance started at $(date +%X)
+
+NOTICE="$ftpdir/Archive_Maintenance_In_Progress"
+LOCKCU="$lockdir/daily.lock"
+LOCKAC="$lockdir/unchecked.lock"
+
+# Remove the mirror notice and the daily lock on any exit path.
+cleanup() {
+ rm -f "$NOTICE"
+ rm -f "$LOCKCU"
+}
+trap cleanup 0
+
+rm -f "$NOTICE"
+lockfile -l 3600 $LOCKCU
+cat > "$NOTICE" <<EOF
+Packages are currently being installed and indices rebuilt.
+Maintenance is automatic, starting hourly at 5 minutes past the hour.
+Most of the times it is finished after about 10 til 15 minutes.
+
+You should not mirror the archive during this period.
+EOF
+
+################################################################################
+
+# Install everything from the accepted queue; REPORT keeps the log.
+cd $accepted
+rm -f REPORT
+dak process-accepted -pa *.changes | tee REPORT | \
+ mail -s "Install for $(date +%d.%m.%Y)" ftpmaster@backports.org
+chgrp debadmin REPORT
+chmod 664 REPORT
+
+cd $masterdir
+
+# Allow cron.unchecked to run again while we rebuild indices.
+rm -f $LOCKAC
+
+symlinks -d -r $ftpdir
+
+cd $masterdir
+dak make-suite-file-list
+
+# Generate override files
+cd $overridedir
+dak make-overrides
+
+# Generate Packages and Sources files
+cd $configdir
+apt-ftparchive generate apt.conf
+# Generate *.diff/ incremental updates
+dak generate-index-diffs
+# Generate Release files
+dak generate-releases
+
+# Clean out old packages
+# Now in cron.daily. JJ[03.05.2005.]
+#rhona
+#shania
+
+# Auxiliary index files (Maintainers, overrides, ls-lR, md5sums).
+cd $scriptsdir
+./mkmaintainers
+./copyoverrides
+./mklslar
+./mkchecksums
+
+# Redundant with the cleanup trap, but removes the notice as early as
+# possible so mirrors can resume.
+rm -f $NOTICE
+rm -f $LOCKCU
+echo Archive maintenance finished at $(date +%X)
+
+################################################################################
+
+echo "Creating post-hourly-cron-job backup of projectb database..."
+POSTDUMP=/org/backports.org/backup/dump_$(date +%Y.%m.%d-%H:%M:%S)
+pg_dump projectb > $POSTDUMP
+(cd /org/backports.org/backup; ln -sf $POSTDUMP current)
+
+################################################################################
+
+# Vacuum the database
+# NOTE(review): under "set -e" this pipeline aborts the script whenever
+# grep -v filters ALL of psql's output (grep then exits 1) -- confirm
+# whether that is intended or needs an "|| true".
+echo "VACUUM; VACUUM ANALYZE;" | psql projectb 2>&1 | grep -v "^NOTICE: Skipping.*only table owner can VACUUM it$"
+
+################################################################################
+
+# Now in cron.daily JJ[03.05.2005]
+# Send a report on NEW/BYHAND packages
+#helena | mail -e -s "NEW and BYHAND on $(date +%D)" ftpmaster@amd64.debian.net
+# and one on crufty package
+#rene | mail -e -s "rene run for $(date +%D)" ftpmaster@amd64.debian.net
+
+################################################################################
+
+# Rebuild the mirror list (HTML via mirror.pl, plain text via links -dump).
+(cd /org/backports.org/stats; rm -f master.list; ./dmc.pl get >/dev/null 2>&1; \
+./mirror.pl>$ftpdir/README.mirrors.html; cd $ftpdir; /usr/bin/links -dump README.mirrors.html >README.mirrors.txt)
+
+
+################################################################################
+
+# Cap memory use of whatever runs from the distmnt directory.
+ulimit -m 90000 -d 90000 -s 10000 -v 90000
+
+run-parts --report /org/backports.org/scripts/distmnt
+
+echo Daily cron scripts successful.
--- /dev/null
+#!/bin/sh
+#
+# Run at the beginning of the month via cron, out of katie's crontab.
+# Rotates and compresses last month's mail archives, starts a fresh
+# monthly log file, and archives processed .changes via "dak split-done".
+
+set -e
+export SCRIPTVARS=/org/backports.org/dak-config/vars
+. $SCRIPTVARS
+
+################################################################################
+
+# yymm of the month that just ended
+DATE=`date -d yesterday +%y%m`
+
+cd ${basedir}/mail/archive
+for m in mail import; do
+ if [ -f $m ]; then
+  mv $m ${m}-$DATE
+  # give in-flight deliveries a moment to finish writing
+  sleep 20
+  gzip -9 ${m}-$DATE
+  chgrp debadmin ${m}-$DATE.gz
+  chmod 660 ${m}-$DATE.gz
+ fi;
+done
+
+DATE=`date +%Y-%m`
+cd ${basedir}/log
+touch $DATE
+# -f: plain "rm" aborted under "set -e" on the very first run, when the
+# "current" symlink does not exist yet
+rm -f current
+ln -s $DATE current
+chmod g+w $DATE
+chown dak:debadmin $DATE
+
+dak split-done
+################################################################################
--- /dev/null
+#! /bin/sh
+# Process the "unchecked" queue: run dak process-unchecked on any pending
+# .changes files and append the result to the queue REPORT. A lockfile
+# ensures only one instance runs at a time.
+set -e
+export SCRIPTVARS=/org/backports.org/dak-config/vars
+. $SCRIPTVARS
+
+LOCKFILE="$lockdir/unchecked.lock"
+NOTICE="$lockdir/daily.lock"
+
+cleanup() {
+ rm -f "$LOCKFILE"
+ # The quotes matter: unquoted, an unset $LOCKDAILY expanded to
+ # "[ ! -z ]", which is always true and removed the daily lock on
+ # every single run.
+ if [ ! -z "$LOCKDAILY" ]; then
+  rm -f "$NOTICE"
+ fi
+}
+trap cleanup 0
+
+# only run one cron.unchecked
+if lockfile -r3 $LOCKFILE; then
+ cd $unchecked
+
+ changes=$(find . -maxdepth 1 -mindepth 1 -type f -name \*.changes | sed -e "s,./,," | xargs)
+ report=$queuedir/REPORT
+ timestamp=$(date "+%Y-%m-%d %H:%M")
+
+ if [ ! -z "$changes" ]; then
+  echo "$timestamp": "$changes" >> $report
+  dak process-unchecked -a $changes >> $report
+  echo "--" >> $report
+ else
+  echo "$timestamp": Nothing to do >> $report
+ fi
+fi
+
+rm -f "$LOCKFILE"
--- /dev/null
+#!/bin/sh
+#
+# Run once a week via cron, out of katie's crontab.
+# Prunes empty pool directories and cleans apt-ftparchive's caches.
+
+set -e
+export SCRIPTVARS=/org/backports.org/dak-config/vars
+. $SCRIPTVARS
+
+################################################################################
+
+# Purge empty directories: a single space-safe find pass replaces the
+# original double traversal; "xargs -0r" is a no-op when nothing matches.
+
+find $ftpdir/pool/ -type d -empty -print0 | xargs -0r rmdir
+
+# Clean up apt-ftparchive's databases
+
+cd $configdir
+apt-ftparchive -q clean apt.conf
+
+################################################################################
--- /dev/null
+Dinstall
+{
+ // Both need to be defined at the moment, but they can point to the
+ // same file.
+ GPGKeyring {
+ "/org/backports.org/keyrings/keyring.gpg";
+ };
+ // To sign the release files. Adjust the keyid!
+ // Note: Key must be without a passphrase or it wont work automagically!
+ SigningKeyring "/org/backports.org/s3kr1t/dot-gnupg/secring.gpg";
+ SigningPubKeyring "/org/backports.org/s3kr1t/dot-gnupg/pubring.gpg";
+ SigningKeyIds "16BA136C";
+ SendmailCommand "/usr/sbin/sendmail -odq -oi -t";
+ MyEmailAddress "Backports.org archive Installer <installer@backports.org>";
+ MyAdminAddress "ftpmaster@backports.org";
+ MyHost "backports.org"; // used for generating user@my_host addresses in e.g. manual_reject()
+ MyDistribution "Backports.org archive"; // Used in emails
+ // Alicia and melanie can use it
+ BugServer "bugs.backports.org";
+ // melanie uses the packages server.
+ // PackagesServer "packages.test.backports.org";
+ // If defined then the package@this.server gets a copy of most of the
+ // actions related to the package. For an example look at
+ // packages.qa.debian.org
+ // TrackingServer "packages.qa.test.backports.org";
+ LockFile "/org/backports.org/lock/dinstall.lock";
+ // If defined this address gets a bcc of all mails.
+ // FIXME: set this up properly once this goes into production!
+ Bcc "backports-archive@lists.backports.org";
+ GroupOverrideFilename "override.group-maint";
+ FutureTimeTravelGrace 28800; // 8 hours
+ PastCutoffYear "1984";
+ SkipTime 300;
+ // If defined then mails to close bugs are sent to the bugserver.
+ CloseBugs "false";
+ OverrideDisparityCheck "true";
+ DefaultSuite "etch-backports";
+ Reject
+ {
+ NoSourceOnly "true";
+ ReleaseTransitions "/org/backports.org/hints/transitions.yaml";
+ };
+ // If set, only send mails to addresses listed there.
+ MailWhiteList "/org/backports.org/dak/config/backports.org/mail-whitelist";
+};
+
+Generate-Index-Diffs
+{
+ Options
+ {
+ TempDir "/org/backports.org/tiffani";
+ MaxDiffs { Default 50; };
+ };
+};
+
+Override
+{
+ MyEmailAddress "Backports.org archive FTP Masters <ftpmaster@backports.org>";
+};
+
+Add-User
+{
+// Should we sent a mail to newly added users?
+ SendEmail "true";
+
+// Should we create an account so they can login?
+// Account will be created with the defaults from adduser, so adjust
+// it's configuration to fit your needs.
+// NOTE: This requires that your dak user has a sudo entry, allowing
+// to run /usr/sbin/useradd!
+ CreateAccount "false";
+
+// Note: This is a comma separated list of additional groupnames to
+// which uma should add the user. NO spaces between the groupnames or
+// useradd will die.
+// Disable it if you dont want or need that feature.
+ GID "debuser";
+
+};
+
+Check-Overrides
+{
+ OverrideSuites
+ {
+ lenny-backports
+ {
+ Process "1";
+// OriginSuite "Unstable";
+ };
+
+ etch-backports
+ {
+ Process "1";
+// OriginSuite "Unstable";
+ };
+
+// Unstable
+// {
+// Process "0";
+// };
+ };
+};
+
+
+Import-Users-From-Passwd
+{
+ // The Primary GID of your users. Using uma it is the gid from group users.
+ ValidGID "1001";
+ // Comma separated list of users who are in Postgres but not the passwd file
+ KnownPostgres "postgres,katie";
+};
+
+Clean-Queues
+{
+ Options
+ {
+ Days 14;
+ };
+ MorgueSubDir "queues";
+};
+
+Control-Overrides
+{
+ Options
+ {
+ Component "main";
+ Suite "etch-backports";
+ Type "deb";
+ };
+
+ ComponentPosition "prefix"; // Whether the component is prepended or appended to the section name
+};
+
+Rm
+{
+ Options
+ {
+ Suite "etch-backports";
+ };
+
+ MyEmailAddress "Backports.org archive Maintenance <ftpmaster@backports.org>";
+ LogFile "/org/backports.org/ftp/removals.txt";
+};
+
+Import-Archive
+{
+ ExportDir "/org/backports.org/dak/import-archive-files/";
+};
+
+Clean-Suites
+{
+ // How long (in seconds) dead packages are left before being killed
+ StayOfExecution 1209600; // 14 days
+ AcceptedAutoBuildStayOfExecution 86400; // 24 hours
+ MorgueSubDir "pool";
+};
+
+Process-New
+{
+ AcceptedLockFile "/org/backports.org/lock/unchecked.lock";
+};
+
+Suite
+{
+ lenny-backports
+ {
+ Components
+ {
+ main;
+ contrib;
+ non-free;
+ };
+ Architectures
+ {
+ source;
+ all;
+ alpha;
+ amd64;
+ arm;
+ armel;
+ hppa;
+ i386;
+ ia64;
+ mips;
+ mipsel;
+ powerpc;
+ s390;
+ sparc;
+ };
+
+ Announce "backports-changes@lists.backports.org";
+ Origin "Backports.org archive";
+ Description "Backports for the Lenny Distribution";
+ CodeName "Lenny-backports";
+ OverrideCodeName "Lenny-backports";
+ Priority "7";
+ NotAutomatic "yes";
+ };
+
+ etch-backports
+ {
+ Components
+ {
+ main;
+ contrib;
+ non-free;
+ };
+ Architectures
+ {
+ source;
+ all;
+ alpha;
+ amd64;
+ arm;
+ hppa;
+ hurd-i386;
+ i386;
+ ia64;
+ m68k;
+ mips;
+ mipsel;
+ powerpc;
+ s390;
+ sh;
+ sparc;
+ };
+ Announce "backports-changes@lists.backports.org";
+ Origin "Backports.org archive";
+ Description "Backports for the Etch Distribution";
+ CodeName "etch-backports";
+ OverrideCodeName "etch-backports";
+ Priority "7";
+ NotAutomatic "yes";
+ };
+
+};
+
+Dir
+{
+ Root "/org/backports.org/ftp/";
+ Pool "/org/backports.org/ftp/pool/";
+ Templates "/org/backports.org/dak/templates/";
+ PoolRoot "pool/";
+ Lists "/org/backports.org/database/dists/";
+ Log "/org/backports.org/log/";
+ Morgue "/org/backports.org/morgue/";
+ MorgueReject "reject";
+ Lock "/org/backports.org/lock";
+ Override "/org/backports.org/scripts/override/";
+ UrgencyLog "/org/backports.org/testing/urgencies/";
+ Queue
+ {
+ Accepted "/org/backports.org/queue/accepted/";
+ Byhand "/org/backports.org/queue/byhand/";
+ Done "/org/backports.org/queue/done/";
+ Holding "/org/backports.org/queue/holding/";
+ New "/org/backports.org/queue/new/";
+ ProposedUpdates "/org/backports.org/queue/p-u-new/";
+ Reject "/org/backports.org/queue/reject/";
+ Unchecked "/org/backports.org/queue/unchecked/";
+ BTSVersionTrack "/org/backports.org/queue/bts_version_track/";
+ Embargoed "/org/backports.org/queue/Embargoed/";
+ Unembargoed "/org/backports.org/queue/Unembargoed/";
+ OldProposedUpdates "/org/backports.org/queue/Unembargoed/";
+ };
+};
+
+DB
+{
+ Name "projectb";
+ Host "";
+ Port -1;
+};
+
+SuiteMappings
+{
+ "propup-version stable-security testing";
+ "propup-version testing-security unstable";
+// "map stable proposed-updates";
+ "map lenny lenny-backports";
+ "map lenny-bpo lenny-backports";
+ "map etch etch-backports";
+// formi does not want this one
+// "map stable etch-backports";
+ "map etch-bpo etch-backports";
+// "map stable-security proposed-updates";
+// "map-unreleased stable unstable";
+// "map-unreleased proposed-updates unstable";
+// "map testing testing-proposed-updates";
+// "map testing-security testing-proposed-updates";
+// "map-unreleased testing unstable";
+// "map-unreleased testing-proposed-updates unstable";
+};
+
+Architectures
+{
+ source "Source";
+ all "Architecture Independent";
+ alpha "DEC Alpha";
+ amd64 "AMD x86_64 (AMD64)";
+ hurd-i386 "Intel ia32 running the HURD";
+ hppa "HP PA RISC";
+ arm "ARM";
+ armel "ARM EABI";
+ i386 "Intel ia32";
+ ia64 "Intel ia64";
+ m68k "Motorola Mc680x0";
+ mips "MIPS (Big Endian)";
+ mipsel "MIPS (Little Endian)";
+ powerpc "PowerPC";
+ s390 "IBM S/390";
+ sh "Hitachi SuperH";
+ sparc "Sun SPARC/UltraSPARC";
+};
+
+Archive
+{
+ backports
+ {
+ OriginServer "backports.org";
+ PrimaryMirror "backports.org";
+ Description "Master Archive for Backports.org archive";
+ };
+};
+
+Component
+{
+ main
+ {
+ Description "Main";
+ MeetsDFSG "true";
+ };
+
+ contrib
+ {
+ Description "Contrib";
+ MeetsDFSG "true";
+ };
+
+ non-free
+ {
+ Description "Software that fails to meet the DFSG";
+ MeetsDFSG "false";
+ };
+
+};
+
+Section
+{
+ admin;
+ base;
+ comm;
+ debian-installer;
+ devel;
+ doc;
+ editors;
+ embedded;
+ electronics;
+ games;
+ gnome;
+ graphics;
+ hamradio;
+ interpreters;
+ kde;
+ libdevel;
+ libs;
+ mail;
+ math;
+ misc;
+ net;
+ news;
+ oldlibs;
+ otherosfs;
+ perl;
+ python;
+ science;
+ shells;
+ sound;
+ tex;
+ text;
+ utils;
+ web;
+ x11;
+};
+
+Priority
+{
+ required 1;
+ important 2;
+ standard 3;
+ optional 4;
+ extra 5;
+ source 0; // i.e. unused
+};
+
+OverrideType
+{
+ deb;
+ udeb;
+ dsc;
+};
+
+Location
+{
+ // Pool locations on backports.org
+ /org/backports.org/ftp/pool/
+ {
+ Archive "backports";
+ Type "pool";
+ };
+
+};
+
+Urgency
+{
+ Default "low";
+ Valid
+ {
+ low;
+ medium;
+ high;
+ emergency;
+ critical;
+ };
+};
--- /dev/null
+# locations used by many scripts
+# Sourced (not executed) by every cron/helper script via $SCRIPTVARS.
+
+base=/org/backports.org
+ftpdir=$base/ftp/
+webdir=$base/web
+
+archs="alpha amd64 arm armel hppa hurd-i386 i386 ia64 m68k mips mipsel powerpc s390 sh sparc"
+
+masterdir=$base/dak/
+overridedir=$base/scripts/override
+# NOTE(review): $scriptdir is never set in this file (only "scriptsdir"
+# below, and only after this line), so extoverridedir expands to just
+# "/external-overrides" -- confirm which variable was intended.
+extoverridedir=$scriptdir/external-overrides
+configdir=$base/dak/config/backports.org/
+scriptsdir=$base/dak/scripts/backports.org/
+
+queuedir=$base/queue
+unchecked=$queuedir/unchecked/
+accepted=$queuedir/accepted/
+done=$queuedir/done/
+over=$base/over/
+lockdir=$base/lock/
+incoming=$base/incoming
+
+dbdir=$base/database/
+indices=$ftpdir/indices
+
+ftpgroup=debadmin
+
+# Override files that copyoverrides publishes into $indices.
+copyoverrides="lenny-backports.contrib lenny-backports.contrib.src lenny-backports.main lenny-backports.main.debian-installer lenny-backports.main.src lenny-backports.extra.contrib lenny-backports.extra.main"
+
+# Change this to your hostname
+uploadhost=localhost
+uploaddir=/pub/UploadQueue/
+
+# What components to support
+components="main contrib non-free"
+suites="lenny-backports"
+override_types="deb dsc udeb"
+
+# temporary fix only!
+# export TMP=/org/backports.org/tmp
+# export TEMP=/org/backports.org/tmp
+# export TMPDIR=/org/backports.org/tmp
+
+PATH=$masterdir:$PATH
+umask 022
--- /dev/null
+#! /bin/sh
+
+# Publish the override files listed in $copyoverrides: compress each one
+# into $indices, but only replace the installed copy when it changed.
+
+set -e
+. $SCRIPTVARS
+echo 'Copying override files into public view ...'
+
+for f in $copyoverrides ; do
+ cd $overridedir
+ chmod g+w override.$f
+
+ cd $indices
+ rm -f .newover-$f.gz
+ pc="`gzip 2>&1 -9nv <$overridedir/override.$f >.newover-$f.gz`"
+ set +e
+ nf=override.$f.gz
+ cmp -s .newover-$f.gz $nf
+ rc=$?
+ set -e
+ if [ $rc = 0 ]; then
+  # unchanged -- throw the fresh copy away
+  rm -f .newover-$f.gz
+ elif [ $rc = 1 -o ! -f $nf ]; then
+  echo " installing new $nf $pc"
+  mv -f .newover-$f.gz $nf
+  chmod g+w $nf
+ else
+  # cmp reported trouble (rc >= 2): print the saved status -- the
+  # original echoed $?, which at this point only holds the exit code
+  # of the elif test above, never cmp's real status
+  echo $rc $pc
+  exit 1
+ fi
+done
--- /dev/null
+#!/bin/sh
+# Update the md5sums file
+# Builds the dsync file list over the whole FTP tree, publishes a gzipped
+# md5sums index, and hard-links duplicate files to save space.
+
+set -e
+. $SCRIPTVARS
+
+dsynclist=$dbdir/dsync.list
+md5list=$indices/md5sums
+
+echo -n "Creating md5 / dsync index file ... "
+
+cd "$ftpdir"
+dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
+dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
+# best effort only -- link-dups may legitimately fail, don't abort (set -e)
+dsync-flist -q link-dups $dsynclist || true
--- /dev/null
+#!/bin/sh
+# Update the ls-lR.
+# Also removes stray core files, reports bad permissions / dangling
+# symlinks, and publishes a patch against the previous listing.
+
+set -e
+. $SCRIPTVARS
+
+cd $ftpdir
+
+filename=ls-lR
+
+echo "Removing any core files ..."
+find -type f -name core -print0 | xargs -0r rm -v
+
+echo "Checking permissions on files in the FTP tree ..."
+find -type f \( \! -perm -444 -o -perm +002 \) -ls
+find -type d \( \! -perm -555 -o -perm +002 \) -ls
+
+echo "Checking symlinks ..."
+symlinks -rd .
+
+echo "Creating recursive directory listing ... "
+rm -f .$filename.new
+TZ=UTC ls -lR | grep -v Archive_Maintenance_In_Progress > .$filename.new
+
+# If a previous listing exists, also generate a patch against it.
+# (The condition read "$(unknown).gz" before; it must test the
+# previous $filename.gz produced at the bottom of this script.)
+if [ -r $filename.gz ] ; then
+ mv -f $filename.gz $filename.old.gz
+ mv -f .$filename.new $filename
+ rm -f $filename.patch.gz
+ zcat $filename.old.gz | diff -u - $filename | gzip -9cfn - >$filename.patch.gz
+ rm -f $filename.old.gz
+else
+ mv -f .$filename.new $filename
+fi
+
+gzip -9cfN $filename >$filename.gz
+rm -f $filename
--- /dev/null
+#! /bin/sh
+
+# Rebuild the Maintainers index in $indices from "dak make-maintainers",
+# installing the new file (plus a .gz) only when it actually changed.
+
+echo
+echo -n 'Creating Maintainers index ... '
+
+set -e
+. $SCRIPTVARS
+cd $base/misc/
+
+# NOTE(review): set but never used below -- presumably left over from
+# non-US handling; confirm before removing.
+nonusmaint="$base/misc/Maintainers_Versions-non-US"
+
+
+cd $indices
+# Strip "~version" suffixes from package names and align them in a
+# fixed-width left column.
+dak make-maintainers | sed -e "s/~[^ ]*\([ ]\)/\1/" | awk '{printf "%-20s ", $1; for (i=2; i<=NF; i++) printf "%s ", $i; printf "\n";}' > .new-maintainers
+
+set +e
+cmp .new-maintainers Maintainers >/dev/null
+rc=$?
+set -e
+# rc=0: identical, rc=1: differs, rc>=2: cmp itself failed.
+if [ $rc = 1 ] || [ ! -f Maintainers ] ; then
+ echo -n "installing Maintainers ... "
+ mv -f .new-maintainers Maintainers
+ gzip -9v <Maintainers >.new-maintainers.gz
+ mv -f .new-maintainers.gz Maintainers.gz
+elif [ $rc = 0 ] ; then
+ echo '(same as before)'
+ rm -f .new-maintainers
+else
+ echo cmp returned $rc
+ false
+fi