# common functions are "outsourced"
. "${configdir}/common"
-# Timestamp. Used for dinstall stat graphs
-function ts() {
- echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
-}
-
-# Cleanup actions
-function cleanup() {
- rm -f ${LOCK_DAILY}
- rm -f ${LOCK_ACCEPTED}
-}
-
-# If we error out this one is called, *FOLLOWED* by cleanup above
-function onerror() {
- ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
-
- subject="ATTENTION ATTENTION!"
- if [ "${error}" = "false" ]; then
- subject="${subject} (continued)"
- else
- subject="${subject} (interrupted)"
- fi
- subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
-
- cat "${STAGEFILE}.log" | mail -s "${subject}" -a "X-Debian: DAK" cron@ftp-master.debian.org
-}
-
-########################################################################
-# the actual dinstall functions follow #
-########################################################################
-
-# pushing merkels QA user, part one
-function merkel1() {
- log "Telling merkels QA user that we start dinstall"
- ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@merkel.debian.org sleep 1
-}
-
-# Create the postgres dump files
-function pgdump_pre() {
- log "Creating pre-daily-cron-job backup of projectb database..."
- pg_dump projectb > $base/backup/dump_pre_$(date +%Y.%m.%d-%H:%M:%S)
-}
-
-function pgdump_post() {
- log "Creating post-daily-cron-job backup of projectb database..."
- cd $base/backup
- POSTDUMP=$(date +%Y.%m.%d-%H:%M:%S)
- pg_dump projectb > $base/backup/dump_$POSTDUMP
- pg_dumpall --globals-only > $base/backup/dumpall_$POSTDUMP
- ln -sf $base/backup/dump_$POSTDUMP current
- ln -sf $base/backup/dumpall_$POSTDUMP currentall
-}
-
-# Load the dak-dev projectb
-function pgdakdev() {
- cd $base/backup
- echo "drop database projectb" | psql -p 5433 template1
- cat currentall | psql -p 5433 template1
- createdb -p 5433 -T template0 projectb
- fgrep -v '\connect' current | psql -p 5433 projectb
-}
-
-# Updating various files
-function updates() {
- log "Updating Bugs docu, Mirror list and mailing-lists.txt"
- cd $configdir
- $scriptsdir/update-bugdoctxt
- $scriptsdir/update-mirrorlists
- $scriptsdir/update-mailingliststxt
- $scriptsdir/update-pseudopackages.sh
-}
-
-# Process (oldstable)-proposed-updates "NEW" queue
-function punew_do() {
- cd "${queuedir}/${1}"
- date -u -R >> REPORT
- dak process-new -a -C COMMENTS >> REPORT || true
- echo >> REPORT
-}
-function punew() {
- log "Doing automated p-u-new processing"
- punew_do "$1"
-}
-function opunew() {
- log "Doing automated o-p-u-new processing"
- punew_do "$1"
-}
-
-# The first i18n one, syncing new descriptions
-function i18n1() {
- log "Synchronizing i18n package descriptions"
- # First sync their newest data
- cd ${scriptdir}/i18nsync
- rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
-
- # Now check if we still know about the packages for which they created the files
- # is the timestamp signed by us?
- if $(gpgv --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
- # now read it. As its signed by us we are sure the content is what we expect, no need
- # to do more here. And we only test -d a directory on it anyway.
- TSTAMP=$(cat timestamp)
- # do we have the dir still?
- if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
- # Lets check!
- if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
- # Yay, worked, lets copy around
- for dir in squeeze sid; do
- if [ -d dists/${dir}/ ]; then
- cd dists/${dir}/main/i18n
- rsync -aq --delete --delete-after . ${ftpdir}/dists/${dir}/main/i18n/.
- fi
- cd ${scriptdir}/i18nsync
- done
- else
- echo "ARRRR, bad guys, wrong files, ARRR"
- echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." debian-l10n-devel@lists.alioth.debian.org
- fi
- else
- echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
- echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." debian-l10n-devel@lists.alioth.debian.org
- fi
- else
- echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
- echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -s "You can't keep blaming yourself. Just blame yourself once, and move on." debian-l10n-devel@lists.alioth.debian.org
- fi
-}
-
-# Process the accepted queue
-function accepted() {
- log "Processing queue/accepted"
- rm -f "$accepted/REPORT"
- dak process-accepted -pa -d "$accepted" > "$accepted/REPORT"
- cat "$accepted/REPORT" | mail -s "Install for $(date +"%D - %R")" ftpmaster@ftp-master.debian.org
- chgrp debadmin "$accepted/REPORT"
- chmod 664 "$accepted/REPORT"
-}
-
-function cruft() {
- log "Checking for cruft in overrides"
- dak check-overrides
-}
-
-function msfl() {
- log "Generating suite file lists for apt-ftparchive"
- dak make-suite-file-list
-}
-
-function fingerprints() {
- log "Updating fingerprints"
- dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
-
- OUTFILE=$(mktemp)
- dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
-
- if [ -s "${OUTFILE}" ]; then
- /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
-From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
-To: <debian-project@lists.debian.org>
-Subject: Debian Maintainers Keyring changes
-Content-Type: text/plain; charset=utf-8
-MIME-Version: 1.0
-
-The following changes to the debian-maintainers keyring have just been activated:
-
-$(cat $OUTFILE)
-
-Debian distribution maintenance software,
-on behalf of the Keyring maintainers
-
-EOF
- fi
- rm -f "$OUTFILE"
-}
-
-function overrides() {
- log "Writing overrides into text files"
- cd $overridedir
- dak make-overrides
-
- # FIXME
- rm -f override.sid.all3
- for i in main contrib non-free main.debian-installer; do cat override.sid.$i >> override.sid.all3; done
-}
-
-function mpfm() {
- log "Generating package / file mapping"
- dak make-pkg-file-mapping | bzip2 -9 > $base/ftp/indices/package-file.map.bz2
-}
-
-function packages() {
- log "Generating Packages and Sources files"
- cd $configdir
- GZIP='--rsyncable' ; export GZIP
- apt-ftparchive generate apt.conf
-}
-
-function pdiff() {
- log "Generating pdiff files"
- dak generate-index-diffs
-}
-
-function release() {
- log "Generating Release files"
- dak generate-releases
-}
-
-function dakcleanup() {
- log "Cleanup old packages/files"
- dak clean-suites -m 10000
- dak clean-queues
-}
-
-function buildd() {
- # Needs to be rebuilt, as files have moved. Due to unaccepts, we need to
- # update this before wanna-build is updated.
- log "Regenerating wanna-build/buildd information"
- psql projectb -A -t -q -c "SELECT filename FROM queue_build WHERE suite = 5 AND queue = 0 AND in_queue = true AND filename ~ 'd(sc|eb)$'" > $dbdir/dists/unstable_accepted.list
- symlinks -d /srv/incoming.debian.org/buildd > /dev/null
- apt-ftparchive generate apt.conf.buildd
-}
-
-function buildd_dir() {
- # Rebuilt the buildd dir to avoid long times of 403
- log "Regenerating the buildd incoming dir"
- STAMP=$(date "+%Y%m%d%H%M")
- make_buildd_dir
-}
-
-function mklslar() {
- cd $ftpdir
-
- FILENAME=ls-lR
-
- log "Removing any core files ..."
- find -type f -name core -print0 | xargs -0r rm -v
-
- log "Checking permissions on files in the FTP tree ..."
- find -type f \( \! -perm -444 -o -perm +002 \) -ls
- find -type d \( \! -perm -555 -o -perm +002 \) -ls
-
- log "Checking symlinks ..."
- symlinks -rd .
-
- log "Creating recursive directory listing ... "
- rm -f .${FILENAME}.new
- TZ=UTC ls -lR > .${FILENAME}.new
-
- if [ -r ${FILENAME}.gz ] ; then
- mv -f ${FILENAME}.gz ${FILENAME}.old.gz
- mv -f .${FILENAME}.new ${FILENAME}
- rm -f ${FILENAME}.patch.gz
- zcat ${FILENAME}.old.gz | diff -u - ${FILENAME} | gzip --rsyncable -9cfn - >${FILENAME}.patch.gz
- rm -f ${FILENAME}.old.gz
- else
- mv -f .${FILENAME}.new ${FILENAME}
- fi
-
- gzip --rsyncable -9cfN ${FILENAME} >${FILENAME}.gz
- rm -f ${FILENAME}
-}
-
-function mkmaintainers() {
- log -n 'Creating Maintainers index ... '
-
- cd $indices
- dak make-maintainers ${scriptdir}/masterfiles/pseudo-packages.maintainers | \
- sed -e "s/~[^ ]*\([ ]\)/\1/" | awk '{printf "%-20s ", $1; for (i=2; i<=NF; i++) printf "%s ", $i; printf "\n";}' > .new-maintainers
-
- set +e
- cmp .new-maintainers Maintainers >/dev/null
- rc=$?
- set -e
- if [ $rc = 1 ] || [ ! -f Maintainers ] ; then
- log -n "installing Maintainers ... "
- mv -f .new-maintainers Maintainers
- gzip --rsyncable -9v <Maintainers >.new-maintainers.gz
- mv -f .new-maintainers.gz Maintainers.gz
- elif [ $rc = 0 ] ; then
- log '(same as before)'
- rm -f .new-maintainers
- else
- log cmp returned $rc
- false
- fi
-}
-
-function copyoverrides() {
- log 'Copying override files into public view ...'
-
- for f in $copyoverrides ; do
- cd $overridedir
- chmod g+w override.$f
-
- cd $indices
- rm -f .newover-$f.gz
- pc="`gzip 2>&1 -9nv <$overridedir/override.$f >.newover-$f.gz`"
- set +e
- nf=override.$f.gz
- cmp -s .newover-$f.gz $nf
- rc=$?
- set -e
- if [ $rc = 0 ]; then
- rm -f .newover-$f.gz
- elif [ $rc = 1 -o ! -f $nf ]; then
- log " installing new $nf $pc"
- mv -f .newover-$f.gz $nf
- chmod g+w $nf
- else
- log $? $pc
- exit 1
- fi
- done
-}
-
-function mkfilesindices() {
- umask 002
- cd $base/ftp/indices/files/components
-
- ARCHLIST=$(tempfile)
-
- log "Querying projectb..."
- echo 'SELECT l.path, f.filename, a.arch_string FROM location l JOIN files f ON (f.location = l.id) LEFT OUTER JOIN (binaries b JOIN architecture a ON (b.architecture = a.id)) ON (f.id = b.file)' | psql projectb -At | sed 's/|//;s,^/srv/ftp.debian.org/ftp,.,' | sort >$ARCHLIST
-
- includedirs () {
- perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
- }
- poolfirst () {
- perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
- }
-
- log "Generating sources list
- (
- sed -n 's/|$//p' $ARCHLIST
- cd $base/ftp
- find ./dists -maxdepth 1 \! -type d
- find ./dists \! -type d | grep "/source/"
- ) | sort -u | gzip --rsyncable -9 > source.list.gz
-
- log "Generating arch lists
-
- ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
- for a in $ARCHES; do
- (sed -n "s/|$a$//p" $ARCHLIST
- sed -n 's/|all$//p' $ARCHLIST
-
- cd $base/ftp
- find ./dists -maxdepth 1 \! -type d
- find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
- ) | sort -u | gzip --rsyncable -9 > arch-$a.list.gz
- done
-
- log "Generating suite lists"
-
- suite_list () {
- printf 'SELECT DISTINCT l.path, f.filename FROM (SELECT sa.source AS source FROM src_associations sa WHERE sa.suite = %d UNION SELECT b.source AS source FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) WHERE ba.suite = %d) s JOIN dsc_files df ON (s.source = df.source) JOIN files f ON (df.file = f.id) JOIN location l ON (f.location = l.id)\n' $1 $1 | psql -F' ' -A -t projectb
-
- printf 'SELECT l.path, f.filename FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) JOIN files f ON (b.file = f.id) JOIN location l ON (f.location = l.id) WHERE ba.suite = %d\n' $1 | psql -F' ' -A -t projectb
- }
-
- printf 'SELECT id, suite_name FROM suite\n' | psql -F' ' -At projectb |
- while read id suite; do
- [ -e $base/ftp/dists/$suite ] || continue
- (
- (cd $base/ftp
- distname=$(cd dists; readlink $suite || echo $suite)
- find ./dists/$distname \! -type d
- for distdir in ./dists/*; do
- [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
- done
- )
- suite_list $id | tr -d ' ' | sed 's,^/srv/ftp.debian.org/ftp,.,'
- ) | sort -u | gzip --rsyncable -9 > suite-${suite}.list.gz
- done
-
- log "Finding everything on the ftp site to generate sundries"
-
- (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
-
- rm -f sundries.list
- zcat *.list.gz | cat - *.list | sort -u |
- diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
-
- log "Generating files list"
-
- for a in $ARCHES; do
- (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
- cat - sundries.list dists.list project.list docs.list indices.list |
- sort -u | poolfirst > ../arch-$a.files
- done
-
- (cd $base/ftp/
- for dist in sid squeeze; do
- find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip --rsyncable -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
- done
- )
-
- (cat ../arch-i386.files ../arch-amd64.files; zcat suite-oldstable.list.gz suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-squeeze.list.gz) |
- sort -u | poolfirst > ../typical.files
-
- rm -f $ARCHLIST
- log "Done!"
-}
-
-function mkchecksums() {
- dsynclist=$dbdir/dsync.list
- md5list=$indices/md5sums
-
- log -n "Creating md5 / dsync index file ... "
-
- cd "$ftpdir"
- ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
- ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n --rsyncable > ${md5list}.gz
- ${bindir}/dsync-flist -q link-dups $dsynclist || true
-}
-
-function scripts() {
- log "Running various scripts from $scriptsdir"
- cd $scriptsdir
- mkmaintainers
- copyoverrides
- mklslar
- mkfilesindices
- mkchecksums
-}
-
-function mirror() {
- log "Regenerating \"public\" mirror/ hardlink fun"
- cd ${mirrordir}
- rsync -aH --link-dest ${ftpdir} --delete --delete-after --ignore-errors ${ftpdir}/. .
-}
-
-function wb() {
- log "Trigger daily wanna-build run"
- ssh -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 wbadm@buildd /org/wanna-build/trigger.daily || echo "W-B trigger.daily failed" | mail -s "W-B Daily trigger failed" ftpmaster@ftp-master.debian.org
-}
-
-function expire() {
- log "Expiring old database dumps..."
- cd $base/backup
- $scriptsdir/expire_dumps -d . -p -f "dump_*"
-}
-
-function transitionsclean() {
- log "Removing out of date transitions..."
- cd $base
- dak transitions -c -a
-}
-
-function reports() {
- # Send a report on NEW/BYHAND packages
- log "Nagging ftpteam about NEW/BYHAND packages"
- dak queue-report | mail -e -s "NEW and BYHAND on $(date +%D)" ftpmaster@ftp-master.debian.org
- # and one on crufty packages
- log "Sending information about crufty packages"
- dak cruft-report > $webdir/cruft-report-daily.txt
- dak cruft-report -s experimental >> $webdir/cruft-report-daily.txt
- cat $webdir/cruft-report-daily.txt | mail -e -s "Debian archive cruft report for $(date +%D)" ftpmaster@ftp-master.debian.org
-}
-
-function dm() {
- log "Updating DM html page"
- $scriptsdir/dm-monitor >$webdir/dm-uploaders.html
-}
-
-function bts() {
- log "Categorizing uncategorized bugs filed against ftp.debian.org"
- dak bts-categorize
-}
-
-function merkel2() {
- # Push dak@merkel so it syncs the projectb there. Returns immediately, the sync runs detached
- log "Trigger merkel/flotows projectb sync"
- ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_projectb dak@merkel.debian.org sleep 1
- # Also trigger flotow, the ftpmaster test box
- ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_flotow_projectb dak@flotow.debconf.org sleep 1
-}
-
-function merkel3() {
- # Push dak@merkel to tell it to sync the dd accessible parts. Returns immediately, the sync runs detached
- log "Trigger merkels dd accessible parts sync"
- ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_ddaccess dak@merkel.debian.org sleep 1
-}
-
-function mirrorpush() {
- log "Starting the mirrorpush"
- date -u > /srv/ftp.debian.org/web/mirrorstart
- echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
- echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/mirrorstart
- sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
-}
-
-function i18n2() {
- log "Exporting package data foo for i18n project"
- STAMP=$(date "+%Y%m%d%H%M")
- mkdir -p ${scriptdir}/i18n/${STAMP}
- cd ${scriptdir}/i18n/${STAMP}
- dak control-suite -l stable > lenny
- dak control-suite -l testing > squeeze
- dak control-suite -l unstable > sid
- echo "${STAMP}" > timestamp
- gpg --secret-keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 55BE302B --detach-sign -o timestamp.gpg timestamp
- rm -f md5sum
- md5sum * > md5sum
- cd ${webdir}/
- ln -sfT ${scriptdir}/i18n/${STAMP} i18n
-
- cd ${scriptdir}
- find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
-}
-
-function stats() {
- log "Updating stats data"
- cd $configdir
- $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
- R --slave --vanilla < $base/misc/ftpstats.R
- dak stats arch-space > $webdir/arch-space
- dak stats pkg-nums > $webdir/pkg-nums
-}
-
-function aptftpcleanup() {
- log "Clean up apt-ftparchive's databases"
- cd $configdir
- apt-ftparchive -q clean apt.conf
-}
-
-function compress() {
- log "Compress old psql backups"
- cd $base/backup/
- find -maxdepth 1 -mindepth 1 -type f -name 'dump_pre_*' -mtime +2 -print0 | xargs -0 --no-run-if-empty rm
-
- find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
- while read dumpname; do
- echo "Compressing $dumpname"
- bzip2 -9fv "$dumpname"
- done
- find -maxdepth 1 -mindepth 1 -type f -name "dumpall_*" \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
- while read dumpname; do
- echo "Compressing $dumpname"
- bzip2 -9fv "$dumpname"
- done
- finddup -l -d $base/backup
-}
-
-function logstats() {
- $masterdir/tools/logs.py "$1"
-}
-
-# save timestamp when we start
-function savetimestamp() {
- NOW=`date "+%Y.%m.%d-%H:%M:%S"`
- echo ${NOW} > "${dbdir}/dinstallstart"
-}
-
-function maillogfile() {
- cat "$LOGFILE" | mail -s "Log for dinstall run of ${NOW}" cron@ftp-master.debian.org
-}
-
-function renamelogfile() {
- if [ -f "${dbdir}/dinstallstart" ]; then
- NOW=$(cat "${dbdir}/dinstallstart")
-# maillogfile
- mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
- logstats "$logdir/dinstall_${NOW}.log"
- bzip2 -9 "$logdir/dinstall_${NOW}.log"
- else
- error "Problem, I don't know when dinstall started, unable to do log statistics."
- NOW=`date "+%Y.%m.%d-%H:%M:%S"`
-# maillogfile
- mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
- bzip2 -9 "$logdir/dinstall_${NOW}.log"
- fi
-}
-
-function testingsourcelist() {
- dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
-}
-
-# do a last run of process-unchecked before dinstall is on.
-function process_unchecked() {
- log "Processing the unchecked queue"
- acceptnew
- UNCHECKED_WITHOUT_LOCK="-p"
- do_unchecked
- sync_debbugs
-}
+# source the dinstall functions
+. "${configdir}/dinstall.functions"
########################################################################
########################################################################
exec >> "$LOGFILE" 2>&1
-# usually we are not using debug logs. Set to 1 if you want them.
-DEBUG=0
-
-# our name
-PROGRAM="dinstall"
-
-# where do we want mails to go? For example log entries made with error()
-if [ "x$(hostname -s)x" != "xriesx" ]; then
- # Not our ftpmaster host
- MAILTO=${MAILTO:-"root"}
-else
- # Yay, ftpmaster
- MAILTO=${MAILTO:-"ftpmaster@debian.org"}
-fi
+# And now source our default config
+. "${configdir}/dinstall.variables"
# Make sure we start out with a sane umask setting
umask 022
export LANG=C
export LC_ALL=C
-# How many logfiles to keep
-LOGROTATE=${LOGROTATE:-400}
-
-# Marker for dinstall start
-DINSTALLSTART="${lockdir}/dinstallstart"
-# Marker for dinstall end
-DINSTALLEND="${lockdir}/dinstallend"
-
touch "${DINSTALLSTART}"
ts "startup"
-# lock cron.unchecked (it immediately exits when this exists)
-LOCK_DAILY="$lockdir/daily.lock"
-
-# Lock cron.unchecked from doing work
-LOCK_ACCEPTED="$lockdir/unchecked.lock"
-
-# Lock process-new from doing work
-LOCK_NEW="$lockdir/processnew.lock"
-
-# This file is simply used to indicate to britney whether or not
-# the Packages file updates completed sucessfully. It's not a lock
-# from our point of view
-LOCK_BRITNEY="$lockdir/britney.lock"
-
-# If this file exists we exit immediately after the currently running
-# function is done
-LOCK_STOP="$lockdir/archive.stop"
-
lockfile -l 3600 "${LOCK_DAILY}"
trap onerror ERR
trap cleanup EXIT TERM HUP INT QUIT
)
stage $GO
-GO=(
- FUNC="notice"
- TIME=""
- ARGS=""
- ERR="false"
-)
-stage $GO
-
GO=(
FUNC="merkel1"
TIME="init"
ARGS=""
ERR="false"
)
stage $GO

GO=(
FUNC="punew"
TIME="p-u-new"
ARGS="p-u-new"
ERR=""
)
-stage $GO
+### TODO: policy-new
+#stage $GO
GO=(
FUNC="opunew"
ARGS="o-p-u-new"
ERR=""
)
-stage $GO
+### TODO: policy-new
+#stage $GO
GO=(
FUNC="i18n1"
lockfile "$LOCK_NEW"
GO=(
- FUNC="process_unchecked"
- TIME=""
- ARGS=""
- ERR=""
-)
-stage $GO
-
-
-GO=(
- FUNC="accepted"
- TIME="accepted"
+ FUNC="process_upload"
+ TIME="process-upload"
ARGS=""
ERR=""
)
stage $GO
-GO=(
- FUNC="buildd_dir"
- TIME="buildd_dir"
- ARGS=""
- ERR="false"
-)
-stage $GO
-
GO=(
FUNC="cruft"
TIME="cruft"
ARGS=""
ERR=""
)
-stage $GO
+### TODO: clean-* fixup
+#stage $GO
GO=(
FUNC="buildd"
--- /dev/null
+++ b/config/debian/dinstall.functions
+# Timestamp. Used for dinstall stat graphs
+function ts() {
+ echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
+}
+
+# Cleanup actions
+function cleanup() {
+ rm -f ${LOCK_DAILY}
+ rm -f ${LOCK_ACCEPTED}
+}
+
+# If we error out, this one is called, *FOLLOWED* by cleanup above
+function onerror() {
+ ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
+
+ subject="ATTENTION ATTENTION!"
+ if [ "${error}" = "false" ]; then
+ subject="${subject} (continued)"
+ else
+ subject="${subject} (interrupted)"
+ fi
+ subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
+
+ cat "${STAGEFILE}.log" | mail -s "${subject}" -a "X-Debian: DAK" cron@ftp-master.debian.org
+}
+
+########################################################################
+# the actual dinstall functions follow #
+########################################################################
+
+# pushing merkel's QA user, part one
+function merkel1() {
+ log "Telling merkels QA user that we start dinstall"
+ ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@merkel.debian.org sleep 1
+}
+
+# Create the postgres dump files
+function pgdump_pre() {
+ log "Creating pre-daily-cron-job backup of projectb database..."
+ pg_dump projectb > $base/backup/dump_pre_$(date +%Y.%m.%d-%H:%M:%S)
+}
+
+function pgdump_post() {
+ log "Creating post-daily-cron-job backup of projectb database..."
+ cd $base/backup
+ POSTDUMP=$(date +%Y.%m.%d-%H:%M:%S)
+ pg_dump projectb > $base/backup/dump_$POSTDUMP
+ pg_dumpall --globals-only > $base/backup/dumpall_$POSTDUMP
+ ln -sf $base/backup/dump_$POSTDUMP current
+ ln -sf $base/backup/dumpall_$POSTDUMP currentall
+}
+
+# Load the dak-dev projectb
+function pgdakdev() {
+ cd $base/backup
+ echo "drop database projectb" | psql -p 5433 template1
+ cat currentall | psql -p 5433 template1
+ createdb -p 5433 -T template0 projectb
+ fgrep -v '\connect' current | psql -p 5433 projectb
+}
+
+# Updating various files
+function updates() {
+ log "Updating Bugs docu, Mirror list and mailing-lists.txt"
+ cd $configdir
+ $scriptsdir/update-bugdoctxt
+ $scriptsdir/update-mirrorlists
+ $scriptsdir/update-mailingliststxt
+ $scriptsdir/update-pseudopackages.sh
+}
+
+# Process (oldstable)-proposed-updates "NEW" queue
+function punew_do() {
+ cd "${queuedir}/${1}"
+ date -u -R >> REPORT
+ dak process-new -a -C COMMENTS >> REPORT || true
+ echo >> REPORT
+}
+function punew() {
+ log "Doing automated p-u-new processing"
+ punew_do "$1"
+}
+function opunew() {
+ log "Doing automated o-p-u-new processing"
+ punew_do "$1"
+}
+
+# The first i18n one, syncing new descriptions
+function i18n1() {
+ log "Synchronizing i18n package descriptions"
+ # First sync their newest data
+ cd ${scriptdir}/i18nsync
+ rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
+
+ # Now check if we still know about the packages for which they created the files
+ # is the timestamp signed by us?
+ if gpgv --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp; then
+ # now read it. As it's signed by us we are sure the content is what we expect, no need
+ # to do more here. And we only test -d a directory on it anyway.
+ TSTAMP=$(cat timestamp)
+ # do we have the dir still?
+ if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
+ # Let's check!
+ if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
+ # Yay, worked, let's copy around
+ for dir in squeeze sid; do
+ if [ -d dists/${dir}/ ]; then
+ cd dists/${dir}/main/i18n
+ rsync -aq --delete --delete-after . ${ftpdir}/dists/${dir}/main/i18n/.
+ fi
+ cd ${scriptdir}/i18nsync
+ done
+ else
+ echo "ARRRR, bad guys, wrong files, ARRR"
+ echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." debian-l10n-devel@lists.alioth.debian.org
+ fi
+ else
+ echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
+ echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." debian-l10n-devel@lists.alioth.debian.org
+ fi
+ else
+ echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
+ echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -s "You can't keep blaming yourself. Just blame yourself once, and move on." debian-l10n-devel@lists.alioth.debian.org
+ fi
+}
+
+function cruft() {
+ log "Checking for cruft in overrides"
+ dak check-overrides
+}
+
+function msfl() {
+ log "Generating suite file lists for apt-ftparchive"
+ dak make-suite-file-list
+}
+
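+# Update fingerprints from the debian-keyring and debian-maintainers keyrings;
+# mail any DM keyring changes to debian-project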
+function fingerprints() {
+ log "Updating fingerprints"
+ dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
+
+ OUTFILE=$(mktemp)
+ dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
+
+ if [ -s "${OUTFILE}" ]; then
+ /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
+From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
+To: <debian-project@lists.debian.org>
+Subject: Debian Maintainers Keyring changes
+Content-Type: text/plain; charset=utf-8
+MIME-Version: 1.0
+
+The following changes to the debian-maintainers keyring have just been activated:
+
+$(cat $OUTFILE)
+
+Debian distribution maintenance software,
+on behalf of the Keyring maintainers
+
+EOF
+ fi
+ rm -f "$OUTFILE"
+}
+
+function overrides() {
+ log "Writing overrides into text files"
+ cd $overridedir
+ dak make-overrides
+
+ # FIXME
+ rm -f override.sid.all3
+ for i in main contrib non-free main.debian-installer; do cat override.sid.$i >> override.sid.all3; done
+}
+
+function mpfm() {
+ log "Generating package / file mapping"
+ dak make-pkg-file-mapping | bzip2 -9 > $base/ftp/indices/package-file.map.bz2
+}
+
+function packages() {
+ log "Generating Packages and Sources files"
+ cd $configdir
+ GZIP='--rsyncable' ; export GZIP
+ apt-ftparchive generate apt.conf
+}
+
+function pdiff() {
+ log "Generating pdiff files"
+ dak generate-index-diffs
+}
+
+function release() {
+ log "Generating Release files"
+ dak generate-releases
+}
+
+function dakcleanup() {
+ log "Cleanup old packages/files"
+ dak clean-suites -m 10000
+ dak clean-queues
+}
+
+function buildd() {
+ # Needs to be rebuilt, as files have moved. Due to unaccepts, we need to
+ # update this before wanna-build is updated.
+ log "Regenerating wanna-build/buildd information"
+ psql projectb -A -t -q -c "SELECT build_queue.path || '/' || build_queue_files.filename FROM build_queue_files LEFT JOIN build_queue ON (build_queue.id = build_queue_files.build_queue_id) WHERE queue_name = 'buildd' AND filename ~ 'd(sc|eb)$'" > $dbdir/dists/unstable_accepted.list
+ symlinks -d /srv/incoming.debian.org/buildd > /dev/null
+ apt-ftparchive generate apt.conf.buildd
+}
+
+function buildd_dir() {
+ # Rebuild the buildd dir to avoid long stretches of 403 errors
+ log "Regenerating the buildd incoming dir"
+ STAMP=$(date "+%Y%m%d%H%M")
+ make_buildd_dir
+}
+
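+# Create the ls-lR listing of the ftp tree plus a gzipped patch against the previous run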
+function mklslar() {
+ cd $ftpdir
+
+ FILENAME=ls-lR
+
+ log "Removing any core files ..."
+ find -type f -name core -print0 | xargs -0r rm -v
+
+ log "Checking permissions on files in the FTP tree ..."
+ find -type f \( \! -perm -444 -o -perm +002 \) -ls
+ find -type d \( \! -perm -555 -o -perm +002 \) -ls
+
+ log "Checking symlinks ..."
+ symlinks -rd .
+
+ log "Creating recursive directory listing ... "
+ rm -f .${FILENAME}.new
+ TZ=UTC ls -lR > .${FILENAME}.new
+
+ if [ -r ${FILENAME}.gz ] ; then
+ mv -f ${FILENAME}.gz ${FILENAME}.old.gz
+ mv -f .${FILENAME}.new ${FILENAME}
+ rm -f ${FILENAME}.patch.gz
+ zcat ${FILENAME}.old.gz | diff -u - ${FILENAME} | gzip --rsyncable -9cfn - >${FILENAME}.patch.gz
+ rm -f ${FILENAME}.old.gz
+ else
+ mv -f .${FILENAME}.new ${FILENAME}
+ fi
+
+ gzip --rsyncable -9cfN ${FILENAME} >${FILENAME}.gz
+ rm -f ${FILENAME}
+}
+
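+# Rebuild the Maintainers index; only install it if it differs from the current one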
+function mkmaintainers() {
+ log -n 'Creating Maintainers index ... '
+
+ cd $indices
+ dak make-maintainers ${scriptdir}/masterfiles/pseudo-packages.maintainers | \
+ sed -e "s/~[^ ]*\([ ]\)/\1/" | awk '{printf "%-20s ", $1; for (i=2; i<=NF; i++) printf "%s ", $i; printf "\n";}' > .new-maintainers
+
+ set +e
+ cmp .new-maintainers Maintainers >/dev/null
+ rc=$?
+ set -e
+ if [ $rc = 1 ] || [ ! -f Maintainers ] ; then
+ log -n "installing Maintainers ... "
+ mv -f .new-maintainers Maintainers
+ gzip --rsyncable -9v <Maintainers >.new-maintainers.gz
+ mv -f .new-maintainers.gz Maintainers.gz
+ elif [ $rc = 0 ] ; then
+ log '(same as before)'
+ rm -f .new-maintainers
+ else
+ log cmp returned $rc
+ false
+ fi
+}
+
+function copyoverrides() {
+ log 'Copying override files into public view ...'
+
+ for f in $copyoverrides ; do
+ cd $overridedir
+ chmod g+w override.$f
+
+ cd $indices
+ rm -f .newover-$f.gz
+ pc="`gzip 2>&1 -9nv <$overridedir/override.$f >.newover-$f.gz`"
+ set +e
+ nf=override.$f.gz
+ cmp -s .newover-$f.gz $nf
+ rc=$?
+ set -e
+ if [ $rc = 0 ]; then
+ rm -f .newover-$f.gz
+ elif [ $rc = 1 -o ! -f $nf ]; then
+ log " installing new $nf $pc"
+ mv -f .newover-$f.gz $nf
+ chmod g+w $nf
+ else
+ log $? $pc
+ exit 1
+ fi
+ done
+}
+
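+# Build the per-architecture and per-suite file lists under indices/files/components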
+function mkfilesindices() {
+ umask 002
+ cd $base/ftp/indices/files/components
+
+ ARCHLIST=$(tempfile)
+
+ log "Querying projectb..."
+ echo 'SELECT l.path, f.filename, a.arch_string FROM location l JOIN files f ON (f.location = l.id) LEFT OUTER JOIN (binaries b JOIN architecture a ON (b.architecture = a.id)) ON (f.id = b.file)' | psql projectb -At | sed 's/|//;s,^/srv/ftp.debian.org/ftp,.,' | sort >$ARCHLIST
+
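+ # helper: print every input path plus each of its parent directories (once)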
+ includedirs () {
+ perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
+ }
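+ # helper: reorder the input so ./pool/ paths come before everything else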
+ poolfirst () {
+ perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
+ }
+
+ log "Generating sources list
+ (
+ sed -n 's/|$//p' $ARCHLIST
+ cd $base/ftp
+ find ./dists -maxdepth 1 \! -type d
+ find ./dists \! -type d | grep "/source/"
+ ) | sort -u | gzip --rsyncable -9 > source.list.gz
+
+ log "Generating arch lists
+
+ ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
+ for a in $ARCHES; do
+ (sed -n "s/|$a$//p" $ARCHLIST
+ sed -n 's/|all$//p' $ARCHLIST
+
+ cd $base/ftp
+ find ./dists -maxdepth 1 \! -type d
+ find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
+ ) | sort -u | gzip --rsyncable -9 > arch-$a.list.gz
+ done
+
+ log "Generating suite lists"
+
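+ # helper: list all source and binary file paths belonging to the suite with database id $1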
+ suite_list () {
+ printf 'SELECT DISTINCT l.path, f.filename FROM (SELECT sa.source AS source FROM src_associations sa WHERE sa.suite = %d UNION SELECT b.source AS source FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) WHERE ba.suite = %d) s JOIN dsc_files df ON (s.source = df.source) JOIN files f ON (df.file = f.id) JOIN location l ON (f.location = l.id)\n' $1 $1 | psql -F' ' -A -t projectb
+
+ printf 'SELECT l.path, f.filename FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) JOIN files f ON (b.file = f.id) JOIN location l ON (f.location = l.id) WHERE ba.suite = %d\n' $1 | psql -F' ' -A -t projectb
+ }
+
+ printf 'SELECT id, suite_name FROM suite\n' | psql -F' ' -At projectb |
+ while read id suite; do
+ [ -e $base/ftp/dists/$suite ] || continue
+ (
+ (cd $base/ftp
+ distname=$(cd dists; readlink $suite || echo $suite)
+ find ./dists/$distname \! -type d
+ for distdir in ./dists/*; do
+ [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
+ done
+ )
+ suite_list $id | tr -d ' ' | sed 's,^/srv/ftp.debian.org/ftp,.,'
+ ) | sort -u | gzip --rsyncable -9 > suite-${suite}.list.gz
+ done
+
+ log "Finding everything on the ftp site to generate sundries"
+
+ (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
+
+ rm -f sundries.list
+ zcat *.list.gz | cat - *.list | sort -u |
+ diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
+
+ log "Generating files list"
+
+ for a in $ARCHES; do
+ (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
+ cat - sundries.list dists.list project.list docs.list indices.list |
+ sort -u | poolfirst > ../arch-$a.files
+ done
+
+ (cd $base/ftp/
+ for dist in sid squeeze; do
+ find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip --rsyncable -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
+ done
+ )
+
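+ # rough file set for a "typical" partial mirror: i386/amd64 plus oldstable/p-u and the translations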
+ (cat ../arch-i386.files ../arch-amd64.files; zcat suite-oldstable.list.gz suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-squeeze.list.gz) |
+ sort -u | poolfirst > ../typical.files
+
+ rm -f $ARCHLIST
+ log "Done!"
+}
+
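+# Create the md5sums index and the dsync file list, hardlinking duplicate files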
+function mkchecksums() {
+ dsynclist=$dbdir/dsync.list
+ md5list=$indices/md5sums
+
+ log -n "Creating md5 / dsync index file ... "
+
+ cd "$ftpdir"
+ ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
+ ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n --rsyncable > ${md5list}.gz
+ ${bindir}/dsync-flist -q link-dups $dsynclist || true
+}
+
+function scripts() {
+ log "Running various scripts from $scriptsdir"
+ mkmaintainers
+ copyoverrides
+ mklslar
+ mkfilesindices
+ mkchecksums
+}
+
+function mirror() {
+ log "Regenerating \"public\" mirror/ hardlink fun"
+ cd ${mirrordir}
+ rsync -aH --link-dest ${ftpdir} --delete --delete-after --ignore-errors ${ftpdir}/. .
+}
+
+function wb() {
+ log "Trigger daily wanna-build run"
+ ssh -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 wbadm@buildd /org/wanna-build/trigger.daily || echo "W-B trigger.daily failed" | mail -s "W-B Daily trigger failed" ftpmaster@ftp-master.debian.org
+}
+
+function expire() {
+ log "Expiring old database dumps..."
+ cd $base/backup
+ $scriptsdir/expire_dumps -d . -p -f "dump_*"
+}
+
+function transitionsclean() {
+ log "Removing out of date transitions..."
+ cd $base
+ dak transitions -c -a
+}
+
+function reports() {
+ # Send a report on NEW/BYHAND packages
+ log "Nagging ftpteam about NEW/BYHAND packages"
+ dak queue-report | mail -e -s "NEW and BYHAND on $(date +%D)" ftpmaster@ftp-master.debian.org
+ # and one on crufty packages
+ log "Sending information about crufty packages"
+ dak cruft-report > $webdir/cruft-report-daily.txt
+ dak cruft-report -s experimental >> $webdir/cruft-report-daily.txt
+ cat $webdir/cruft-report-daily.txt | mail -e -s "Debian archive cruft report for $(date +%D)" ftpmaster@ftp-master.debian.org
+}
+
+function dm() {
+ log "Updating DM html page"
+ $scriptsdir/dm-monitor >$webdir/dm-uploaders.html
+}
+
+function bts() {
+ log "Categorizing uncategorized bugs filed against ftp.debian.org"
+ dak bts-categorize
+}
+
+function merkel2() {
+ # Push dak@merkel so it syncs the projectb there. Returns immediately; the sync runs detached.
+ log "Trigger merkel/flotows projectb sync"
+ ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_projectb dak@merkel.debian.org sleep 1
+ # Also trigger flotow, the ftpmaster test box
+ ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_flotow_projectb dak@flotow.debconf.org sleep 1
+}
+
+function merkel3() {
+ # Push dak@merkel to tell it to sync the dd-accessible parts. Returns immediately; the sync runs detached.
+ log "Trigger merkels dd accessible parts sync"
+ ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_ddaccess dak@merkel.debian.org sleep 1
+}
+
+function mirrorpush() {
+ log "Starting the mirrorpush"
+ date -u > /srv/ftp.debian.org/web/mirrorstart
+ echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
+ echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/mirrorstart
+ sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
+}
+
+function i18n2() {
+ log "Exporting package data foo for i18n project"
+ STAMP=$(date "+%Y%m%d%H%M")
+ mkdir -p ${scriptdir}/i18n/${STAMP}
+ cd ${scriptdir}/i18n/${STAMP}
+ dak control-suite -l stable > lenny
+ dak control-suite -l testing > squeeze
+ dak control-suite -l unstable > sid
+ echo "${STAMP}" > timestamp
+ gpg --secret-keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 55BE302B --detach-sign -o timestamp.gpg timestamp
+ rm -f md5sum
+ md5sum * > md5sum
+ cd ${webdir}/
+ ln -sfT ${scriptdir}/i18n/${STAMP} i18n
+
+ cd ${scriptdir}
+ find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
+}
+
+function stats() {
+ log "Updating stats data"
+ cd $configdir
+ $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
+ R --slave --vanilla < $base/misc/ftpstats.R
+ dak stats arch-space > $webdir/arch-space
+ dak stats pkg-nums > $webdir/pkg-nums
+}
+
+function aptftpcleanup() {
+ log "Clean up apt-ftparchive's databases"
+ cd $configdir
+ apt-ftparchive -q clean apt.conf
+}
+
+function compress() {
+ log "Compress old psql backups"
+ cd $base/backup/
+ find -maxdepth 1 -mindepth 1 -type f -name 'dump_pre_*' -mtime +2 -print0 | xargs -0 --no-run-if-empty rm
+
+ find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
+ while read dumpname; do
+ echo "Compressing $dumpname"
+ bzip2 -9fv "$dumpname"
+ done
+ find -maxdepth 1 -mindepth 1 -type f -name "dumpall_*" \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
+ while read dumpname; do
+ echo "Compressing $dumpname"
+ bzip2 -9fv "$dumpname"
+ done
+ finddup -l -d $base/backup
+}
+
+function logstats() {
+ $masterdir/tools/logs.py "$1"
+}
+
+# save timestamp when we start
+function savetimestamp() {
+ NOW=`date "+%Y.%m.%d-%H:%M:%S"`
+ echo ${NOW} > "${dbdir}/dinstallstart"
+}
+
+function maillogfile() {
+ cat "$LOGFILE" | mail -s "Log for dinstall run of ${NOW}" cron@ftp-master.debian.org
+}
+
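+# Move the logfile to dinstall_<starttime>.log, run the log statistics over it and compress it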
+function renamelogfile() {
+ if [ -f "${dbdir}/dinstallstart" ]; then
+ NOW=$(cat "${dbdir}/dinstallstart")
+# maillogfile
+ mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
+ logstats "$logdir/dinstall_${NOW}.log"
+ bzip2 -9 "$logdir/dinstall_${NOW}.log"
+ else
+ error "Problem, I don't know when dinstall started, unable to do log statistics."
+ NOW=`date "+%Y.%m.%d-%H:%M:%S"`
+# maillogfile
+ mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
+ bzip2 -9 "$logdir/dinstall_${NOW}.log"
+ fi
+}
+
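+# Dump the list of testing source packages into ${webdir}/testing.list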
+function testingsourcelist() {
+ dak ls -s testing -f heidi -r . | egrep 'source$' > ${webdir}/testing.list
+}
+
+# do a last run of process-unchecked before the rest of dinstall starts.
+function process_unchecked() {
+ log "Processing the unchecked queue"
+ UNCHECKED_WITHOUT_LOCK="-p"
+ do_unchecked
+ sync_debbugs
+}