-#! /bin/sh
+#!/bin/bash
+# No way I try to deal with a crippled sh just for POSIX foo.
+
+# Copyright (C) 2009 Joerg Jaspert <joerg@debian.org>
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as
+# published by the Free Software Foundation; version 2.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# General Public License for more details.
#
-# Executed daily via cron, out of dak's crontab.
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+
+# exit on errors
set -e
+# make sure to only use defined variables
+set -u
+
+# import the general variable set.
export SCRIPTVARS=/srv/ftp.debian.org/dak/config/debian/vars
. $SCRIPTVARS
-################################################################################
-
-# Start logging
-NOW=`date "+%Y.%m.%d-%H:%M:%S"`
-LOGFILE="$logdir/dinstall_${NOW}.log"
-exec > "$LOGFILE" 2>&1
+########################################################################
+# Functions #
+########################################################################
+# log something (basically echo it together with a timestamp)
+#
+# Set $PROGRAM to a string to have it added to the output.
+function log () {
+    if [ -z "${PROGRAM:-}" ]; then
+ echo "$(date +"%b %d %H:%M:%S") $(hostname -s) [$$] $@"
+ else
+ echo "$(date +"%b %d %H:%M:%S") $(hostname -s) ${PROGRAM}[$$]: $@"
+ fi
+}
-ts() {
- TS=$(($TS+1));
- echo "Archive maintenance timestamp $TS ($1): $(date +%H:%M:%S)"
+# log the message using log() but then also send a mail
+# to the address configured in MAILTO (if non-empty)
+function log_error () {
+    log "$@"
+    if [ -n "${MAILTO}" ]; then
+ echo "$@" | mail -e -s "[$PROGRAM@$(hostname -s)] ERROR [$$]" ${MAILTO}
+ fi
}
-TS=-1
-ts "startup"
+# debug log, only output when DEBUG=1
+function debug () {
+ if [ $DEBUG -eq 1 ]; then
+ log "$*"
+ fi
+}
-NOTICE="$ftpdir/Archive_Maintenance_In_Progress"
-LOCKCU="$lockdir/daily.lock"
-LOCKAC="$lockdir/unchecked.lock"
-BRITNEYLOCK="$lockdir/britney.lock"
-lockac=0
-
-cleanup() {
- rm -f "$NOTICE"
- rm -f "$LOCKCU"
- if [ "$lockac" -eq "1" ]; then
- rm -f "$LOCKAC"
- fi
- echo "Cleanup"
-}
-lockfile -l 3600 $LOCKCU
-trap cleanup 0
+# Timestamp. Used for dinstall stat graphs
+function ts() {
+ TS=$(($TS+1));
+ echo "Archive maintenance timestamp $TS ($1): $(date +%H:%M:%S)"
+}
-# This file is simply used to indicate to britney whether or not
-# the Packages file updates completed sucessfully. It's not a lock
-# from our point of view
-touch ${BRITNEYLOCK}
+# Cleanup actions
+function cleanup() {
+ rm -f ${LOCK_DAILY}
+ rm -f ${LOCK_ACCEPTED}
+}
-rm -f "$NOTICE"
-cat > "$NOTICE" <<EOF
+# Set up the notice file to tell bad mirrors they used the wrong time
+function notice() {
+ rm -f "$NOTICE"
+ cat > "$NOTICE" <<EOF
Packages are currently being installed and indices rebuilt.
-Maintenance is automatic, starting at 07:52 and 19:52 UTC, and
-ending about an hour later. This file is then removed.
+Maintenance is automatic, starting at 01|07|13|19:52 UTC,
+and ending about an hour later. This file is then removed.
You should not mirror the archive during this period.
EOF
+}
+
+# Push merkel's QA user, part one: let the QA pages show that dinstall is running
+function merkel1() {
+    log "Telling merkel's QA user that we start dinstall"
+ ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@merkel.debian.org sleep 1
+}
+
+# Create the postgres dump files
+function pgdump_pre() {
+ log "Creating pre-daily-cron-job backup of projectb database..."
+ pg_dump projectb > $base/backup/dump_$(date +%Y.%m.%d-%H:%M:%S)
+}
+
+function pgdump_post() {
+ log "Creating post-daily-cron-job backup of projectb database..."
+ cd $base/backup
+ POSTDUMP=$base/backup/dump_$(date +%Y.%m.%d-%H:%M:%S)
+ pg_dump projectb > $POSTDUMP
+ ln -sf $POSTDUMP current
+}
+
+# Updating various files
+function updates() {
+ log "Updating Bugs docu, Mirror list and mailing-lists.txt"
+ cd $configdir
+ $scriptsdir/update-bugdoctxt
+ $scriptsdir/update-mirrorlists
+ $scriptsdir/update-mailingliststxt
+ $scriptsdir/update-pseudopackages.sh
+}
-# Push merkels qa user, so the qa pages can show "dinstall is running" information
-echo "Telling merkels QA user that we start dinstall"
-ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@merkel.debian.org sleep 1 || true
-ts "init"
-
-################################################################################
-
-echo "Creating pre-daily-cron-job backup of projectb database..."
-pg_dump projectb > $base/backup/dump_$(date +%Y.%m.%d-%H:%M:%S)
-ts "pg_dump1"
-
-################################################################################
-
-echo "Updating Bugs docu, Mirror list and mailing-lists.txt"
-cd $configdir
-$scriptsdir/update-bugdoctxt
-$scriptsdir/update-mirrorlists
-$scriptsdir/update-mailingliststxt
-$scriptsdir/update-pseudopackages.sh
-ts "External Updates"
-
-################################################################################
-
-echo "Doing automated p-u-new processing"
-cd $queuedir/p-u-new
-date -u -R >> REPORT
-dak process-new -a -C COMMENTS >> REPORT || true
-echo >> REPORT
-ts "p-u-new"
-
-echo "Doing automated o-p-u-new processing"
-cd $queuedir/o-p-u-new
-date -u -R >> REPORT
-dak process-new -a -C COMMENTS >> REPORT || true
-echo >> REPORT
-ts "o-p-u-new"
-
-################################################################################
-
-echo "Synchronizing i18n package descriptions"
-# First sync their newest data
-cd ${scriptdir}/i18nsync
-rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
-
-# Now check if we still know about the packages for which they created the files
-# is the timestamp signed by us?
-if $(gpgv --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
- # now read it. As its signed by us we are sure the content is what we expect, no need
- # to do more here. And we only test -d a directory on it anyway.
- TSTAMP=$(cat timestamp)
- # do we have the dir still?
- if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
- # Lets check!
- if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
- # Yay, worked, lets copy around
- for dir in squeeze sid; do
- if [ -d dists/${dir}/ ]; then
- cd dists/${dir}/main/i18n
- rsync -aq --delete --delete-after . ${ftpdir}/dists/${dir}/main/i18n/.
- fi
- cd ${scriptdir}/i18nsync
- done
- else
- echo "ARRRR, bad guys, wrong files, ARRR"
- echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." debian-l10n-devel@lists.alioth.debian.org
- fi
+# Process (oldstable)-proposed-updates "NEW" queue
+function punew_do() {
+ cd "${queuedir}/${1}"
+ date -u -R >> REPORT
+ dak process-new -a -C COMMENTS >> REPORT || true
+ echo >> REPORT
+}
+function punew() {
+ log "Doing automated p-u-new processing"
+ punew_do "$1"
+}
+function opunew() {
+ log "Doing automated o-p-u-new processing"
+ punew_do "$1"
+}
+
+# The first i18n one, syncing new descriptions
+function i18n1() {
+ log "Synchronizing i18n package descriptions"
+ # First sync their newest data
+ cd ${scriptdir}/i18nsync
+ rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
+
+ # Now check if we still know about the packages for which they created the files
+ # is the timestamp signed by us?
+    if gpgv --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp; then
+        # now read it. As it's signed by us we are sure the content is what we expect, no need
+ # to do more here. And we only test -d a directory on it anyway.
+ TSTAMP=$(cat timestamp)
+ # do we have the dir still?
+ if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
+            # Let's check!
+            if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
+                # Yay, worked, let's copy around
+ for dir in squeeze sid; do
+ if [ -d dists/${dir}/ ]; then
+ cd dists/${dir}/main/i18n
+ rsync -aq --delete --delete-after . ${ftpdir}/dists/${dir}/main/i18n/.
+ fi
+ cd ${scriptdir}/i18nsync
+ done
+ else
+ echo "ARRRR, bad guys, wrong files, ARRR"
+ echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." debian-l10n-devel@lists.alioth.debian.org
+ fi
+ else
+ echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
+ echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." debian-l10n-devel@lists.alioth.debian.org
+ fi
else
- echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
- echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." debian-l10n-devel@lists.alioth.debian.org
+ echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
+ echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -s "You can't keep blaming yourself. Just blame yourself once, and move on." debian-l10n-devel@lists.alioth.debian.org
+ fi
+}
+
+# Process the accepted queue
+function accepted() {
+    log "Processing queue/accepted"
+    rm -f "$accepted/REPORT"
+    dak process-accepted -pa -d "$accepted" > "$accepted/REPORT"
+    cat "$accepted/REPORT" | mail -s "Install for $(date +"%D - %R")" ftpmaster@ftp-master.debian.org
+    chgrp debadmin "$accepted/REPORT"
+    chmod 664 "$accepted/REPORT"
+}
+
+function cruft() {
+ log "Checking for cruft in overrides"
+ dak check-overrides
+
+ log "Fixing symlinks in $ftpdir"
+ symlinks -d -r $ftpdir
+}
+
+function msfl() {
+ log "Generating suite file lists for apt-ftparchive"
+ dak make-suite-file-list
+}
+
+function fingerprints() {
+ log "Updating fingerprints"
+ dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
+}
+
+function overrides() {
+ log "Writing overrides into text files"
+ cd $overridedir
+ dak make-overrides
+
+ # FIXME
+ rm -f override.sid.all3
+ for i in main contrib non-free main.debian-installer; do cat override.sid.$i >> override.sid.all3; done
+}
+
+function mpfm() {
+ log "Generating package / file mapping"
+ dak make-pkg-file-mapping | bzip2 -9 > $base/ftp/indices/package-file.map.bz2
+}
+
+function packages() {
+ log "Generating Packages and Sources files"
+ cd $configdir
+ apt-ftparchive generate apt.conf
+}
+
+function pdiff() {
+ log "Generating pdiff files"
+ dak generate-index-diffs
+}
+
+function release() {
+ log "Generating Release files"
+ dak generate-releases
+}
+
+function dakcleanup() {
+ log "Cleanup old packages/files"
+ dak clean-suites
+ dak clean-queues
+}
+
+function buildd() {
+ # Needs to be rebuilt, as files have moved. Due to unaccepts, we need to
+ # update this before wanna-build is updated.
+ log "Regenerating wanna-build/buildd information"
+ psql projectb -A -t -q -c "SELECT filename FROM queue_build WHERE suite = 5 AND queue = 0 AND in_queue = true AND filename ~ 'd(sc|eb)$'" > $dbdir/dists/unstable_accepted.list
+ symlinks -d /srv/incoming.debian.org/buildd > /dev/null
+ apt-ftparchive generate apt.conf.buildd
+}
+
+function scripts() {
+ log "Running various scripts from $scriptsdir"
+ cd $scriptsdir
+ ./mkmaintainers
+ ./copyoverrides
+ ./mklslar
+ ./mkfilesindices
+ ./mkchecksums
+}
+
+function mirror() {
+    log "Regenerating \"public\" mirror/ hardlink fun"
+ cd ${mirrordir}
+ rsync -aH --link-dest ${ftpdir} --delete --delete-after --ignore-errors ${ftpdir}/. .
+}
+
+function wb() {
+ log "Trigger daily wanna-build run"
+ ssh -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 wbadm@buildd /org/wanna-build/trigger.daily || echo "W-B trigger.daily failed" | mail -s "W-B Daily trigger failed" ftpmaster@ftp-master.debian.org
+}
+
+function expire() {
+ log "Expiring old database dumps..."
+ cd $base/backup
+ $scriptsdir/expire_dumps -d . -p -f "dump_*"
+}
+
+function reports() {
+ # Send a report on NEW/BYHAND packages
+ log "Nagging ftpteam about NEW/BYHAND packages"
+ dak queue-report | mail -e -s "NEW and BYHAND on $(date +%D)" ftpmaster@ftp-master.debian.org
+ # and one on crufty packages
+ log "Sending information about crufty packages"
+ dak cruft-report > $webdir/cruft-report-daily.txt
+ dak cruft-report -s experimental >> $webdir/cruft-report-daily.txt
+ cat $webdir/cruft-report-daily.txt | mail -e -s "Debian archive cruft report for $(date +%D)" ftpmaster@ftp-master.debian.org
+}
+
+function dm() {
+ log "Updating DM html page"
+ $scriptsdir/dm-monitor >$webdir/dm-uploaders.html
+}
+
+function bts() {
+ log "Categorizing uncategorized bugs filed against ftp.debian.org"
+ dak bts-categorize
+}
+
+function merkel2() {
+    # Push katie@merkel so it syncs the projectb there. Returns immediately, the sync runs detached
+    log "Trigger merkel's projectb sync"
+ ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_projectb katie@merkel.debian.org sleep 1
+}
+
+function runparts() {
+ log "Using run-parts to run scripts in $base/scripts/distmnt"
+ run-parts --report $base/scripts/distmnt
+}
+
+function i18n2() {
+ log "Exporting package data foo for i18n project"
+ STAMP=$(date "+%Y%m%d%H%M")
+ mkdir -p ${scriptdir}/i18n/${STAMP}
+ cd ${scriptdir}/i18n/${STAMP}
+ dak control-suite -l stable > lenny
+ dak control-suite -l testing > squeeze
+ dak control-suite -l unstable > sid
+ echo "${STAMP}" > timestamp
+ gpg --secret-keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 6070D3A1 --detach-sign -o timestamp.gpg timestamp
+ rm -f md5sum
+ md5sum * > md5sum
+ cd ${webdir}/
+ ln -sfT ${scriptdir}/i18n/${STAMP} i18n
+
+ cd ${scriptdir}
+ find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
+}
+
+function stats() {
+ log "Updating stats data"
+ cd $configdir
+ $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
+ R --slave --vanilla < $base/misc/ftpstats.R
+}
+
+function aptftpcleanup() {
+ log "Clean up apt-ftparchive's databases"
+ cd $configdir
+ apt-ftparchive -q clean apt.conf
+}
+
+function compress() {
+ log "Compress old psql backups"
+ cd $base/backup/
+ find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' -mtime +1 |
+ while read dumpname; do
+ echo "Compressing $dumpname"
+ bzip2 -9v "$dumpname"
+ done
+}
+
+function logstats() {
+ $masterdir/tools/logs.py "$LOGFILE"
+}
+
+########################################################################
+########################################################################
+
+# Function to save which stage we are in, so we can restart an interrupted
+# dinstall. Or even run actions in parallel, if we dare to, by simply
+# backgrounding the call to this function. But that should only really be
+# done for things we don't care much about.
+#
+# This should be called with the first argument being an array, with the
+# members
+# - FUNC - the function name to call
+# - ARGS - Possible arguments to hand to the function. Can be the empty string
+# - TIME - The timestamp name. Can be the empty string
+# - ERR - if this is the string false, then the call will be surrounded by
+# set +e ... set -e calls, so errors in the function do not exit
+# dinstall. Can be the empty string, meaning true.
+#
+# MAKE SURE TO KEEP THIS THE LAST FUNCTION, AFTER ALL THE VARIOUS ONES
+# ADDED FOR DINSTALL FEATURES!
+function stage() {
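+    # Indirect expansion trick: ARGS holds the name "GO[@]", so ${!ARGS} expands
+    # to the elements of the caller's GO array (FUNC=..., TIME=..., ARGS=...,
+    # ERR=...), and "local" then defines those as local variables of this function.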
+ ARGS='GO[@]'
+ local "${!ARGS}"
+
+ if [ -f "${stagedir}/${FUNC}" ]; then
+ stamptime=$(/usr/bin/stat -c %Z "${stagedir}/${FUNC}")
+ unixtime=$(date +%s)
+ difference=$(( $unixtime - $stamptime ))
+ if [ ${difference} -ge 14400 ]; then
+            log_error "Already ran ${FUNC} (stagefile exists), but that was ${difference} seconds ago. Please check."
+        else
+            log "Already ran ${FUNC}, not calling again..."
+ fi
+ return
+ fi
+
+    debug "Now calling function ${FUNC}. Arguments: ${ARGS}. Timestamp: ${TIME}"
+
+ # Make sure we are always at the same place. If a function wants to be elsewhere,
+ # it has to cd first!
+ cd ${configdir}
+
+ if [ -f "${LOCK_STOP}" ]; then
+ log "${LOCK_STOP} exists, exiting immediately"
+ exit 42
+ fi
+
+ if [ "${ERR}" = "false" ]; then
+ set +e
+ fi
+ ${FUNC} ${ARGS}
+
+ # No matter what happened in the function, we make sure we have set -e default state back
+ set -e
+
+ # Make sure we are always at the same place.
+ cd ${configdir}
+
+ touch "${stagedir}/${FUNC}"
+
+ if [ -f "${LOCK_STOP}" ]; then
+ log "${LOCK_STOP} exists, exiting immediately"
+ exit 42
fi
+
+ if [ -n "${TIME}" ]; then
+ ts "${TIME}"
+ fi
+}
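+
+# Each stage below is driven by a GO array defined right before the call; the
+# pattern (using the pg_dump stage as an example) looks like this:
+#
+#   GO=(
+#       FUNC="pgdump_pre"    # name of the function to run
+#       TIME="pg_dump1"      # ts() label; empty string skips the timestamp
+#       ARGS=""              # arguments handed to FUNC, if any
+#       ERR=""               # "false" keeps errors in FUNC from aborting dinstall
+#   )
+#   stage $GO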
+
+########################################################################
+
+# We need logs.
+LOGFILE="$logdir/dinstall.log"
+# Start time of this dinstall run, used in the subject of the final log mail
+NOW=$(date "+%Y.%m.%d-%H:%M:%S")
+
+exec >> "$LOGFILE" 2>&1
+
+# usually we are not using debug logs. Set to 1 if you want them.
+DEBUG=0
+
+# our name
+PROGRAM="dinstall"
+
+# where do we want mails to go? For example log entries made with log_error()
+if [ "x$(hostname -s)x" != "xriesx" ]; then
+ # Not our ftpmaster host
+ MAILTO=${MAILTO:-"root"}
else
- echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
- echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -s "You can't keep blaming yourself. Just blame yourself once, and move on." debian-l10n-devel@lists.alioth.debian.org
+ # Yay, ftpmaster
+ MAILTO=${MAILTO:-"ftpmaster@debian.org"}
fi
-ts "i18n 1"
-
-################################################################################
-
-lockfile $LOCKAC
-lockac=1
-echo "Processing queue/accepted"
-cd $accepted
-rm -f REPORT
-dak process-accepted -pa *.changes | tee REPORT | \
- mail -s "Install for $(date +%D)" ftpmaster@ftp-master.debian.org
-chgrp debadmin REPORT
-chmod 664 REPORT
-ts "accepted"
-
-echo "Checking for cruft in overrides"
-dak check-overrides
-rm -f $LOCKAC
-lockac=0
-
-echo "Fixing symlinks in $ftpdir"
-symlinks -d -r $ftpdir
-ts "cruft"
-
-echo "Generating suite file lists for apt-ftparchive"
-dak make-suite-file-list
-ts "make-suite-file-list"
-
-echo "Updating fingerprints"
-# Update fingerprints
-dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg || true
-ts "import-keyring"
-
-# Generate override files
-echo "Writing overrides into text files"
-cd $overridedir
-dak make-overrides
-
-# FIXME
-rm -f override.sid.all3
-for i in main contrib non-free main.debian-installer; do cat override.sid.$i >> override.sid.all3; done
-ts "overrides"
-
-# Generate package / file mapping
-echo "Generating package / file mapping"
-dak make-pkg-file-mapping | bzip2 -9 > $base/ftp/indices/package-file.map.bz2
-ts "pkg-file-mapping"
-
-# Generate Packages and Sources files
-echo "Generating Packages and Sources files"
-cd $configdir
-apt-ftparchive generate apt.conf
-ts "apt-ftparchive"
-
-# Generate *.diff/ incremental updates
-echo "Generating pdiff files"
-dak generate-index-diffs
-ts "pdiff"
-
-# Generate Release files
-echo "Generating Release files"
-dak generate-releases
-ts "release files"
-
-# Clean out old packages
-echo "Cleanup old packages/files"
-dak clean-suites
-dak clean-queues
-ts "cleanup"
-
-# Needs to be rebuilt, as files have moved. Due to unaccepts, we need to
-# update this before wanna-build is updated.
-echo "Regenerating wanna-build/buildd information"
-psql projectb -A -t -q -c "SELECT filename FROM queue_build WHERE suite = 5 AND queue = 0 AND in_queue = true AND filename ~ 'd(sc|eb)$'" > $dbdir/dists/unstable_accepted.list
-symlinks -d /srv/incoming.debian.org/buildd > /dev/null
-apt-ftparchive generate apt.conf.buildd
-ts "buildd"
-
-echo "Running various scripts from $scriptsdir"
-cd $scriptsdir
-./mkmaintainers
-./copyoverrides
-./mklslar
-./mkfilesindices
-./mkchecksums
-ts "scripts"
-
-# (Re)generate the hardlinked mirror directory for "public" buildd / mirror access
-echo "Regenerating mirror/ hardlink fun"
-cd ${mirrordir}
-rsync -aH --link-dest ${ftpdir} --delete --delete-after --ignore-errors ${ftpdir}/. .
-ts "mirror hardlinks"
-
-echo "Trigger daily wanna-build run"
-ssh -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 wbadm@buildd /org/wanna-build/trigger.daily || echo "W-B trigger.daily failed" | mail -s "W-B Daily trigger failed" ftpmaster@ftp-master.debian.org
-ts "w-b"
-
-rm -f $NOTICE
-rm -f $LOCKCU
-ts "locked part finished"
+# How many logfiles to keep
+LOGROTATE=${LOGROTATE:-400}
+
+# Timestamps start at -1, so the first one gets 0
+TS=-1
+ts "startup"
+
+# Tell everyone we are doing some work
+NOTICE="$ftpdir/Archive_Maintenance_In_Progress"
+
+# lock cron.unchecked (it immediately exits when this exists)
+LOCK_DAILY="$lockdir/daily.lock"
+
+# Lock process-new and cron.unchecked from doing work
+LOCK_ACCEPTED="$lockdir/unchecked.lock"
+
+# This file is simply used to indicate to britney whether or not
+# the Packages file updates completed successfully. It's not a lock
+# from our point of view
+LOCK_BRITNEY="$lockdir/britney.lock"
+
+# If this file exists we exit immediately after the currently running
+# function is done
+LOCK_STOP="$lockdir/archive.stop"
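+# (create it by hand, e.g. "touch $lockdir/archive.stop", to have dinstall
+#  stop cleanly after the stage that is currently running)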
+
+lockfile -l 3600 "${LOCK_DAILY}"
+trap cleanup EXIT ERR TERM HUP INT QUIT
+
+touch "${LOCK_BRITNEY}"
-################################################################################
+GO=(
+ FUNC="notice"
+ TIME=""
+ ARGS=""
+ ERR="false"
+)
+stage $GO
+
+GO=(
+ FUNC="merkel1"
+ TIME="init"
+ ARGS=""
+ ERR="false"
+)
+stage $GO
+
+GO=(
+ FUNC="pgdump_pre"
+ TIME="pg_dump1"
+ ARGS=""
+ ERR=""
+)
+stage $GO
-echo "Creating post-daily-cron-job backup of projectb database..."
-POSTDUMP=$base/backup/dump_$(date +%Y.%m.%d-%H:%M:%S)
-pg_dump projectb > $POSTDUMP
-(cd $base/backup; ln -sf $POSTDUMP current)
-ts "pg_dump2"
+GO=(
+ FUNC="updates"
+ TIME="External Updates"
+ ARGS=""
+ ERR="false"
+)
+stage $GO
-################################################################################
+GO=(
+ FUNC="punew"
+ TIME="p-u-new"
+ ARGS="p-u-new"
+ ERR=""
+)
+stage $GO
+GO=(
+ FUNC="opunew"
+ TIME="o-p-u-new"
+ ARGS="o-p-u-new"
+ ERR=""
+)
+stage $GO
-echo "Expiring old database dumps..."
-(cd $base/backup; $scriptsdir/expire_dumps -d . -p -f "dump_*")
-ts "expire_dumps"
+GO=(
+ FUNC="i18n1"
+ TIME="i18n 1"
+ ARGS=""
+ ERR="false"
+)
+stage $GO
-################################################################################
+lockfile "$LOCK_ACCEPTED"
+GO=(
+ FUNC="accepted"
+ TIME="accepted"
+ ARGS=""
+ ERR=""
+)
+stage $GO
-# Send a report on NEW/BYHAND packages
-echo "Nagging ftpteam about NEW/BYHAND packages"
-dak queue-report | mail -e -s "NEW and BYHAND on $(date +%D)" ftpmaster@ftp-master.debian.org
-# and one on crufty packages
-echo "Sending information about crufty packages"
-dak cruft-report > $webdir/cruft-report-daily.txt
-dak cruft-report -s experimental >> $webdir/cruft-report-daily.txt
-cat $webdir/cruft-report-daily.txt | mail -e -s "Debian archive cruft report for $(date +%D)" ftpmaster@ftp-master.debian.org
-ts "reports"
+GO=(
+ FUNC="cruft"
+ TIME="cruft"
+ ARGS=""
+ ERR=""
+)
+stage $GO
-echo "Updating DM html page"
-$scriptsdir/dm-monitor >$webdir/dm-uploaders.html
+rm -f "$LOCK_ACCEPTED"
-echo "Categorizing uncategorized bugs filed against ftp.debian.org"
-dak bts-categorize
+GO=(
+ FUNC="msfl"
+ TIME="make-suite-file-list"
+ ARGS=""
+ ERR=""
+)
+stage $GO
-################################################################################
+GO=(
+ FUNC="fingerprints"
+ TIME="import-keyring"
+ ARGS=""
+ ERR="false"
+)
+stage $GO
-# Push katie@merkel so it syncs the projectb there. Returns immediately, the sync runs detached
-echo "Trigger merkels projectb sync"
-ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_projectb katie@merkel.debian.org sleep 1 || true
-ts "merkel projectb push"
+GO=(
+ FUNC="overrides"
+ TIME="overrides"
+ ARGS=""
+ ERR=""
+)
+stage $GO
-################################################################################
+GO=(
+ FUNC="mpfm"
+ TIME="pkg-file-mapping"
+ ARGS=""
+ ERR="false"
+)
+stage $GO
+GO=(
+ FUNC="packages"
+ TIME="apt-ftparchive"
+ ARGS=""
+ ERR=""
+)
+stage $GO
+
+GO=(
+ FUNC="pdiff"
+ TIME="pdiff"
+ ARGS=""
+ ERR=""
+)
+stage $GO
+
+GO=(
+ FUNC="release"
+ TIME="release files"
+ ARGS=""
+ ERR=""
+)
+stage $GO
+
+GO=(
+ FUNC="dakcleanup"
+ TIME="cleanup"
+ ARGS=""
+ ERR=""
+)
+stage $GO
+
+GO=(
+ FUNC="buildd"
+ TIME="buildd"
+ ARGS=""
+ ERR=""
+)
+stage $GO
+
+GO=(
+ FUNC="scripts"
+ TIME="scripts"
+ ARGS=""
+ ERR=""
+)
+stage $GO
+
+GO=(
+ FUNC="mirror"
+ TIME="mirror hardlinks"
+ ARGS=""
+ ERR=""
+)
+stage $GO
+
+GO=(
+ FUNC="wb"
+ TIME="w-b"
+ ARGS=""
+ ERR=""
+)
+stage $GO
+
+rm -f "${NOTICE}"
+rm -f "${LOCK_DAILY}"
+
+ts "locked part finished"
+
+GO=(
+ FUNC="pgdump_post"
+ TIME="pg_dump2"
+ ARGS=""
+ ERR=""
+)
+stage $GO
+
+GO=(
+ FUNC="expire"
+ TIME="expire_dumps"
+ ARGS=""
+ ERR=""
+)
+stage $GO
+
+GO=(
+ FUNC="reports"
+ TIME="reports"
+ ARGS=""
+ ERR=""
+)
+stage $GO
+
+GO=(
+ FUNC="dm"
+ TIME=""
+ ARGS=""
+ ERR=""
+)
+stage $GO
+
+GO=(
+ FUNC="bts"
+ TIME=""
+ ARGS=""
+ ERR=""
+)
+stage $GO
+
+GO=(
+ FUNC="merkel2"
+ TIME="merkel projectb push"
+ ARGS=""
+ ERR="false"
+)
+stage $GO
ulimit -m 90000 -d 90000 -s 10000 -v 200000
-echo "Using run-parts to run scripts in $base/scripts/distmnt"
-run-parts --report $base/scripts/distmnt
-ts "run-parts"
-
-echo "Exporting package data foo for i18n project"
-STAMP=$(date "+%Y%m%d%H%M")
-mkdir -p ${scriptdir}/i18n/${STAMP}
-cd ${scriptdir}/i18n/${STAMP}
-dak control-suite -l stable > lenny
-dak control-suite -l testing > squeeze
-dak control-suite -l unstable > sid
-echo "${STAMP}" > timestamp
-gpg --secret-keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 6070D3A1 --detach-sign -o timestamp.gpg timestamp
-rm -f md5sum
-md5sum * > md5sum
-cd ${webdir}/
-ln -sfT ${scriptdir}/i18n/${STAMP} i18n
-
-cd ${scriptdir}
-find ./i18n -mtime +2 -mindepth 1 -maxdepth 1 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
-ts "i18n 2"
-
-echo "Daily cron scripts successful."
-
-# Stats pr0n
-echo "Updating stats data"
-cd $configdir
-$scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
-R --slave --vanilla < $base/misc/ftpstats.R
-ts "stats"
-
-# Remove the britney lock
-rm -f ${BRITNEYLOCK}
-
-# Clean up apt-ftparchive's databases
-echo "Clean up apt-ftparchive's databases"
-cd $configdir
-apt-ftparchive -q clean apt.conf
-ts "apt-ftparchive cleanup"
-
-# Compress psql backups
-echo "Compress old psql backups"
-(cd $base/backup/
- find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' -mtime +1 |
- while read dumpname; do
- echo "Compressing $dumpname"
- bzip2 -9 "$dumpname"
- done
-)
-ts "compress"
-
-echo "Removing old dinstall logfiles"
-(cd $logdir
- find -maxdepth 1 -mindepth 1 -type f -name 'dinstall_*' -mtime +60 |
- while read dumpname; do
- echo "Removing $dumpname"
- rm -f "$dumpname"
- done
-
- find -maxdepth 1 -mindepth 1 -type f -name 'weekly_*' -mtime +60 |
- while read dumpname; do
- echo "Removing $dumpname"
- rm -f "$dumpname"
- done
-)
-ts "logremove"
-
-echo "Finally, all is done, sending mail and compressing logfile"
+GO=(
+ FUNC="runparts"
+ TIME="run-parts"
+ ARGS=""
+ ERR="false"
+)
+stage $GO
+
+GO=(
+ FUNC="i18n2"
+ TIME="i18n 2"
+ ARGS=""
+ ERR="false"
+)
+stage $GO
+
+GO=(
+ FUNC="stats"
+ TIME="stats"
+ ARGS=""
+ ERR="false"
+)
+stage $GO
+
+rm -f ${LOCK_BRITNEY}
+
+GO=(
+ FUNC="aptftpcleanup"
+ TIME="apt-ftparchive cleanup"
+ ARGS=""
+ ERR=""
+)
+stage $GO
+
+GO=(
+ FUNC="compress"
+ TIME="compress"
+ ARGS=""
+ ERR=""
+)
+stage $GO
+
+log "Daily cron scripts successful, all done"
+
exec > /dev/null 2>&1
-$masterdir/tools/logs.py "$LOGFILE"
+GO=(
+ FUNC="logstats"
+ TIME=""
+ ARGS=""
+ ERR=""
+)
+stage $GO
cat "$LOGFILE" | mail -s "Log for dinstall run of ${NOW}" cron@ftp-master.debian.org
-bzip2 -9 "$LOGFILE"
-################################################################################
+savelog -c ${LOGROTATE} -j "$LOGFILE"
+
+# Now, at the very (successful) end of dinstall, make sure we remove
+# our stage files, so the next dinstall run will do it all again.
+rm -f "${stagedir}"/*