X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=config%2Fdebian%2Fcron.dinstall;h=9c9840d7c45e27213cedf62a65d7e44e443fdb0d;hb=d0b5ef925b72dc0610d34037606f15cb81e02e7c;hp=2478ed4bf3db69489d42ea598e9d6f08cbbd672d;hpb=9381647a4a8f593a3e86769fad910f99ce822ca2;p=dak.git diff --git a/config/debian/cron.dinstall b/config/debian/cron.dinstall index 2478ed4b..9c9840d7 100755 --- a/config/debian/cron.dinstall +++ b/config/debian/cron.dinstall @@ -1,268 +1,807 @@ -#! /bin/sh +#!/bin/bash +# No way I try to deal with a crippled sh just for POSIX foo. + +# Copyright (C) 2009 Joerg Jaspert +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation; version 2. # -# Executed daily via cron, out of dak's crontab. +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. +# exit on errors set -e +# make sure to only use defined variables +set -u + +# import the general variable set. export SCRIPTVARS=/srv/ftp.debian.org/dak/config/debian/vars . $SCRIPTVARS -################################################################################ +######################################################################## +# Functions # +######################################################################## +# common functions are "outsourced" +. "${configdir}/common" -# Start logging -NOW=`date "+%Y.%m.%d-%H:%M:%S"` -LOGFILE="$logdir/dinstall_${NOW}.log" -exec > "$LOGFILE" 2>&1 +# Timestamp. Used for dinstall stat graphs +function ts() { + TS=$(($TS+1)); + echo "Archive maintenance timestamp $TS ($1): $(date +%H:%M:%S)" +} + +# Cleanup actions +function cleanup() { + rm -f ${LOCK_DAILY} + rm -f ${LOCK_ACCEPTED} +} -ts() { - TS=$(($TS+1)); - echo Archive maintenance timestamp $TS: $(date +%H:%M:%S) +# If we error out this one is called, *FOLLOWED* by cleanup above +function onerror() { + ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S") + cat "$LOGFILE" | mail -s "ATTENTION ATTENTION! dinstall error at ${ERRDATE} (Be quiet, Brain, or I'll stab you with a Q-tip)" cron@ftp-master.debian.org } -echo Archive maintenance started at $(date +%H:%M:%S) -TS=0 +######################################################################## +# the actual dinstall functions follow # +######################################################################## -NOTICE="$ftpdir/Archive_Maintenance_In_Progress" -LOCKCU="$lockdir/daily.lock" -LOCKAC="$lockdir/unchecked.lock" -lockac=0 - -cleanup() { - rm -f "$NOTICE" - rm -f "$LOCKCU" - if [ "$lockac" -eq "1" ]; then - rm -f "$LOCKAC" - fi - echo "Cleanup" -} -lockfile -l 3600 $LOCKCU -trap cleanup 0 - -rm -f "$NOTICE" -cat > "$NOTICE" < "$NOTICE" < $base/backup/dump_$(date +%Y.%m.%d-%H:%M:%S) +} + +function pgdump_post() { + log "Creating post-daily-cron-job backup of projectb database..." 
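+    # Unlike pgdump_pre above, this dump also saves the cluster globals
+    # (roles and the like) via pg_dumpall and repoints the "current" /
+    # "currentall" symlinks at the fresh dumps, as done below.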
+ cd $base/backup + POSTDUMP=$(date +%Y.%m.%d-%H:%M:%S) + pg_dump projectb > $base/backup/dump_$POSTDUMP + pg_dumpall --globals-only > $base/backup/dumpall_$POSTDUMP + ln -sf $base/backup/dump_$POSTDUMP current + ln -sf $base/backup/dumpall_$POSTDUMP currentall +} + +# Load the dak-dev projectb +function pgdakdev() { + cd $base/backup + echo "drop database projectb" | psql -p 5433 template1 + cat currentall | psql -p 5433 template1 + createdb -p 5433 -T template0 projectb + fgrep -v '\connect' current | psql -p 5433 projectb +} + +# Updating various files +function updates() { + log "Updating Bugs docu, Mirror list and mailing-lists.txt" + cd $configdir + $scriptsdir/update-bugdoctxt + $scriptsdir/update-mirrorlists + $scriptsdir/update-mailingliststxt + $scriptsdir/update-pseudopackages.sh +} + +# Process (oldstable)-proposed-updates "NEW" queue +function punew_do() { + cd "${queuedir}/${1}" + date -u -R >> REPORT + dak process-new -a -C COMMENTS >> REPORT || true + echo >> REPORT +} +function punew() { + log "Doing automated p-u-new processing" + punew_do "$1" +} +function opunew() { + log "Doing automated o-p-u-new processing" + punew_do "$1" +} + +# The first i18n one, syncing new descriptions +function i18n1() { + log "Synchronizing i18n package descriptions" + # First sync their newest data + cd ${scriptdir}/i18nsync + rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true + + # Now check if we still know about the packages for which they created the files + # is the timestamp signed by us? + if $(gpgv --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then + # now read it. As its signed by us we are sure the content is what we expect, no need + # to do more here. And we only test -d a directory on it anyway. + TSTAMP=$(cat timestamp) + # do we have the dir still? + if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then + # Lets check! + if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then + # Yay, worked, lets copy around + for dir in squeeze sid; do + if [ -d dists/${dir}/ ]; then + cd dists/${dir}/main/i18n + rsync -aq --delete --delete-after . ${ftpdir}/dists/${dir}/main/i18n/. + fi + cd ${scriptdir}/i18nsync + done + else + echo "ARRRR, bad guys, wrong files, ARRR" + echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." debian-l10n-devel@lists.alioth.debian.org + fi + else + echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR" + echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." debian-l10n-devel@lists.alioth.debian.org + fi + else + echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR." + echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -s "You can't keep blaming yourself. Just blame yourself once, and move on." 
debian-l10n-devel@lists.alioth.debian.org + fi +} + +# Process the accepted queue +function accepted() { + log "Processing queue/accepted" + rm -f "$accepted/REPORT" + dak process-accepted -pa -d "$accepted" > "$accepted/REPORT" + cat "$accepted/REPORT" | mail -s "Install for $(date +"%D - %R")" ftpmaster@ftp-master.debian.org + chgrp debadmin "$accepted/REPORT" + chmod 664 "$accepted/REPORT" +} + +function cruft() { + log "Checking for cruft in overrides" + dak check-overrides + + log "Fixing symlinks in $ftpdir" + symlinks -d -r $ftpdir +} + +function msfl() { + log "Generating suite file lists for apt-ftparchive" + dak make-suite-file-list +} + +function fingerprints() { + log "Updating fingerprints" + dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg +} + +function overrides() { + log "Writing overrides into text files" + cd $overridedir + dak make-overrides + + # FIXME + rm -f override.sid.all3 + for i in main contrib non-free main.debian-installer; do cat override.sid.$i >> override.sid.all3; done +} + +function mpfm() { + log "Generating package / file mapping" + dak make-pkg-file-mapping | bzip2 -9 > $base/ftp/indices/package-file.map.bz2 +} + +function packages() { + log "Generating Packages and Sources files" + cd $configdir + apt-ftparchive generate apt.conf +} + +function pdiff() { + log "Generating pdiff files" + dak generate-index-diffs +} + +function release() { + log "Generating Release files" + dak generate-releases +} + +function dakcleanup() { + log "Cleanup old packages/files" + dak clean-suites -m 10000 + dak clean-queues +} + +function buildd() { + # Needs to be rebuilt, as files have moved. Due to unaccepts, we need to + # update this before wanna-build is updated. + log "Regenerating wanna-build/buildd information" + psql projectb -A -t -q -c "SELECT filename FROM queue_build WHERE suite = 5 AND queue = 0 AND in_queue = true AND filename ~ 'd(sc|eb)$'" > $dbdir/dists/unstable_accepted.list + symlinks -d /srv/incoming.debian.org/buildd > /dev/null + apt-ftparchive generate apt.conf.buildd +} + +function scripts() { + log "Running various scripts from $scriptsdir" + cd $scriptsdir + ./mkmaintainers + ./copyoverrides + ./mklslar + ./mkfilesindices + ./mkchecksums +} + +function mirror() { + echo "Regenerating \"public\" mirror/ hardlink fun" + cd ${mirrordir} + rsync -aH --link-dest ${ftpdir} --delete --delete-after --ignore-errors ${ftpdir}/. . +} + +function wb() { + log "Trigger daily wanna-build run" + ssh -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 wbadm@buildd /org/wanna-build/trigger.daily || echo "W-B trigger.daily failed" | mail -s "W-B Daily trigger failed" ftpmaster@ftp-master.debian.org +} + +function expire() { + log "Expiring old database dumps..." + cd $base/backup + $scriptsdir/expire_dumps -d . 
-p -f "dump_*" +} + +function reports() { + # Send a report on NEW/BYHAND packages + log "Nagging ftpteam about NEW/BYHAND packages" + dak queue-report | mail -e -s "NEW and BYHAND on $(date +%D)" ftpmaster@ftp-master.debian.org + # and one on crufty packages + log "Sending information about crufty packages" + dak cruft-report > $webdir/cruft-report-daily.txt + dak cruft-report -s experimental >> $webdir/cruft-report-daily.txt + cat $webdir/cruft-report-daily.txt | mail -e -s "Debian archive cruft report for $(date +%D)" ftpmaster@ftp-master.debian.org +} + +function dm() { + log "Updating DM html page" + $scriptsdir/dm-monitor >$webdir/dm-uploaders.html +} + +function bts() { + log "Categorizing uncategorized bugs filed against ftp.debian.org" + dak bts-categorize +} -# Push merkels qa user, so the qa pages can show "dinstall is running" information -echo "Telling merkels QA user that we start dinstall" -ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@merkel.debian.org sleep 1 || true -ts +function merkel2() { + # Push dak@merkel so it syncs the projectb there. Returns immediately, the sync runs detached + log "Trigger merkels projectb sync" + ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_projectb dak@merkel.debian.org sleep 1 +} -################################################################################ +function runparts() { + log "Using run-parts to run scripts in $base/scripts/distmnt" + run-parts --report $base/scripts/distmnt +} -echo "Creating pre-daily-cron-job backup of projectb database..." -pg_dump projectb > $base/backup/dump_$(date +%Y.%m.%d-%H:%M:%S) +function i18n2() { + log "Exporting package data foo for i18n project" + STAMP=$(date "+%Y%m%d%H%M") + mkdir -p ${scriptdir}/i18n/${STAMP} + cd ${scriptdir}/i18n/${STAMP} + dak control-suite -l stable > lenny + dak control-suite -l testing > squeeze + dak control-suite -l unstable > sid + echo "${STAMP}" > timestamp + gpg --secret-keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 6070D3A1 --detach-sign -o timestamp.gpg timestamp + rm -f md5sum + md5sum * > md5sum + cd ${webdir}/ + ln -sfT ${scriptdir}/i18n/${STAMP} i18n + + cd ${scriptdir} + find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf +} -################################################################################ +function stats() { + log "Updating stats data" + cd $configdir + $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data + R --slave --vanilla < $base/misc/ftpstats.R +} -ts -echo "Updating Bugs docu, Mirror list and mailing-lists.txt" -cd $configdir -$scriptsdir/update-bugdoctxt -$scriptsdir/update-mirrorlists -$scriptsdir/update-mailingliststxt +function aptftpcleanup() { + log "Clean up apt-ftparchive's databases" + cd $configdir + apt-ftparchive -q clean apt.conf +} -################################################################################ +function compress() { + log "Compress old psql backups" + cd $base/backup/ + find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! 
-name '*.gz' -mtime +1 | + while read dumpname; do + echo "Compressing $dumpname" + bzip2 -9v "$dumpname" + done +} -ts -echo "Doing automated p-u-new processing" -cd $queuedir/p-u-new -date -u -R >> REPORT -dak process-new -a -C COMMENTS >> REPORT || true -echo >> REPORT +function logstats() { + $masterdir/tools/logs.py "$1" +} -ts -echo "Doing automated o-p-u-new processing" -cd $queuedir/o-p-u-new -date -u -R >> REPORT -dak process-new -a -C COMMENTS >> REPORT || true -echo >> REPORT +# save timestamp when we start +function savetimestamp() { + NOW=`date "+%Y.%m.%d-%H:%M:%S"` + echo ${NOW} > "${dbdir}/dinstallstart" +} -################################################################################ +function maillogfile() { + cat "$LOGFILE" | mail -s "Log for dinstall run of ${NOW}" cron@ftp-master.debian.org +} -ts -lockfile $LOCKAC -lockac=1 -echo "Processing queue/accepted" -cd $accepted -rm -f REPORT -dak process-accepted -pa *.changes | tee REPORT | \ - mail -s "Install for $(date +%D)" ftpmaster@ftp-master.debian.org -chgrp debadmin REPORT -chmod 664 REPORT +function renamelogfile() { + if [ -f "${dbdir}/dinstallstart" ]; then + NOW=$(cat "${dbdir}/dinstallstart") + maillogfile + mv "$LOGFILE" "$logdir/dinstall_${NOW}.log" + logstats "$logdir/dinstall_${NOW}.log" + bzip2 -9 "$logdir/dinstall_${NOW}.log" + else + error "Problem, I don't know when dinstall started, unable to do log statistics." + NOW=`date "+%Y.%m.%d-%H:%M:%S"` + maillogfile + mv "$LOGFILE" "$logdir/dinstall_${NOW}.log" + bzip2 -9 "$logdir/dinstall_${NOW}.log" + fi +} -ts -echo "Checking for cruft in overrides" -dak check-overrides -rm -f $LOCKAC -lockac=0 +function testingsourcelist() { + dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list +} +######################################################################## +######################################################################## -echo "Fixing symlinks in $ftpdir" -symlinks -d -r $ftpdir +# Function to save which stage we are in, so we can restart an interrupted +# dinstall. Or even run actions in parallel, if we dare to, by simply +# backgrounding the call to this function. But that should only really be +# done for things we dont care much about. +# +# This should be called with the first argument being an array, with the +# members +# - FUNC - the function name to call +# - ARGS - Possible arguments to hand to the function. Can be the empty string +# - TS - The timestamp name. Can be the empty string +# - ERR - if this is the string false, then the call will be surrounded by +# set +e ... set -e calls, so errors in the function do not exit +# dinstall. Can be the empty string, meaning true. +# +# MAKE SURE TO KEEP THIS THE LAST FUNCTION, AFTER ALL THE VARIOUS ONES +# ADDED FOR DINSTALL FEATURES! +function stage() { + ARGS='GO[@]' + local "${!ARGS}" + + if [ -f "${stagedir}/${FUNC}" ]; then + stamptime=$(/usr/bin/stat -c %Z "${stagedir}/${FUNC}") + unixtime=$(date +%s) + difference=$(( $unixtime - $stamptime )) + if [ ${difference} -ge 14400 ]; then + log_error "Did already run ${FUNC}, stagefile exists, but that was ${difference} seconds ago. Please check." + else + log "Did already run ${FUNC}, not calling again..." + fi + return + fi + + debug "Now calling function ${FUNC}. Arguments: ${ARGS}. Timestamp: ${TS}" + + # Make sure we are always at the same place. If a function wants to be elsewhere, + # it has to cd first! 
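+    # (Note: the timestamp member described above is passed as TIME= in the
+    #  GO arrays below, e.g. GO=( FUNC="packages" TIME="apt-ftparchive" ARGS="" ERR="" ).
+    #  A finished stage leaves ${stagedir}/${FUNC} behind; deleting that marker
+    #  file makes the next dinstall run execute that stage again.)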
+ cd ${configdir} + + if [ -f "${LOCK_STOP}" ]; then + log "${LOCK_STOP} exists, exiting immediately" + exit 42 + fi + + if [ "${ERR}" = "false" ]; then + set +e + fi + ${FUNC} ${ARGS} + + # No matter what happened in the function, we make sure we have set -e default state back + set -e + + # Make sure we are always at the same place. + cd ${configdir} + + touch "${stagedir}/${FUNC}" + + if [ -n "${TIME}" ]; then + ts "${TIME}" + fi + + if [ -f "${LOCK_STOP}" ]; then + log "${LOCK_STOP} exists, exiting immediately" + exit 42 + fi +} -ts +######################################################################## -echo "Generating suite file lists for apt-ftparchive" -dak make-suite-file-list +# We need logs. +LOGFILE="$logdir/dinstall.log" -ts +exec >> "$LOGFILE" 2>&1 -echo "Updating fingerprints" -# Update fingerprints -dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg || true +# usually we are not using debug logs. Set to 1 if you want them. +DEBUG=0 -ts +# our name +PROGRAM="dinstall" -# Generate override files -echo "Writing overrides into text files" -cd $overridedir -dak make-overrides +# where do we want mails to go? For example log entries made with error() +if [ "x$(hostname -s)x" != "xriesx" ]; then + # Not our ftpmaster host + MAILTO=${MAILTO:-"root"} +else + # Yay, ftpmaster + MAILTO=${MAILTO:-"ftpmaster@debian.org"} +fi -ts +# How many logfiles to keep +LOGROTATE=${LOGROTATE:-400} -# FIXME -rm -f override.sid.all3 -for i in main contrib non-free main.debian-installer; do cat override.sid.$i >> override.sid.all3; done +# Marker for dinstall start +DINSTALLSTART="${lockdir}/dinstallstart" +# Marker for dinstall end +DINSTALLEND="${lockdir}/dinstallend" -ts +# Timestamps start at -1. so first gets 0 +TS=-1 +touch "${DINSTALLSTART}" +ts "startup" -# Generate Packages and Sources files -echo "Generating Packages and Sources files" -cd $configdir -apt-ftparchive generate apt.conf +# Tell everyone we are doing some work +NOTICE="$ftpdir/Archive_Maintenance_In_Progress" -ts +# lock cron.unchecked (it immediately exits when this exists) +LOCK_DAILY="$lockdir/daily.lock" + +# Lock process-new and cron.unchecked from doing work +LOCK_ACCEPTED="$lockdir/unchecked.lock" + +# This file is simply used to indicate to britney whether or not +# the Packages file updates completed sucessfully. 
It's not a lock +# from our point of view +LOCK_BRITNEY="$lockdir/britney.lock" + +# If this file exists we exit immediately after the currently running +# function is done +LOCK_STOP="$lockdir/archive.stop" + +lockfile -l 3600 "${LOCK_DAILY}" +trap onerror ERR +trap cleanup EXIT TERM HUP INT QUIT + +touch "${LOCK_BRITNEY}" + +GO=( + FUNC="savetimestamp" + TIME="" + ARGS="" + ERR="false" +) +stage $GO + +GO=( + FUNC="notice" + TIME="" + ARGS="" + ERR="false" +) +stage $GO + +GO=( + FUNC="merkel1" + TIME="init" + ARGS="" + ERR="false" +) +stage $GO + +GO=( + FUNC="pgdump_pre" + TIME="pg_dump1" + ARGS="" + ERR="" +) +stage $GO + +GO=( + FUNC="updates" + TIME="External Updates" + ARGS="" + ERR="false" +) +stage $GO -# Generate *.diff/ incremental updates -echo "Generating pdiff files" -dak generate-index-diffs +GO=( + FUNC="punew" + TIME="p-u-new" + ARGS="p-u-new" + ERR="" +) +stage $GO -ts +GO=( + FUNC="opunew" + TIME="o-p-u-new" + ARGS="o-p-u-new" + ERR="" +) +stage $GO -# Generate Release files -echo "Generating Release files" -dak generate-releases +GO=( + FUNC="i18n1" + TIME="i18n 1" + ARGS="" + ERR="false" +) +stage $GO -ts +lockfile "$LOCK_ACCEPTED" -# Clean out old packages -echo "Cleanup old packages/files" -dak clean-suites -dak clean-queues +GO=( + FUNC="accepted" + TIME="accepted" + ARGS="" + ERR="" +) +stage $GO -ts +GO=( + FUNC="cruft" + TIME="cruft" + ARGS="" + ERR="" +) +stage $GO -# Needs to be rebuilt, as files have moved. Due to unaccepts, we need to -# update this before wanna-build is updated. -echo "Regenerating wanna-build/buildd information" -psql projectb -A -t -q -c "SELECT filename FROM queue_build WHERE suite = 5 AND queue = 0 AND in_queue = true AND filename ~ 'd(sc|eb)$'" > $dbdir/dists/unstable_accepted.list -symlinks -d /srv/incoming.debian.org/buildd > /dev/null -apt-ftparchive generate apt.conf.buildd +rm -f "$LOCK_ACCEPTED" -ts +GO=( + FUNC="msfl" + TIME="make-suite-file-list" + ARGS="" + ERR="" +) +stage $GO -echo "Running various scripts from $scriptsdir" -cd $scriptsdir -./mkmaintainers -./copyoverrides -./mklslar -./mkfilesindices -./mkchecksums +GO=( + FUNC="fingerprints" + TIME="import-keyring" + ARGS="" + ERR="false" +) +stage $GO -ts +GO=( + FUNC="overrides" + TIME="overrides" + ARGS="" + ERR="" +) +stage $GO -echo "Trigger daily wanna-build run" -ssh -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 buildd@buildd /org/wanna-build/trigger.daily || echo "W-B trigger.daily failed" | mail -s "W-B Daily trigger failed" ftpmaster@ftp-master.debian.org +GO=( + FUNC="mpfm" + TIME="pkg-file-mapping" + ARGS="" + ERR="false" +) +stage $GO -rm -f $NOTICE -rm -f $LOCKCU -echo Archive maintenance finished at $(date +%H:%M:%S) +GO=( + FUNC="packages" + TIME="apt-ftparchive" + ARGS="" + ERR="" +) +stage $GO -################################################################################ +GO=( + FUNC="pdiff" + TIME="pdiff" + ARGS="" + ERR="" +) +stage $GO -echo "Creating post-daily-cron-job backup of projectb database..." -POSTDUMP=$base/backup/dump_$(date +%Y.%m.%d-%H:%M:%S) -pg_dump projectb > $POSTDUMP -(cd $base/backup; ln -sf $POSTDUMP current) +GO=( + FUNC="release" + TIME="release files" + ARGS="" + ERR="" +) +stage $GO -################################################################################ +GO=( + FUNC="dakcleanup" + TIME="cleanup" + ARGS="" + ERR="" +) +stage $GO -ts +GO=( + FUNC="buildd" + TIME="buildd" + ARGS="" + ERR="" +) +stage $GO -echo "Expiring old database dumps..." -(cd $base/backup; $scriptsdir/expire_dumps -d . 
-p -f "dump_*") +GO=( + FUNC="scripts" + TIME="scripts" + ARGS="" + ERR="" +) +stage $GO -################################################################################ +GO=( + FUNC="mirror" + TIME="mirror hardlinks" + ARGS="" + ERR="" +) +stage $GO -ts +GO=( + FUNC="wb" + TIME="w-b" + ARGS="" + ERR="" +) +stage $GO -# Send a report on NEW/BYHAND packages -echo "Nagging ftpteam about NEW/BYHAND packages" -dak queue-report | mail -e -s "NEW and BYHAND on $(date +%D)" ftpmaster@ftp-master.debian.org -# and one on crufty packages -echo "Sending information about crufty packages" -dak cruft-report > $webdir/cruft-report-daily.txt -dak cruft-report -s experimental >> $webdir/cruft-report-daily.txt -cat $webdir/cruft-report-daily.txt | mail -e -s "Debian archive cruft report for $(date +%D)" ftpmaster@ftp-master.debian.org +rm -f "${NOTICE}" +rm -f "${LOCK_DAILY}" -ts +ts "locked part finished" -echo "Updating DM html page" -$scriptsdir/dm-monitor >$webdir/dm-uploaders.html +GO=( + FUNC="pgdump_post" + TIME="pg_dump2" + ARGS="" + ERR="" +) +stage $GO -################################################################################ +GO=( + FUNC="expire" + TIME="expire_dumps" + ARGS="" + ERR="" +) +stage $GO -# Push katie@merkel so it syncs the projectb there. Returns immediately, the sync runs detached -echo "Trigger merkels projectb sync" -ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_projectb katie@merkel.debian.org sleep 1 || true +GO=( + FUNC="reports" + TIME="reports" + ARGS="" + ERR="" +) +stage $GO -################################################################################ +GO=( + FUNC="dm" + TIME="" + ARGS="" + ERR="" +) +stage $GO -ts +GO=( + FUNC="bts" + TIME="" + ARGS="" + ERR="false" +) +stage $GO -ulimit -m 90000 -d 90000 -s 10000 -v 200000 +GO=( + FUNC="merkel2" + TIME="merkel projectb push" + ARGS="" + ERR="false" +) +stage $GO -echo "Using run-parts to run scripts in $base/scripts/distmnt" -run-parts --report $base/scripts/distmnt +GO=( + FUNC="runparts" + TIME="run-parts" + ARGS="" + ERR="false" +) +stage $GO -echo "Daily cron scripts successful." +GO=( + FUNC="i18n2" + TIME="i18n 2" + ARGS="" + ERR="false" +) +stage $GO -ts +GO=( + FUNC="stats" + TIME="stats" + ARGS="" + ERR="false" +) +stage $GO -# Stats pr0n -echo "Updating stats data" -cd $configdir -$scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data -R --slave --vanilla < $base/misc/ftpstats.R +GO=( + FUNC="testingsourcelist" + TIME="" + ARGS="" + ERR="false" +) +stage $GO -ts +rm -f ${LOCK_BRITNEY} -# Clean up apt-ftparchive's databases -echo "Clean up apt-ftparchive's databases" -cd $configdir -apt-ftparchive -q clean apt.conf +GO=( + FUNC="pgdakdev" + TIME="dak-dev db" + ARGS="" + ERR="false" +) +stage $GO -ts +GO=( + FUNC="aptftpcleanup" + TIME="apt-ftparchive cleanup" + ARGS="" + ERR="" +) +stage $GO -# Compress psql backups -echo "Compress old psql backups" -(cd $base/backup/ - find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! 
-name '*.gz' | - while read dumpname; do - echo "Compressing $dumpname" - bzip2 -9 "$dumpname" - done +GO=( + FUNC="compress" + TIME="compress" + ARGS="" + ERR="" ) +stage $GO + +log "Daily cron scripts successful, all done" -ts +exec > "$logdir/afterdinstall.log" 2>&1 -echo "Finally, all is done, sending mail and compressing logfile" -exec > /dev/null 2>&1 +GO=( + FUNC="renamelogfile" + TIME="" + ARGS="" + ERR="false" +) +stage $GO -cat "$LOGFILE" | mail -s "Log for dinstall run of ${NOW}" cron@ftp-master.debian.org -bzip2 -9 "$LOGFILE" -################################################################################ +# Now, at the very (successful) end of dinstall, make sure we remove +# our stage files, so the next dinstall run will do it all again. +rm -f ${stagedir}/* +touch "${DINSTALLEND}"
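+
+# After a fully successful run the per-stage marker files are gone again and
+# ${DINSTALLEND} exists. An aborted run keeps its stage files around, so the
+# next invocation will skip the stages that already completed and pick up
+# where it stopped.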