#!/bin/bash
# No way I try to deal with a crippled sh just for POSIX foo.
# Copyright (C) 2009 Joerg Jaspert
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; version 2.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.

# Homer: Are you saying you're never going to eat any animal again? What
#        about bacon?
# Lisa: No.
# Homer: Ham?
# Lisa: No.
# Homer: Pork chops?
# Lisa: Dad, those all come from the same animal.
# Homer: Heh heh heh. Ooh, yeah, right, Lisa. A wonderful, magical animal.

# exit on errors
set -e
# make sure to only use defined variables
set -u
# ERR traps should be inherited from functions too. (And command
# substitutions and subshells and whatnot, but for us the functions is
# the important part here)
set -E

# import the general variable set.
export SCRIPTVARS=/srv/ftp.debian.org/dak/config/debian/vars
. $SCRIPTVARS

########################################################################
# Functions                                                            #
########################################################################
# common functions are "outsourced"
. "${configdir}/common"

# Timestamp. Used for dinstall stat graphs
function ts() {
    echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
}

# Cleanup actions; runs on every exit path (see trap setup below).
function cleanup() {
    rm -f ${LOCK_DAILY}
    rm -f ${LOCK_ACCEPTED}
}

# If we error out this one is called, *FOLLOWED* by cleanup above.
# Mails the log of the failing stage (only) to the cron alias.
function onerror() {
    ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
    subject="ATTENTION ATTENTION!"
    if [ "${error}" = "false" ]; then
        subject="${subject} (continued)"
    else
        subject="${subject} (interrupted)"
    fi
    subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
    cat "${STAGEFILE}.log" | mail -s "${subject}" -a "X-Debian: DAK" cron@ftp-master.debian.org
}

########################################################################
# the actual dinstall functions follow                                 #
########################################################################

# Setup the notice file to tell bad mirrors they used the wrong time
function notice() {
    rm -f "$NOTICE"
    # NOTE(review): the body of this heredoc was lost when this copy of the
    # file was mangled (it was fused with the pgdump_pre() redirect below).
    # The text here is a placeholder -- restore the original wording from VCS.
    cat > "$NOTICE" <<EOF
Packages are currently being installed and indices created.
Please do not mirror the archive while this notice file exists.
EOF
}

# Create a pre-dinstall backup of the projectb database.
# NOTE(review): the "function pgdump_pre()" header and log line were lost in
# this copy; reconstructed from the surviving dump_pre_ redirect target and
# by analogy with pgdump_post() below -- confirm against VCS.
function pgdump_pre() {
    log "Creating pre-daily-cron-job backup of projectb database..."
    pg_dump projectb > $base/backup/dump_pre_$(date +%Y.%m.%d-%H:%M:%S)
}

# NOTE(review): a merkel1() function is invoked via "stage" below, but its
# definition is missing from this copy of the file -- restore it from VCS.

# Create a post-dinstall backup of projectb plus the cluster globals, and
# point the "current"/"currentall" symlinks at the fresh dumps.
function pgdump_post() {
    log "Creating post-daily-cron-job backup of projectb database..."
    cd $base/backup
    POSTDUMP=$(date +%Y.%m.%d-%H:%M:%S)
    pg_dump projectb > $base/backup/dump_$POSTDUMP
    pg_dumpall --globals-only > $base/backup/dumpall_$POSTDUMP
    ln -sf $base/backup/dump_$POSTDUMP current
    ln -sf $base/backup/dumpall_$POSTDUMP currentall
}

# Load the dak-dev projectb (the cluster on port 5433) from the
# "current"/"currentall" dump symlinks written by pgdump_post.
function pgdakdev() {
    cd $base/backup
    echo "drop database projectb" | psql -p 5433 template1
    cat currentall | psql -p 5433 template1
    createdb -p 5433 -T template0 projectb
    fgrep -v '\connect' current | psql -p 5433 projectb
}

# Updating various files
function updates() {
    log "Updating Bugs docu, Mirror list and mailing-lists.txt"
    cd $configdir
    $scriptsdir/update-bugdoctxt
    $scriptsdir/update-mirrorlists
    $scriptsdir/update-mailingliststxt
    $scriptsdir/update-pseudopackages.sh
}

# Process (oldstable)-proposed-updates "NEW" queue.
# $1 - queue directory name below ${queuedir}
function punew_do() {
    cd "${queuedir}/${1}"
    date -u -R >> REPORT
    dak process-new -a -C COMMENTS >> REPORT || true
    echo >> REPORT
}

# p-u-new wrapper around punew_do
function punew() {
    log "Doing automated p-u-new processing"
    punew_do "$1"
}

# o-p-u-new wrapper around punew_do
function opunew() {
    log "Doing automated o-p-u-new processing"
    punew_do "$1"
}

# The first i18n one, syncing new descriptions
function i18n1() {
    log "Synchronizing i18n package descriptions"
    # First sync their newest data
    cd ${scriptdir}/i18nsync
    rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true

    # Now check if we still know about the packages for which they created the files
    # is the timestamp signed by us?
    if $(gpgv --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
        # now read it. As its signed by us we are sure the content is what we expect, no need
        # to do more here. And we only test -d a directory on it anyway.
        TSTAMP=$(cat timestamp)
        # do we have the dir still?
        if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
            # Lets check!
            if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
                # Yay, worked, lets copy around
                for dir in squeeze sid; do
                    if [ -d dists/${dir}/ ]; then
                        cd dists/${dir}/main/i18n
                        rsync -aq --delete --delete-after . ${ftpdir}/dists/${dir}/main/i18n/.
                    fi
                    cd ${scriptdir}/i18nsync
                done
            else
                echo "ARRRR, bad guys, wrong files, ARRR"
                echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." debian-l10n-devel@lists.alioth.debian.org
            fi
        else
            echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
            echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." debian-l10n-devel@lists.alioth.debian.org
        fi
    else
        echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
        echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -s "You can't keep blaming yourself. Just blame yourself once, and move on." debian-l10n-devel@lists.alioth.debian.org
    fi
}

# Process the accepted queue
function accepted() {
    log "Processing queue/accepted"
    rm -f "$accepted/REPORT"
    dak process-accepted -pa -d "$accepted" > "$accepted/REPORT"
    cat "$accepted/REPORT" | mail -s "Install for $(date +"%D - %R")" ftpmaster@ftp-master.debian.org
    chgrp debadmin "$accepted/REPORT"
    chmod 664 "$accepted/REPORT"
}

# Override cruft check plus dangling-symlink cleanup in the archive tree.
function cruft() {
    log "Checking for cruft in overrides"
    dak check-overrides
    log "Fixing symlinks in $ftpdir"
    symlinks -d -r $ftpdir
}

# Generate the per-suite file lists apt-ftparchive consumes.
function msfl() {
    log "Generating suite file lists for apt-ftparchive"
    dak make-suite-file-list
}

# Import the debian and debian-maintainers keyrings; mail a summary of any
# DM keyring changes.
function fingerprints() {
    log "Updating fingerprints"
    dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg

    OUTFILE=$(mktemp)
    dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"

    if [ -s "${OUTFILE}" ]; then
        # NOTE(review): the "<<EOF" heredoc operator and the From:/To: mail
        # addresses (angle-bracketed) were eaten when this file was mangled;
        # the operator is restored here and the addresses are a best guess --
        # confirm against VCS before relying on them.
        /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
To: <debian-project@lists.debian.org>
Subject: Debian Maintainers Keyring changes
Content-Type: text/plain; charset=utf-8
MIME-Version: 1.0

The following changes to the debian-maintainers keyring have just been activated:

$(cat $OUTFILE)

Debian distribution maintenance software,
on behalf of the Keyring maintainers

EOF
    fi
    rm -f "$OUTFILE"
}

# Dump override data into text files, plus the combined override.sid.all3.
function overrides() {
    log "Writing overrides into text files"
    cd $overridedir
    dak make-overrides

    # FIXME
    rm -f override.sid.all3
    for i in main contrib non-free main.debian-installer; do cat override.sid.$i >> override.sid.all3; done
}

# Export the package -> file mapping, bzip2-compressed, into the indices dir.
function mpfm() {
    log "Generating package / file mapping"
    dak make-pkg-file-mapping | bzip2 -9 > $base/ftp/indices/package-file.map.bz2
}

# Run apt-ftparchive over the main apt.conf to build Packages/Sources.
function packages() {
    log "Generating Packages and Sources files"
    cd $configdir
    apt-ftparchive generate apt.conf
}

# Build the pdiff (index diff) files.
function pdiff() {
    log "Generating pdiff files"
    dak generate-index-diffs
}

# Build the Release files.
function release() {
    log "Generating Release files"
    dak generate-releases
}

# Expire old packages/files from suites and queues.
function dakcleanup() {
    log "Cleanup old packages/files"
    dak clean-suites -m 10000
    dak clean-queues
}

function buildd() {
    # Needs to be rebuilt, as files have moved. Due to unaccepts, we need to
    # update this before wanna-build is updated.
    log "Regenerating wanna-build/buildd information"
    psql projectb -A -t -q -c "SELECT filename FROM queue_build WHERE suite = 5 AND queue = 0 AND in_queue = true AND filename ~ 'd(sc|eb)$'" > $dbdir/dists/unstable_accepted.list
    symlinks -d /srv/incoming.debian.org/buildd > /dev/null
    apt-ftparchive generate apt.conf.buildd
}

function buildd_dir() {
    # Rebuilt the buildd dir to avoid long times of 403
    log "Regenerating the buildd incoming dir"
    STAMP=$(date "+%Y%m%d%H%M")
    make_buildd_dir
}

# Run the misc maintenance scripts (maintainers file, overrides copy, ls-lR,
# file indices, checksums).
function scripts() {
    log "Running various scripts from $scriptsdir"
    cd $scriptsdir
    ./mkmaintainers
    ./copyoverrides
    ./mklslar
    ./mkfilesindices
    ./mkchecksums
}

# Hardlink-sync the public mirror tree from the master ftp tree.
function mirror() {
    echo "Regenerating \"public\" mirror/ hardlink fun"
    cd ${mirrordir}
    rsync -aH --link-dest ${ftpdir} --exclude Archive_Maintenance_In_Progress --delete --delete-after --ignore-errors ${ftpdir}/. .
}

# Kick off the daily wanna-build run; failure is reported by mail, not fatal.
function wb() {
    log "Trigger daily wanna-build run"
    ssh -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 wbadm@buildd /org/wanna-build/trigger.daily || echo "W-B trigger.daily failed" | mail -s "W-B Daily trigger failed" ftpmaster@ftp-master.debian.org
}

# Expire old database dumps per the expire_dumps policy script.
function expire() {
    log "Expiring old database dumps..."
    cd $base/backup
    $scriptsdir/expire_dumps -d . -p -f "dump_*"
}

# Remove out-of-date entries from the transitions file.
function transitionsclean() {
    log "Removing out of date transitions..."
    cd $base
    dak transitions -c -a
}

function reports() {
    # Send a report on NEW/BYHAND packages
    log "Nagging ftpteam about NEW/BYHAND packages"
    dak queue-report | mail -e -s "NEW and BYHAND on $(date +%D)" ftpmaster@ftp-master.debian.org
    # and one on crufty packages
    log "Sending information about crufty packages"
    dak cruft-report > $webdir/cruft-report-daily.txt
    dak cruft-report -s experimental >> $webdir/cruft-report-daily.txt
    cat $webdir/cruft-report-daily.txt | mail -e -s "Debian archive cruft report for $(date +%D)" ftpmaster@ftp-master.debian.org
}

# Regenerate the DM uploaders HTML page.
function dm() {
    log "Updating DM html page"
    $scriptsdir/dm-monitor >$webdir/dm-uploaders.html
}

# Auto-categorize uncategorized ftp.debian.org bugs.
function bts() {
    log "Categorizing uncategorized bugs filed against ftp.debian.org"
    dak bts-categorize
}

function merkel2() {
    # Push dak@merkel so it syncs the projectb there. Returns immediately, the sync runs detached
    log "Trigger merkel/flotows projectb sync"
    ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_projectb dak@merkel.debian.org sleep 1
    # Also trigger flotow, the ftpmaster test box
    ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_flotow_projectb dak@flotow.debconf.org sleep 1
}

function merkel3() {
    # Push dak@merkel to tell it to sync the dd accessible parts. Returns
    # immediately, the sync runs detached
    log "Trigger merkels dd accessible parts sync"
    ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_ddaccess dak@merkel.debian.org sleep 1
}

# Run whatever hook scripts live in $base/scripts/distmnt.
function runparts() {
    log "Using run-parts to run scripts in $base/scripts/distmnt"
    run-parts --report $base/scripts/distmnt
}

# The second i18n export: publish per-suite package lists, signed timestamp
# and md5sums for the i18n project, then expire old exports.
function i18n2() {
    log "Exporting package data foo for i18n project"
    STAMP=$(date "+%Y%m%d%H%M")
    mkdir -p ${scriptdir}/i18n/${STAMP}
    cd ${scriptdir}/i18n/${STAMP}
    dak control-suite -l stable > lenny
    dak control-suite -l testing > squeeze
    dak control-suite -l unstable > sid
    echo "${STAMP}" > timestamp
    gpg --secret-keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 55BE302B --detach-sign -o timestamp.gpg timestamp
    rm -f md5sum
    md5sum * > md5sum
    cd ${webdir}/
    ln -sfT ${scriptdir}/i18n/${STAMP} i18n

    cd ${scriptdir}
    # expire exports older than 2 days, but never the one we just made
    find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
}

# Update the ftpstats graphs/data and the arch-space / pkg-nums pages.
function stats() {
    log "Updating stats data"
    cd $configdir
    $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
    R --slave --vanilla < $base/misc/ftpstats.R
    dak stats arch-space > $webdir/arch-space
    dak stats pkg-nums > $webdir/pkg-nums
}

# Clean apt-ftparchive's cache databases.
function aptftpcleanup() {
    log "Clean up apt-ftparchive's databases"
    cd $configdir
    apt-ftparchive -q clean apt.conf
}

# Delete old pre-dinstall dumps and bzip2 any uncompressed dumps older than
# 12 hours; deduplicate identical backups with finddup.
function compress() {
    log "Compress old psql backups"
    cd $base/backup/
    find -maxdepth 1 -mindepth 1 -type f -name 'dump_pre_*' -mtime +2 -print0 | xargs -0 --no-run-if-empty rm

    find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' -mmin +720 | while read dumpname; do
        echo "Compressing $dumpname"
        bzip2 -9fv "$dumpname"
    done
    find -maxdepth 1 -mindepth 1 -type f -name "dumpall_*" \! -name '*.bz2' \! -name '*.gz' -mmin +720 | while read dumpname; do
        echo "Compressing $dumpname"
        bzip2 -9fv "$dumpname"
    done
    finddup -l -d $base/backup
}

# Feed a finished logfile into the log statistics tool.
function logstats() {
    $masterdir/tools/logs.py "$1"
}

# save timestamp when we start
function savetimestamp() {
    NOW=`date "+%Y.%m.%d-%H:%M:%S"`
    echo ${NOW} > "${dbdir}/dinstallstart"
}

# Mail the whole dinstall log (currently unused, see renamelogfile).
function maillogfile() {
    cat "$LOGFILE" | mail -s "Log for dinstall run of ${NOW}" cron@ftp-master.debian.org
}

# Rename the logfile to carry the start timestamp, run stats on it and
# compress it. Falls back to "now" if the start marker is missing.
function renamelogfile() {
    if [ -f "${dbdir}/dinstallstart" ]; then
        NOW=$(cat "${dbdir}/dinstallstart")
#        maillogfile
        mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
        logstats "$logdir/dinstall_${NOW}.log"
        bzip2 -9 "$logdir/dinstall_${NOW}.log"
    else
        error "Problem, I don't know when dinstall started, unable to do log statistics."
        NOW=`date "+%Y.%m.%d-%H:%M:%S"`
#        maillogfile
        mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
        bzip2 -9 "$logdir/dinstall_${NOW}.log"
    fi
}

# Export the list of source packages in testing for britney et al.
function testingsourcelist() {
    dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
}

# do a last run of process-unchecked before dinstall is on.
function process_unchecked() {
    log "Processing the unchecked queue"
    acceptnew
    UNCHECKED_WITHOUT_LOCK="-p"
    do_unchecked
    sync_debbugs
}

########################################################################
########################################################################

# Function to save which stage we are in, so we can restart an interrupted
# dinstall. Or even run actions in parallel, if we dare to, by simply
# backgrounding the call to this function. But that should only really be
# done for things we don't care much about.
#
# This should be called with the first argument being an array, with the
# members
#  - FUNC - the function name to call
#  - ARGS - Possible arguments to hand to the function. Can be the empty string
#  - TIME - The timestamp name. Can be the empty string
#  - ERR  - if this is the string false, then the call will be surrounded by
#           set +e ... set -e calls, so errors in the function do not exit
#           dinstall. Can be the empty string, meaning true.
#
# MAKE SURE TO KEEP THIS THE LAST FUNCTION, AFTER ALL THE VARIOUS ONES
# ADDED FOR DINSTALL FEATURES!
function stage() {
    # Pull FUNC/ARGS/TIME/ERR out of the global GO array via indirection.
    ARGS='GO[@]'
    local "${!ARGS}"

    error=${ERR:-"true"}

    STAGEFILE="${stagedir}/${FUNC}"
    if [ -f "${STAGEFILE}" ]; then
        # Already ran in this dinstall; warn if the stagefile looks stale.
        stamptime=$(/usr/bin/stat -c %Z "${STAGEFILE}")
        unixtime=$(date +%s)
        difference=$(( $unixtime - $stamptime ))
        if [ ${difference} -ge 14400 ]; then
            log_error "Did already run ${FUNC}, stagefile exists, but that was ${difference} seconds ago. Please check."
        else
            log "Did already run ${FUNC}, not calling again..."
        fi
        return
    fi

    debug "Now calling function ${FUNC}. Arguments: ${ARGS}. Timestamp: ${TIME}"

    # Make sure we are always at the same place. If a function wants to be elsewhere,
    # it has to cd first!
    cd ${configdir}

    # Now redirect the output into $STAGEFILE.log. In case it errors out somewhere our
    # errorhandler trap can then mail the contents of $STAGEFILE.log only, instead of a whole
    # dinstall logfile. Short error mails ftw!
    exec >> "${STAGEFILE}.log" 2>&1

    if [ -f "${LOCK_STOP}" ]; then
        log "${LOCK_STOP} exists, exiting immediately"
        exit 42
    fi

    if [ "${error}" = "false" ]; then
        set +e
    fi
    ${FUNC} ${ARGS}

    # No matter what happened in the function, we make sure we have set -e default state back
    set -e

    # Make sure we are always at the same place.
    cd ${configdir}

    touch "${STAGEFILE}"

    if [ -n "${TIME}" ]; then
        ts "${TIME}"
    fi

    # And the output goes back to the normal logfile
    exec >> "$LOGFILE" 2>&1

    # Now we should make sure that we have a usable dinstall.log, so append the $STAGEFILE.log
    # to it.
    cat "${STAGEFILE}.log" >> "${LOGFILE}"
    rm -f "${STAGEFILE}.log"

    if [ -f "${LOCK_STOP}" ]; then
        log "${LOCK_STOP} exists, exiting immediately"
        exit 42
    fi
}

########################################################################

# We need logs.
LOGFILE="$logdir/dinstall.log"
exec >> "$LOGFILE" 2>&1

# usually we are not using debug logs. Set to 1 if you want them.
DEBUG=0
# our name
PROGRAM="dinstall"
# where do we want mails to go? For example log entries made with error()
if [ "x$(hostname -s)x" != "xriesx" ]; then
    # Not our ftpmaster host
    MAILTO=${MAILTO:-"root"}
else
    # Yay, ftpmaster
    MAILTO=${MAILTO:-"ftpmaster@debian.org"}
fi
# How many logfiles to keep
LOGROTATE=${LOGROTATE:-400}

# Marker for dinstall start
DINSTALLSTART="${lockdir}/dinstallstart"
# Marker for dinstall end
DINSTALLEND="${lockdir}/dinstallend"

touch "${DINSTALLSTART}"
ts "startup"

# Tell everyone we are doing some work
NOTICE="$ftpdir/Archive_Maintenance_In_Progress"
# lock cron.unchecked (it immediately exits when this exists)
LOCK_DAILY="$lockdir/daily.lock"
# Lock cron.unchecked from doing work
LOCK_ACCEPTED="$lockdir/unchecked.lock"
# Lock process-new from doing work
LOCK_NEW="$lockdir/processnew.lock"
# This file is simply used to indicate to britney whether or not
# the Packages file updates completed sucessfully. It's not a lock
# from our point of view
LOCK_BRITNEY="$lockdir/britney.lock"
# If this file exists we exit immediately after the currently running
# function is done
LOCK_STOP="$lockdir/archive.stop"

lockfile -l 3600 "${LOCK_DAILY}"
trap onerror ERR
trap cleanup EXIT TERM HUP INT QUIT

touch "${LOCK_BRITNEY}"

GO=( FUNC="savetimestamp" TIME="" ARGS="" ERR="false" )
stage $GO

GO=( FUNC="notice" TIME="" ARGS="" ERR="false" )
stage $GO

GO=( FUNC="merkel1" TIME="init" ARGS="" ERR="false" )
stage $GO &

GO=( FUNC="pgdump_pre" TIME="pg_dump1" ARGS="" ERR="" )
stage $GO

GO=( FUNC="updates" TIME="External Updates" ARGS="" ERR="false" )
stage $GO

GO=( FUNC="punew" TIME="p-u-new" ARGS="p-u-new" ERR="" )
stage $GO

GO=( FUNC="opunew" TIME="o-p-u-new" ARGS="o-p-u-new" ERR="" )
stage $GO

GO=( FUNC="i18n1" TIME="i18n 1" ARGS="" ERR="false" )
stage $GO

# Block cron.unchecked and process-new while we install the accepted queue.
lockfile "$LOCK_ACCEPTED"
lockfile "$LOCK_NEW"

GO=( FUNC="process_unchecked" TIME="" ARGS="" ERR="" )
stage $GO

GO=( FUNC="accepted" TIME="accepted" ARGS="" ERR="" )
stage $GO
# The accepted queue is installed; rebuild the buildd incoming dir and do
# cruft/symlink cleanup while the accepted/new locks are still held.
GO=( FUNC="buildd_dir" TIME="buildd_dir" ARGS="" ERR="false" )
stage $GO

GO=( FUNC="cruft" TIME="cruft" ARGS="" ERR="" )
stage $GO

# Installation done -- let cron.unchecked and process-new work again.
rm -f "$LOCK_ACCEPTED"
rm -f "$LOCK_NEW"

# Index generation: suite file lists, keyrings, overrides, the
# Packages/Sources run, pdiffs and Release files.
GO=( FUNC="msfl" TIME="make-suite-file-list" ARGS="" ERR="" )
stage $GO

GO=( FUNC="fingerprints" TIME="import-keyring" ARGS="" ERR="false" )
stage $GO

GO=( FUNC="overrides" TIME="overrides" ARGS="" ERR="" )
stage $GO

GO=( FUNC="mpfm" TIME="pkg-file-mapping" ARGS="" ERR="false" )
stage $GO

GO=( FUNC="packages" TIME="apt-ftparchive" ARGS="" ERR="" )
stage $GO

GO=( FUNC="pdiff" TIME="pdiff" ARGS="" ERR="" )
stage $GO

GO=( FUNC="release" TIME="release files" ARGS="" ERR="" )
stage $GO

GO=( FUNC="dakcleanup" TIME="cleanup" ARGS="" ERR="" )
stage $GO

GO=( FUNC="buildd" TIME="buildd" ARGS="" ERR="" )
stage $GO

GO=( FUNC="scripts" TIME="scripts" ARGS="" ERR="" )
stage $GO

GO=( FUNC="mirror" TIME="mirror hardlinks" ARGS="" ERR="" )
stage $GO

GO=( FUNC="wb" TIME="w-b" ARGS="" ERR="" )
stage $GO

# Archive is consistent again: drop the maintenance notice and the daily
# lock so mirrors and cron.unchecked may resume.
rm -f "${NOTICE}"
rm -f "${LOCK_DAILY}"

ts "locked part finished"

# Post-lock housekeeping: backups, expiry, reports and misc exports.
GO=( FUNC="pgdump_post" TIME="pg_dump2" ARGS="" ERR="" )
stage $GO

GO=( FUNC="expire" TIME="expire_dumps" ARGS="" ERR="" )
stage $GO

GO=( FUNC="transitionsclean" TIME="transitionsclean" ARGS="" ERR="" )
stage $GO

GO=( FUNC="reports" TIME="reports" ARGS="" ERR="" )
stage $GO

GO=( FUNC="dm" TIME="" ARGS="" ERR="" )
stage $GO

GO=( FUNC="bts" TIME="" ARGS="" ERR="false" )
stage $GO

GO=( FUNC="merkel2" TIME="merkel projectb push" ARGS="" ERR="false" )
stage $GO

GO=( FUNC="runparts" TIME="run-parts" ARGS="" ERR="false" )
stage $GO

GO=( FUNC="i18n2" TIME="i18n 2" ARGS="" ERR="false" )
stage $GO

GO=( FUNC="stats" TIME="stats" ARGS="" ERR="false" )
stage $GO

GO=( FUNC="testingsourcelist" TIME="" ARGS="" ERR="false" )
stage $GO

# Signal britney that the Packages update part is done.
rm -f ${LOCK_BRITNEY}

# Backgrounded: reload the dak-dev projectb from the fresh dumps.
GO=( FUNC="pgdakdev" TIME="dak-dev db" ARGS="" ERR="false" )
stage $GO &

GO=( FUNC="aptftpcleanup" TIME="apt-ftparchive cleanup" ARGS="" ERR="false" )
stage $GO

GO=( FUNC="merkel3" TIME="merkel ddaccessible sync" ARGS="" ERR="false" )
stage $GO

GO=( FUNC="compress" TIME="compress" ARGS="" ERR="" )
stage $GO

log "Daily cron scripts successful, all done"

# Anything after this point goes into a fresh logfile, so renamelogfile can
# move the dinstall log away undisturbed.
exec > "$logdir/afterdinstall.log" 2>&1

GO=( FUNC="renamelogfile" TIME="" ARGS="" ERR="false" )
stage $GO

# Now, at the very (successful) end of dinstall, make sure we remove
# our stage files, so the next dinstall run will do it all again.
rm -f ${stagedir}/*
touch "${DINSTALLEND}"