X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=config%2Fdebian%2Fcron.dinstall;h=1c9fa5afefff3284fd27f860fcb63d40d5aad16d;hb=1fa1f22b70c6ee46aea78ee40b9797a574d7c583;hp=cc2db8adcc0f7985dc6d3d22980b6a48404059a2;hpb=11dce36cabea5fa916ef6da415b4669898ba9ff7;p=dak.git diff --git a/config/debian/cron.dinstall b/config/debian/cron.dinstall index cc2db8ad..1c9fa5af 100755 --- a/config/debian/cron.dinstall +++ b/config/debian/cron.dinstall @@ -16,10 +16,23 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. +# Homer: Are you saying you're never going to eat any animal again? What +# about bacon? +# Lisa: No. +# Homer: Ham? +# Lisa: No. +# Homer: Pork chops? +# Lisa: Dad, those all come from the same animal. +# Homer: Heh heh heh. Ooh, yeah, right, Lisa. A wonderful, magical animal. + # exit on errors set -e # make sure to only use defined variables set -u +# ERR traps should be inherited from functions too. (And command +# substitutions and subshells and whatnot, but for us the functions is +# the important part here) +set -E # import the general variable set. export SCRIPTVARS=/srv/ftp.debian.org/dak/config/debian/vars @@ -33,8 +46,7 @@ export SCRIPTVARS=/srv/ftp.debian.org/dak/config/debian/vars # Timestamp. Used for dinstall stat graphs function ts() { - TS=$(($TS+1)); - echo "Archive maintenance timestamp $TS ($1): $(date +%H:%M:%S)" + echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)" } # Cleanup actions @@ -46,7 +58,16 @@ function cleanup() { # If we error out this one is called, *FOLLOWED* by cleanup above function onerror() { ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S") - cat "$LOGFILE" | mail -s "ATTENTION ATTENTION! dinstall error at ${ERRDATE} (Be quiet, Brain, or I'll stab you with a Q-tip)" cron@ftp-master.debian.org + + subject="ATTENTION ATTENTION!" + if [ "${error}" = "false" ]; then + subject="${subject} (continued)" + else + subject="${subject} (interrupted)" + fi + subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)" + + cat "${STAGEFILE}.log" | mail -s "${subject}" -a "X-Debian: DAK" cron@ftp-master.debian.org } ######################################################################## @@ -61,7 +82,9 @@ Packages are currently being installed and indices rebuilt. Maintenance is automatic, starting at 01|07|13|19:52 UTC, and ending about an hour later. This file is then removed. -You should not mirror the archive during this period. +You should not mirror the archive during this period. If you find this +file on a Debian mirror please have a nice talk with the admin. They +are doing something wrong. EOF } @@ -74,7 +97,7 @@ function merkel1() { # Create the postgres dump files function pgdump_pre() { log "Creating pre-daily-cron-job backup of projectb database..." 
-    pg_dump projectb > $base/backup/dump_$(date +%Y.%m.%d-%H:%M:%S)
+    pg_dump projectb > $base/backup/dump_pre_$(date +%Y.%m.%d-%H:%M:%S)
 }
 
 function pgdump_post() {
@@ -187,6 +210,28 @@ function msfl() {
 function fingerprints() {
     log "Updating fingerprints"
     dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
+
+    OUTFILE=$(mktemp)
+    dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
+
+    if [ -s "${OUTFILE}" ]; then
+        /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
+To:
+Subject: Debian Maintainers Keyring changes
+Content-Type: text/plain; charset=utf-8
+MIME-Version: 1.0
+
+The following changes to the debian-maintainers keyring have just been activated:
+
+$(cat $OUTFILE)
+
+Debian distribution maintenance software,
+on behalf of the Keyring maintainers
+
+EOF
+    fi
+    rm -f "$OUTFILE"
 }
 
 function overrides() {
@@ -222,7 +267,7 @@ function release() {
 
 function dakcleanup() {
     log "Cleanup old packages/files"
-    dak clean-suites
+    dak clean-suites -m 10000
     dak clean-queues
 }
 
@@ -235,6 +280,13 @@ function buildd() {
     apt-ftparchive generate apt.conf.buildd
 }
 
+function buildd_dir() {
+    # Rebuilt the buildd dir to avoid long times of 403
+    log "Regenerating the buildd incoming dir"
+    STAMP=$(date "+%Y%m%d%H%M")
+    make_buildd_dir
+}
+
 function scripts() {
     log "Running various scripts from $scriptsdir"
     cd $scriptsdir
@@ -248,7 +300,7 @@ function scripts() {
 function mirror() {
     echo "Regenerating \"public\" mirror/ hardlink fun"
     cd ${mirrordir}
-    rsync -aH --link-dest ${ftpdir} --delete --delete-after --ignore-errors ${ftpdir}/. .
+    rsync -aH --link-dest ${ftpdir} --exclude Archive_Maintenance_In_Progress --delete --delete-after --ignore-errors ${ftpdir}/. .
 }
 
 function wb() {
@@ -262,6 +314,12 @@ function expire() {
     $scriptsdir/expire_dumps -d . -p -f "dump_*"
 }
 
+function transitionsclean() {
+    log "Removing out of date transitions..."
+    cd $base
+    dak transitions -c -a
+}
+
 function reports() {
     # Send a report on NEW/BYHAND packages
     log "Nagging ftpteam about NEW/BYHAND packages"
@@ -285,13 +343,24 @@ function bts() {
 
 function merkel2() {
     # Push dak@merkel so it syncs the projectb there. Returns immediately, the sync runs detached
-    log "Trigger merkels projectb sync"
+    log "Trigger merkel/flotows projectb sync"
     ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_projectb dak@merkel.debian.org sleep 1
+    # Also trigger flotow, the ftpmaster test box
+    ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_flotow_projectb dak@flotow.debconf.org sleep 1
 }
 
-function runparts() {
-    log "Using run-parts to run scripts in $base/scripts/distmnt"
-    run-parts --report $base/scripts/distmnt
+function merkel3() {
+    # Push dak@merkel to tell it to sync the dd accessible parts. Returns immediately, the sync runs detached
+    log "Trigger merkels dd accessible parts sync"
+    ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_ddaccess dak@merkel.debian.org sleep 1
+}
+
+function mirrorpush() {
+    log "Starting the mirrorpush"
+    date -u > /srv/ftp.debian.org/web/mirrorstart
+    echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
+    echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/mirrorstart
+    sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
 }
 
 function i18n2() {
@@ -303,7 +372,7 @@ function i18n2() {
     dak control-suite -l testing > squeeze
     dak control-suite -l unstable > sid
     echo "${STAMP}" > timestamp
-    gpg --secret-keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 6070D3A1 --detach-sign -o timestamp.gpg timestamp
+    gpg --secret-keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 55BE302B --detach-sign -o timestamp.gpg timestamp
     rm -f md5sum
     md5sum * > md5sum
     cd ${webdir}/
@@ -318,6 +387,8 @@ function stats() {
     cd $configdir
     $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
     R --slave --vanilla < $base/misc/ftpstats.R
+    dak stats arch-space > $webdir/arch-space
+    dak stats pkg-nums > $webdir/pkg-nums
 }
 
 function aptftpcleanup() {
@@ -329,11 +400,19 @@ function compress() {
     log "Compress old psql backups"
     cd $base/backup/
-    find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' -mtime +1 |
+    find -maxdepth 1 -mindepth 1 -type f -name 'dump_pre_*' -mtime +2 -print0 | xargs -0 --no-run-if-empty rm
+
+    find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
     while read dumpname; do
         echo "Compressing $dumpname"
-        bzip2 -9v "$dumpname"
+        bzip2 -9fv "$dumpname"
     done
+    find -maxdepth 1 -mindepth 1 -type f -name "dumpall_*" \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
+    while read dumpname; do
+        echo "Compressing $dumpname"
+        bzip2 -9fv "$dumpname"
+    done
+    finddup -l -d $base/backup
 }
 
 function logstats() {
@@ -353,14 +432,14 @@ function maillogfile() {
 function renamelogfile() {
     if [ -f "${dbdir}/dinstallstart" ]; then
         NOW=$(cat "${dbdir}/dinstallstart")
-        maillogfile
-        mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
+#        maillogfile
+        mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
         logstats "$logdir/dinstall_${NOW}.log"
         bzip2 -9 "$logdir/dinstall_${NOW}.log"
     else
         error "Problem, I don't know when dinstall started, unable to do log statistics."
         NOW=`date "+%Y.%m.%d-%H:%M:%S"`
-        maillogfile
+#        maillogfile
         mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
         bzip2 -9 "$logdir/dinstall_${NOW}.log"
     fi
@@ -369,19 +448,29 @@ function renamelogfile() {
 function testingsourcelist() {
     dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
 }
+
+# do a last run of process-unchecked before dinstall is on.
+function process_unchecked() {
+    log "Processing the unchecked queue"
+    acceptnew
+    UNCHECKED_WITHOUT_LOCK="-p"
+    do_unchecked
+    sync_debbugs
+}
+
 ########################################################################
 ########################################################################
 
 # Function to save which stage we are in, so we can restart an interrupted
 # dinstall. Or even run actions in parallel, if we dare to, by simply
 # backgrounding the call to this function. But that should only really be
-# done for things we dont care much about.
+# done for things we don't care much about.
 #
 # This should be called with the first argument being an array, with the
 # members
 #  - FUNC - the function name to call
 #  - ARGS - Possible arguments to hand to the function. Can be the empty string
-#  - TS   - The timestamp name. Can be the empty string
+#  - TIME - The timestamp name. Can be the empty string
 #  - ERR  - if this is the string false, then the call will be surrounded by
 #           set +e ... set -e calls, so errors in the function do not exit
 #           dinstall. Can be the empty string, meaning true.
@@ -392,8 +481,11 @@ function stage() {
     ARGS='GO[@]'
     local "${!ARGS}"
 
-    if [ -f "${stagedir}/${FUNC}" ]; then
-        stamptime=$(/usr/bin/stat -c %Z "${stagedir}/${FUNC}")
+    error=${ERR:-"true"}
+
+    STAGEFILE="${stagedir}/${FUNC}"
+    if [ -f "${STAGEFILE}" ]; then
+        stamptime=$(/usr/bin/stat -c %Z "${STAGEFILE}")
         unixtime=$(date +%s)
         difference=$(( $unixtime - $stamptime ))
         if [ ${difference} -ge 14400 ]; then
@@ -404,18 +496,23 @@
         return
     fi
 
-    debug "Now calling function ${FUNC}. Arguments: ${ARGS}. Timestamp: ${TS}"
+    debug "Now calling function ${FUNC}. Arguments: ${ARGS}. Timestamp: ${TIME}"
 
     # Make sure we are always at the same place. If a function wants to be elsewhere,
     # it has to cd first!
     cd ${configdir}
 
+    # Now redirect the output into $STAGEFILE.log. In case it errors out somewhere our
+    # errorhandler trap can then mail the contents of $STAGEFILE.log only, instead of a whole
+    # dinstall logfile. Short error mails ftw!
+    exec >> "${STAGEFILE}.log" 2>&1
+
     if [ -f "${LOCK_STOP}" ]; then
         log "${LOCK_STOP} exists, exiting immediately"
         exit 42
     fi
 
-    if [ "${ERR}" = "false" ]; then
+    if [ "${error}" = "false" ]; then
         set +e
     fi
     ${FUNC} ${ARGS}
@@ -426,12 +523,20 @@
     # Make sure we are always at the same place.
     cd ${configdir}
 
-    touch "${stagedir}/${FUNC}"
+    touch "${STAGEFILE}"
 
     if [ -n "${TIME}" ]; then
         ts "${TIME}"
     fi
 
+    # And the output goes back to the normal logfile
+    exec >> "$LOGFILE" 2>&1
+
+    # Now we should make sure that we have a usable dinstall.log, so append the $STAGEFILE.log
+    # to it.
+    cat "${STAGEFILE}.log" >> "${LOGFILE}"
+    rm -f "${STAGEFILE}.log"
+
     if [ -f "${LOCK_STOP}" ]; then
         log "${LOCK_STOP} exists, exiting immediately"
         exit 42
     fi
@@ -468,8 +573,6 @@ DINSTALLSTART="${lockdir}/dinstallstart"
 # Marker for dinstall end
 DINSTALLEND="${lockdir}/dinstallend"
 
-# Timestamps start at -1. so first gets 0
-TS=-1
 touch "${DINSTALLSTART}"
 ts "startup"
 
@@ -479,9 +582,12 @@ NOTICE="$ftpdir/Archive_Maintenance_In_Progress"
 # lock cron.unchecked (it immediately exits when this exists)
 LOCK_DAILY="$lockdir/daily.lock"
 
-# Lock process-new and cron.unchecked from doing work
+# Lock cron.unchecked from doing work
 LOCK_ACCEPTED="$lockdir/unchecked.lock"
 
+# Lock process-new from doing work
+LOCK_NEW="$lockdir/processnew.lock"
+
 # This file is simply used to indicate to britney whether or not
 # the Packages file updates completed sucessfully. It's not a lock
 # from our point of view
@@ -519,7 +625,7 @@ GO=(
     ARGS=""
     ERR="false"
 )
-stage $GO
+stage $GO &
 
 GO=(
     FUNC="pgdump_pre"
@@ -535,7 +641,7 @@ GO=(
     ARGS=""
     ERR="false"
 )
-stage $GO
+stage $GO &
 
 GO=(
     FUNC="punew"
@@ -559,9 +665,19 @@ GO=(
     ARGS=""
     ERR="false"
 )
-stage $GO
+stage $GO &
 
 lockfile "$LOCK_ACCEPTED"
+lockfile "$LOCK_NEW"
+
+GO=(
+    FUNC="process_unchecked"
+    TIME=""
+    ARGS=""
+    ERR=""
+)
+stage $GO
+
 
 GO=(
     FUNC="accepted"
@@ -571,6 +687,14 @@ GO=(
 )
 stage $GO
 
+GO=(
+    FUNC="buildd_dir"
+    TIME="buildd_dir"
+    ARGS=""
+    ERR="false"
+)
+stage $GO
+
 GO=(
     FUNC="cruft"
     TIME="cruft"
@@ -580,6 +704,7 @@ GO=(
 stage $GO
 
 rm -f "$LOCK_ACCEPTED"
+rm -f "$LOCK_NEW"
 
 GO=(
     FUNC="msfl"
@@ -595,7 +720,7 @@ GO=(
     ARGS=""
     ERR="false"
 )
-stage $GO
+stage $GO &
 
 GO=(
     FUNC="overrides"
@@ -675,7 +800,7 @@ GO=(
     ARGS=""
     ERR=""
 )
-stage $GO
+stage $GO &
 
 rm -f "${NOTICE}"
 rm -f "${LOCK_DAILY}"
@@ -688,23 +813,23 @@ GO=(
     ARGS=""
     ERR=""
 )
-stage $GO
+stage $GO &
 
 GO=(
-    FUNC="pgdakdev"
-    TIME="dak-dev db"
+    FUNC="expire"
+    TIME="expire_dumps"
     ARGS=""
-    ERR="false"
+    ERR=""
 )
-stage $GO
+stage $GO &
 
 GO=(
-    FUNC="expire"
-    TIME="expire_dumps"
+    FUNC="transitionsclean"
+    TIME="transitionsclean"
     ARGS=""
     ERR=""
 )
-stage $GO
+stage $GO &
 
 GO=(
     FUNC="reports"
@@ -712,7 +837,7 @@ GO=(
     ARGS=""
     ERR=""
 )
-stage $GO
+stage $GO &
 
 GO=(
     FUNC="dm"
@@ -720,7 +845,7 @@ GO=(
     ARGS=""
     ERR=""
 )
-stage $GO
+stage $GO &
 
 GO=(
     FUNC="bts"
@@ -728,7 +853,7 @@ GO=(
     ARGS=""
     ERR="false"
 )
-stage $GO
+stage $GO &
 
 GO=(
     FUNC="merkel2"
@@ -736,11 +861,11 @@ GO=(
     ARGS=""
     ERR="false"
 )
-stage $GO
+stage $GO &
 
 GO=(
-    FUNC="runparts"
-    TIME="run-parts"
+    FUNC="mirrorpush"
+    TIME="mirrorpush"
     ARGS=""
     ERR="false"
 )
@@ -760,7 +885,7 @@ GO=(
     ARGS=""
     ERR="false"
 )
-stage $GO
+stage $GO &
 
 GO=(
     FUNC="testingsourcelist"
@@ -768,16 +893,33 @@ GO=(
     ARGS=""
     ERR="false"
 )
+stage $GO
 
 rm -f ${LOCK_BRITNEY}
 
+GO=(
+    FUNC="pgdakdev"
+    TIME="dak-dev db"
+    ARGS=""
+    ERR="false"
+)
+stage $GO &
+
 GO=(
     FUNC="aptftpcleanup"
     TIME="apt-ftparchive cleanup"
     ARGS=""
-    ERR=""
+    ERR="false"
 )
-stage $GO
+stage $GO &
+
+GO=(
+    FUNC="merkel3"
+    TIME="merkel ddaccessible sync"
+    ARGS=""
+    ERR="false"
+)
+stage $GO &
 
 GO=(
     FUNC="compress"
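
The GO/stage convention driving all of the call sites above is easier to follow outside a diff. What follows is a minimal, runnable bash sketch of the pattern under simplified assumptions: the stage functions hello and flaky, the mktemp-based stagedir and LOGFILE, and the small log helper are illustrative stand-ins that are not part of dak, and redirecting the single function call stands in for the exec-based redirection the patch adds to the real stage().

#!/bin/bash
# Sketch of the GO/stage pattern from cron.dinstall -- not part of the patch.
# The real script sources its variables from config/debian/vars; everything
# defined here is a stand-in so the example runs on its own.

set -e
set -u
set -E

stagedir=$(mktemp -d)   # the real script keeps stamp files under its lock directory
LOGFILE=$(mktemp)       # stand-in for the dinstall logfile

log () {
    echo "$(date "+%b %d %H:%M:%S") $*"
}

ts() {
    echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
}

# Simplified stage(): skip a function whose stamp file already exists (an
# interrupted run already finished it), otherwise call it with its output
# captured in a per-stage log, then drop the stamp and fold the log back in.
function stage() {
    ARGS='GO[@]'
    local "${!ARGS}"            # unpacks FUNC, TIME, ARGS and ERR from the GO array

    error=${ERR:-"true"}
    STAGEFILE="${stagedir}/${FUNC}"

    if [ -f "${STAGEFILE}" ]; then
        echo "${FUNC} already done, skipping" >> "${LOGFILE}"
        return 0
    fi

    if [ "${error}" = "false" ]; then
        set +e                  # a failing stage must not abort the whole run
    fi
    # The real stage() swaps the global output with exec >> "${STAGEFILE}.log";
    # redirecting the single call is enough for this sketch.
    ${FUNC} ${ARGS} >> "${STAGEFILE}.log" 2>&1
    if [ "${error}" = "false" ]; then
        set -e
    fi

    touch "${STAGEFILE}"
    if [ -n "${TIME}" ]; then
        ts "${TIME}"
    fi
    cat "${STAGEFILE}.log" >> "${LOGFILE}"
    rm -f "${STAGEFILE}.log"
}

# Two toy stages standing in for the real dinstall functions.
function hello() { log "hello stage ran"; }
function flaky() { log "flaky stage ran"; false; }

GO=(
    FUNC="hello"
    TIME="hello"
    ARGS=""
    ERR=""
)
stage $GO

GO=(
    FUNC="flaky"
    TIME=""
    ARGS=""
    ERR="false"
)
stage $GO

echo "--- combined log ---"
cat "${LOGFILE}"

Because every completed stage leaves a stamp file, re-running the script after an interruption skips everything that already finished, and stages that do not depend on each other can be pushed into the background with "stage $GO &" exactly as the call sites above do for the cheap, independent ones.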