X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=config%2Fdebian%2Fcron.daily;h=0b822bc5148d06b0bc90ee1739ef5f399090866b;hb=5fd1e87ecc948980add7e9b41da71f69a3be9844;hp=059020eb8d521c6c0048876ed3d0396e8be01e7c;hpb=d6ba8ed668f25bd6f6e06c7c6d1965e9b010236f;p=dak.git diff --git a/config/debian/cron.daily b/config/debian/cron.daily index 059020eb..0b822bc5 100755 --- a/config/debian/cron.daily +++ b/config/debian/cron.daily @@ -1,214 +1,86 @@ -#! /bin/sh +#! /bin/bash # -# Executed daily via cron, out of dak's crontab. +# Run daily via cron, out of dak's crontab. set -e -export SCRIPTVARS=/srv/ftp.debian.org/dak/config/debian/vars +set -o pipefail +set -u +export SCRIPTVARS=/srv/ftp-master.debian.org/dak/config/debian/vars . $SCRIPTVARS -################################################################################ - -echo Archive maintenance started at $(date +%X) -TS=0 - -NOTICE="$ftpdir/Archive_Maintenance_In_Progress" -LOCKCU="$lockdir/daily.lock" -LOCKAC="$lockdir/unchecked.lock" -lockac=0 - -cleanup() { - rm -f "$NOTICE" - rm -f "$LOCKCU" - if [ "$lockac" -eq "1" ]; then - rm -f "$LOCKAC" - fi -} -lockfile -l 3600 $LOCKCU -trap cleanup 0 - -rm -f "$NOTICE" -cat > "$NOTICE" < $base/backup/dump_$(date +%Y.%m.%d-%H:%M:%S) - -################################################################################ - -TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X) -cd $configdir -$scriptsdir/update-bugdoctxt -$scriptsdir/update-mirrorlists -$scriptsdir/update-mailingliststxt - -################################################################################ - -TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X) -cd $queuedir/p-u-new -date -u -R >> REPORT -dak process-new -a -C COMMENTS >> REPORT -echo >> REPORT - -TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X) -cd $queuedir/o-p-u-new -date -u -R >> REPORT -dak process-new -a -C COMMENTS >> REPORT -echo >> REPORT - -################################################################################ - -TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X) -lockfile $LOCKAC -lockac=1 -cd $accepted -rm -f REPORT -dak process-accepted -pa *.changes | tee REPORT | \ - mail -s "Install for $(date +%D)" ftpmaster@ftp-master.debian.org -chgrp debadmin REPORT -chmod 664 REPORT - -TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X) - -dak check-overrides -rm -f $LOCKAC -lockac=0 - -symlinks -d -r $ftpdir - -TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X) - -dak make-suite-file-list - -TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X) - -# Update fingerprints -# [JT - disabled, dak import-ldap-fingerprints currently can ask questions] -#dak import-ldap-fingerprints - -# Generate override files -cd $overridedir -dak make-overrides - -# Update task overrides for testing and unstable -# [JT 2004-02-04 disabled; copying in by hand for now] -#cat $extoverridedir/task | perl -ne 'print if /^\S+\sTask\s\S+(,\s*\S+)*$/;' > override.sarge.extra.main -#cat $extoverridedir/task | perl -ne 'print if /^\S+\sTask\s\S+(,\s*\S+)*$/;' > override.sid.extra.main - -# FIXME -rm -f override.sid.all3 -for i in main contrib non-free main.debian-installer; do cat override.sid.$i >> override.sid.all3; done - -TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X) - -# Generate Packages and Sources files -cd $configdir -apt-ftparchive generate apt.conf -# Generate *.diff/ incremental updates -dak generate-index-diffs -# Generate Release files 
-dak generate-releases - -TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X) - -# Clean out old packages -dak clean-suites -dak clean-queues - -TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X) - -# Needs to be rebuilt, as files have moved. Due to unaccepts, we need to -# update this before wanna-build is updated. -psql projectb -A -t -q -c "SELECT filename FROM queue_build WHERE suite = 5 AND queue = 0 AND in_queue = true AND filename ~ 'd(sc|eb)$'" > $dbdir/dists/unstable_accepted.list -symlinks -d /srv/incoming.debian.org/buildd > /dev/null -apt-ftparchive generate apt.conf.buildd - -TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X) - -cd $scriptsdir -./mkmaintainers -./copyoverrides -./mklslar -./mkfilesindices -./mkchecksums -# -# Fetch bugs information before unchecked processing is allowed again. -$base/testing/britney allowdaklock bugs || true -rm -f $NOTICE -ssh buildd@buildd /org/wanna-build/trigger.daily - -rm -f $LOCKCU -echo Archive maintenance finished at $(date +%X) - -################################################################################ - -echo "Creating post-daily-cron-job backup of projectb database..." -POSTDUMP=$base/backup/dump_$(date +%Y.%m.%d-%H:%M:%S) -pg_dump projectb > $POSTDUMP -(cd $base/backup; ln -sf $POSTDUMP current) - -################################################################################ - -TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X) - -# Vacuum the database -echo "VACUUM; VACUUM ANALYZE;" | psql projectb 2>&1 | grep -v "^NOTICE: Skipping.*only table owner can VACUUM it$" - -################################################################################ - -TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X) - -# Send a report on NEW/BYHAND packages -dak queue-report | mail -e -s "NEW and BYHAND on $(date +%D)" ftpmaster@ftp-master.debian.org -# and one on crufty packages -dak cruft-report | tee $webdir/cruft-report-daily.txt | mail -e -s "Debian archive cruft report for $(date +%D)" ftpmaster@ftp-master.debian.org +# common functions are "outsourced" +. "${configdir}/common" ################################################################################ -# Run mirror-split +TMPFILE=$( mktemp -p ${TMPDIR} ) +TMPCNTB=$( mktemp -p ${TMPDIR} ) -#time dak mirror-split +function cleanup { + ERRVAL=$? 
+    rm -f ${TMPFILE} ${TMPCNTB}
+    exit ${ERRVAL}
+}
+trap cleanup SIGHUP SIGINT SIGPIPE SIGTERM EXIT ERR
+
+# log to dinstall's logfile instead of sending email
+PROGRAM="cron.daily"
+# Start logging
+NOW=`date "+%Y.%m.%d-%H:%M:%S"`
+LOGFILE="$logdir/daily_${NOW}.log"
+exec >> "$LOGFILE" 2>&1
+
+# get the latest list of wnpp bugs and their source packages
+wget -q -O${TMPFILE} --ca-directory=/etc/ssl/ca-debian https://qa.debian.org/data/bts/wnpp_rm
+chmod go+r ${TMPFILE}
+mv ${TMPFILE} /srv/ftp-master.debian.org/scripts/masterfiles/wnpp_rm
+
+# Push files over to security
+# The key over there should have the following set for the ssh key:
+# command="/usr/bin/xzcat | /usr/bin/psql -1 -c 'DELETE FROM external_files; COPY external_files (id, filename, size, md5sum, last_used, sha1sum, sha256sum, created, modified) FROM STDIN' obscurity"
+psql -c 'COPY files (id, filename, size, md5sum, last_used, sha1sum, sha256sum, created, modified) TO STDOUT' projectb | \
+    xz -3 | \
+    ssh -o BatchMode=yes -o ConnectTimeout=30 -o SetupTimeout=30 -2 \
+        -i ${base}/s3kr1t/push_external_files dak@security-master.debian.org sync
+
+# Update wanna-build dump
+log "Update wanna-build database dump"
+$base/dak/scripts/nfu/get-w-b-db
+
+reports
+
+clean_debbugs
+
+# Generate list of override disparities
+dak override-disparity | gzip -9 > ${webdir}/override-disparity.gz
+
+# Generate stats about the new queue
+dak stats new ${webdir}/NEW-stats.yaml 2> /dev/null
+
+# Generate the contributor data
+# FIXME: In a day or three, when this worked from cron without
+# failure, redirect its output to dev/null. Alternatively until then
+# enrico added a --quiet and we use that.
+log "Submitting data to contributors"
+REQUESTS_CA_BUNDLE=/etc/ssl/ca-debian/ca-certificates.crt dc-tool --mine="${configdir}/contributor.source" --auth-token @"${base}/s3kr1t/contributor.auth" --source ftp.debian.org --json > ${TMPCNTB}
+
+# Post with curl as a workaround for #801506
+# See https://wiki.debian.org/ServicesSSL#curl
+dir=/etc/ssl/ca-debian
+test -d $dir && capath="--capath $dir"
+curl -s $capath https://contributors.debian.org/contributors/post \
+     -F source=ftp.debian.org \
+     -F auth_token="$(cat ${base}/s3kr1t/contributor.auth)" \
+     -F data=@${TMPCNTB} > ${TMPCNTB}.result
+cat ${TMPCNTB}.result
+rm -f ${TMPCNTB}.result
+
+
+${scriptsdir}/link_morgue.sh
 
 ################################################################################
 
-TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X)
-
-ulimit -m 90000 -d 90000 -s 10000 -v 200000
-
-run-parts --report $base/scripts/distmnt
-
-echo Daily cron scripts successful.
-
-TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X)
+log "Finally, all is done, compressing logfile"
+exec > /dev/null 2>&1
 
-# Stats pr0n
-cd $configdir
-$scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
-R --slave --vanilla < $base/misc/ftpstats.R
-
-TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X)
-
-# Clean up apt-ftparchive's databases
-
-cd $configdir
-apt-ftparchive -q clean apt.conf
-
-TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X)
-
-# Compress psql backups older than a month, but no more than 20 of them
-
-(cd $base/backup/
- find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' -mtime +30 |
-  sort | head -n20 | while read dumpname; do
-   echo "Compressing $dumpname"
-   bzip2 -9 "$dumpname"
- done
-)
-
-################################################################################
+bzip2 -9 "$LOGFILE"
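
The housekeeping added by this patch reduces to three idioms: temporary files created with mktemp and removed by a trap, all output redirected into a dated logfile via exec, and the logfile compressed once the run is over. The following is a minimal stand-alone sketch of that pattern, not part of the patch; the defaulted TMPDIR/logdir values and the placeholder work step are illustrative only.

#! /bin/bash
# Sketch of the temp-file / trap / logging pattern used by the new cron.daily.
set -e
set -u
set -o pipefail

# Placeholder defaults so the sketch runs outside dak's environment.
: "${TMPDIR:=/tmp}"
logdir=${logdir:-/tmp}

TMPFILE=$( mktemp -p ${TMPDIR} )

cleanup() {
    # Keep the exit status of whatever triggered the trap.
    ERRVAL=$?
    rm -f ${TMPFILE}
    exit ${ERRVAL}
}
trap cleanup SIGHUP SIGINT SIGPIPE SIGTERM EXIT ERR

# From here on, everything goes to a dated logfile instead of cron mail.
NOW=$(date "+%Y.%m.%d-%H:%M:%S")
LOGFILE="${logdir}/daily_${NOW}.log"
exec >> "$LOGFILE" 2>&1

echo "placeholder for the real work"

# Detach from the logfile before compressing it.
exec > /dev/null 2>&1
bzip2 -9 "$LOGFILE"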
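
The database push to security-master is an ordinary "COPY ... TO STDOUT | compress | ssh" pipeline, with the receiving account's ssh key pinned to a single forced command (quoted verbatim in the patch comment above). A generic sketch of both halves follows; host, key path, database names and the key material are placeholders, and the dak-specific ssh options are omitted.

# Sending side: stream one table, compress it, hand it to the restricted key.
psql -c 'COPY files (id, filename, size, md5sum, last_used, sha1sum, sha256sum, created, modified) TO STDOUT' projectb | \
    xz -3 | \
    ssh -o BatchMode=yes -o ConnectTimeout=30 -i /path/to/push_key importuser@db-mirror.example.org sync

# Receiving side (~importuser/.ssh/authorized_keys): whatever command the
# client asks for, this key only ever runs the import pipeline.
# command="/usr/bin/xzcat | /usr/bin/psql -1 -c 'DELETE FROM external_files; COPY external_files (...) FROM STDIN' targetdb" ssh-ed25519 AAAA... pusher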
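
The contributors.debian.org submission is a plain multipart form POST; curl is used instead of letting dc-tool upload directly as a workaround for #801506, and --capath is only added when the Debian CA directory exists. A trimmed stand-alone sketch, with placeholder token file and JSON path:

#! /bin/bash
# Sketch of the curl submission step; DATA and TOKENFILE are placeholders.
set -e

DATA=/tmp/contributors.json           # JSON produced by the earlier dc-tool run
TOKENFILE=/path/to/contributor.auth   # secret auth token, not part of this sketch

capath=""
dir=/etc/ssl/ca-debian
test -d $dir && capath="--capath $dir"

curl -s $capath https://contributors.debian.org/contributors/post \
     -F source=ftp.debian.org \
     -F auth_token="$(cat ${TOKENFILE})" \
     -F data=@${DATA}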