X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=config%2Fdebian%2Fcron.dinstall;h=7b1a2f2a7a5aaa8f1bc16082989907bd410d5f47;hb=6411bdb24f02bb6ab902502ae4b0c78bf7dc9034;hp=1c9fa5afefff3284fd27f860fcb63d40d5aad16d;hpb=fb11aec0c46bae0797a4e32b1d19eb6fcbc07c49;p=dak.git

diff --git a/config/debian/cron.dinstall b/config/debian/cron.dinstall
index 1c9fa5af..7b1a2f2a 100755
--- a/config/debian/cron.dinstall
+++ b/config/debian/cron.dinstall
@@ -35,7 +35,7 @@ set -u
 set -E
 
 # import the general variable set.
-export SCRIPTVARS=/srv/ftp.debian.org/dak/config/debian/vars
+export SCRIPTVARS=/srv/ftp-master.debian.org/dak/config/debian/vars
 . $SCRIPTVARS
 
 ########################################################################
@@ -44,419 +44,8 @@ export SCRIPTVARS=/srv/ftp.debian.org/dak/config/debian/vars
 # common functions are "outsourced"
 . "${configdir}/common"
 
-# Timestamp. Used for dinstall stat graphs
-function ts() {
-    echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
-}
-
-# Cleanup actions
-function cleanup() {
-    rm -f ${LOCK_DAILY}
-    rm -f ${LOCK_ACCEPTED}
-}
-
-# If we error out this one is called, *FOLLOWED* by cleanup above
-function onerror() {
-    ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
-
-    subject="ATTENTION ATTENTION!"
-    if [ "${error}" = "false" ]; then
-        subject="${subject} (continued)"
-    else
-        subject="${subject} (interrupted)"
-    fi
-    subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
-
-    cat "${STAGEFILE}.log" | mail -s "${subject}" -a "X-Debian: DAK" cron@ftp-master.debian.org
-}
-
-########################################################################
-# the actual dinstall functions follow                                 #
-########################################################################
-
-# Setup the notice file to tell bad mirrors they used the wrong time
-function notice() {
-    rm -f "$NOTICE"
-    cat > "$NOTICE" <<EOF
-EOF
-}
-
-function pgdump_pre() {
-    log "Creating pre-daily-cron-job backup of projectb database..."
-    pg_dump projectb > $base/backup/dump_pre_$(date +%Y.%m.%d-%H:%M:%S)
-}
-
-function pgdump_post() {
-    log "Creating post-daily-cron-job backup of projectb database..."
-    cd $base/backup
-    POSTDUMP=$(date +%Y.%m.%d-%H:%M:%S)
-    pg_dump projectb > $base/backup/dump_$POSTDUMP
-    pg_dumpall --globals-only > $base/backup/dumpall_$POSTDUMP
-    ln -sf $base/backup/dump_$POSTDUMP current
-    ln -sf $base/backup/dumpall_$POSTDUMP currentall
-}
-
-# Load the dak-dev projectb
-function pgdakdev() {
-    cd $base/backup
-    echo "drop database projectb" | psql -p 5433 template1
-    cat currentall | psql -p 5433 template1
-    createdb -p 5433 -T template0 projectb
-    fgrep -v '\connect' current | psql -p 5433 projectb
-}
-
-# Updating various files
-function updates() {
-    log "Updating Bugs docu, Mirror list and mailing-lists.txt"
-    cd $configdir
-    $scriptsdir/update-bugdoctxt
-    $scriptsdir/update-mirrorlists
-    $scriptsdir/update-mailingliststxt
-    $scriptsdir/update-pseudopackages.sh
-}
-
-# Process (oldstable)-proposed-updates "NEW" queue
-function punew_do() {
-    cd "${queuedir}/${1}"
-    date -u -R >> REPORT
-    dak process-new -a -C COMMENTS >> REPORT || true
-    echo >> REPORT
-}
-function punew() {
-    log "Doing automated p-u-new processing"
-    punew_do "$1"
-}
-function opunew() {
-    log "Doing automated o-p-u-new processing"
-    punew_do "$1"
-}
-
-# The first i18n one, syncing new descriptions
-function i18n1() {
-    log "Synchronizing i18n package descriptions"
-    # First sync their newest data
-    cd ${scriptdir}/i18nsync
-    rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
-
-    # Now check if we still know about the packages for which they created the files
-    # is the timestamp signed by us?
-    if $(gpgv --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
-        # now read it. As its signed by us we are sure the content is what we expect, no need
-        # to do more here. And we only test -d a directory on it anyway.
-        TSTAMP=$(cat timestamp)
-        # do we have the dir still?
-        if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
-            # Lets check!
-            if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
-                # Yay, worked, lets copy around
-                for dir in squeeze sid; do
-                    if [ -d dists/${dir}/ ]; then
-                        cd dists/${dir}/main/i18n
-                        rsync -aq --delete --delete-after . ${ftpdir}/dists/${dir}/main/i18n/.
-                    fi
-                    cd ${scriptdir}/i18nsync
-                done
-            else
-                echo "ARRRR, bad guys, wrong files, ARRR"
-                echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." debian-l10n-devel@lists.alioth.debian.org
-            fi
-        else
-            echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
-            echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." debian-l10n-devel@lists.alioth.debian.org
-        fi
-    else
-        echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
-        echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -s "You can't keep blaming yourself. Just blame yourself once, and move on." debian-l10n-devel@lists.alioth.debian.org
-    fi
-}
-
-# Process the accepted queue
-function accepted() {
-    log "Processing queue/accepted"
-    rm -f "$accepted/REPORT"
-    dak process-accepted -pa -d "$accepted" > "$accepted/REPORT"
-    cat "$accepted/REPORT" | mail -s "Install for $(date +"%D - %R")" ftpmaster@ftp-master.debian.org
-    chgrp debadmin "$accepted/REPORT"
-    chmod 664 "$accepted/REPORT"
-}
-
-function cruft() {
-    log "Checking for cruft in overrides"
-    dak check-overrides
-
-    log "Fixing symlinks in $ftpdir"
-    symlinks -d -r $ftpdir
-}
-
-function msfl() {
-    log "Generating suite file lists for apt-ftparchive"
-    dak make-suite-file-list
-}
-
-function fingerprints() {
-    log "Updating fingerprints"
-    dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
-
-    OUTFILE=$(mktemp)
-    dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
-
-    if [ -s "${OUTFILE}" ]; then
-        /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
-To:
-Subject: Debian Maintainers Keyring changes
-Content-Type: text/plain; charset=utf-8
-MIME-Version: 1.0
-
-The following changes to the debian-maintainers keyring have just been activated:
-
-$(cat $OUTFILE)
-
-Debian distribution maintenance software,
-on behalf of the Keyring maintainers
-
-EOF
-    fi
-    rm -f "$OUTFILE"
-}
-
-function overrides() {
-    log "Writing overrides into text files"
-    cd $overridedir
-    dak make-overrides
-
-    # FIXME
-    rm -f override.sid.all3
-    for i in main contrib non-free main.debian-installer; do cat override.sid.$i >> override.sid.all3; done
-}
-
-function mpfm() {
-    log "Generating package / file mapping"
-    dak make-pkg-file-mapping | bzip2 -9 > $base/ftp/indices/package-file.map.bz2
-}
-
-function packages() {
-    log "Generating Packages and Sources files"
-    cd $configdir
-    apt-ftparchive generate apt.conf
-}
-
-function pdiff() {
-    log "Generating pdiff files"
-    dak generate-index-diffs
-}
-
-function release() {
-    log "Generating Release files"
-    dak generate-releases
-}
-
-function dakcleanup() {
-    log "Cleanup old packages/files"
-    dak clean-suites -m 10000
-    dak clean-queues
-}
-
-function buildd() {
-    # Needs to be rebuilt, as files have moved. Due to unaccepts, we need to
-    # update this before wanna-build is updated.
-    log "Regenerating wanna-build/buildd information"
-    psql projectb -A -t -q -c "SELECT filename FROM queue_build WHERE suite = 5 AND queue = 0 AND in_queue = true AND filename ~ 'd(sc|eb)$'" > $dbdir/dists/unstable_accepted.list
-    symlinks -d /srv/incoming.debian.org/buildd > /dev/null
-    apt-ftparchive generate apt.conf.buildd
-}
-
-function buildd_dir() {
-    # Rebuilt the buildd dir to avoid long times of 403
-    log "Regenerating the buildd incoming dir"
-    STAMP=$(date "+%Y%m%d%H%M")
-    make_buildd_dir
-}
-
-function scripts() {
-    log "Running various scripts from $scriptsdir"
-    cd $scriptsdir
-    ./mkmaintainers
-    ./copyoverrides
-    ./mklslar
-    ./mkfilesindices
-    ./mkchecksums
-}
-
-function mirror() {
-    echo "Regenerating \"public\" mirror/ hardlink fun"
-    cd ${mirrordir}
-    rsync -aH --link-dest ${ftpdir} --exclude Archive_Maintenance_In_Progress --delete --delete-after --ignore-errors ${ftpdir}/. .
-}
-
-function wb() {
-    log "Trigger daily wanna-build run"
-    ssh -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 wbadm@buildd /org/wanna-build/trigger.daily || echo "W-B trigger.daily failed" | mail -s "W-B Daily trigger failed" ftpmaster@ftp-master.debian.org
-}
-
-function expire() {
-    log "Expiring old database dumps..."
-    cd $base/backup
-    $scriptsdir/expire_dumps -d . -p -f "dump_*"
-}
-
-function transitionsclean() {
-    log "Removing out of date transitions..."
-    cd $base
-    dak transitions -c -a
-}
-
-function reports() {
-    # Send a report on NEW/BYHAND packages
-    log "Nagging ftpteam about NEW/BYHAND packages"
-    dak queue-report | mail -e -s "NEW and BYHAND on $(date +%D)" ftpmaster@ftp-master.debian.org
-    # and one on crufty packages
-    log "Sending information about crufty packages"
-    dak cruft-report > $webdir/cruft-report-daily.txt
-    dak cruft-report -s experimental >> $webdir/cruft-report-daily.txt
-    cat $webdir/cruft-report-daily.txt | mail -e -s "Debian archive cruft report for $(date +%D)" ftpmaster@ftp-master.debian.org
-}
-
-function dm() {
-    log "Updating DM html page"
-    $scriptsdir/dm-monitor >$webdir/dm-uploaders.html
-}
-
-function bts() {
-    log "Categorizing uncategorized bugs filed against ftp.debian.org"
-    dak bts-categorize
-}
-
-function merkel2() {
-    # Push dak@merkel so it syncs the projectb there. Returns immediately, the sync runs detached
-    log "Trigger merkel/flotows projectb sync"
-    ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_projectb dak@merkel.debian.org sleep 1
-    # Also trigger flotow, the ftpmaster test box
-    ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_flotow_projectb dak@flotow.debconf.org sleep 1
-}
-
-function merkel3() {
-    # Push dak@merkel to tell it to sync the dd accessible parts. Returns immediately, the sync runs detached
-    log "Trigger merkels dd accessible parts sync"
-    ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_ddaccess dak@merkel.debian.org sleep 1
-}
-
-function mirrorpush() {
-    log "Starting the mirrorpush"
-    date -u > /srv/ftp.debian.org/web/mirrorstart
-    echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
-    echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/mirrorstart
-    sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
-}
-
-function i18n2() {
-    log "Exporting package data foo for i18n project"
-    STAMP=$(date "+%Y%m%d%H%M")
-    mkdir -p ${scriptdir}/i18n/${STAMP}
-    cd ${scriptdir}/i18n/${STAMP}
-    dak control-suite -l stable > lenny
-    dak control-suite -l testing > squeeze
-    dak control-suite -l unstable > sid
-    echo "${STAMP}" > timestamp
-    gpg --secret-keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 55BE302B --detach-sign -o timestamp.gpg timestamp
-    rm -f md5sum
-    md5sum * > md5sum
-    cd ${webdir}/
-    ln -sfT ${scriptdir}/i18n/${STAMP} i18n
-
-    cd ${scriptdir}
-    find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
-}
-
-function stats() {
-    log "Updating stats data"
-    cd $configdir
-    $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
-    R --slave --vanilla < $base/misc/ftpstats.R
-    dak stats arch-space > $webdir/arch-space
-    dak stats pkg-nums > $webdir/pkg-nums
-}
-
-function aptftpcleanup() {
-    log "Clean up apt-ftparchive's databases"
-    cd $configdir
-    apt-ftparchive -q clean apt.conf
-}
-
-function compress() {
-    log "Compress old psql backups"
-    cd $base/backup/
-    find -maxdepth 1 -mindepth 1 -type f -name 'dump_pre_*' -mtime +2 -print0 | xargs -0 --no-run-if-empty rm
-
-    find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
-    while read dumpname; do
-        echo "Compressing $dumpname"
-        bzip2 -9fv "$dumpname"
-    done
-    find -maxdepth 1 -mindepth 1 -type f -name "dumpall_*" \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
-    while read dumpname; do
-        echo "Compressing $dumpname"
-        bzip2 -9fv "$dumpname"
-    done
-    finddup -l -d $base/backup
-}
-
-function logstats() {
-    $masterdir/tools/logs.py "$1"
-}
-
-# save timestamp when we start
-function savetimestamp() {
-    NOW=`date "+%Y.%m.%d-%H:%M:%S"`
-    echo ${NOW} > "${dbdir}/dinstallstart"
-}
-
-function maillogfile() {
-    cat "$LOGFILE" | mail -s "Log for dinstall run of ${NOW}" cron@ftp-master.debian.org
-}
-
-function renamelogfile() {
-    if [ -f "${dbdir}/dinstallstart" ]; then
-        NOW=$(cat "${dbdir}/dinstallstart")
-#        maillogfile
-        mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
-        logstats "$logdir/dinstall_${NOW}.log"
-        bzip2 -9 "$logdir/dinstall_${NOW}.log"
-    else
-        error "Problem, I don't know when dinstall started, unable to do log statistics."
-        NOW=`date "+%Y.%m.%d-%H:%M:%S"`
-#        maillogfile
-        mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
-        bzip2 -9 "$logdir/dinstall_${NOW}.log"
-    fi
-}
-
-function testingsourcelist() {
-    dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
-}
-
-# do a last run of process-unchecked before dinstall is on.
-function process_unchecked() {
-    log "Processing the unchecked queue"
-    acceptnew
-    UNCHECKED_WITHOUT_LOCK="-p"
-    do_unchecked
-    sync_debbugs
-}
+# source the dinstall functions
+. "${configdir}/dinstall.functions"
 
 ########################################################################
 ########################################################################
@@ -523,6 +112,9 @@ function stage() {
     # Make sure we are always at the same place.
     cd ${configdir}
 
+    # We always use the same umask. If a function wants to do different, fine, but we reset.
+    umask 022
+
     touch "${STAGEFILE}"
 
     if [ -n "${TIME}" ]; then
@@ -550,52 +142,20 @@ LOGFILE="$logdir/dinstall.log"
 
 exec >> "$LOGFILE" 2>&1
 
-# usually we are not using debug logs. Set to 1 if you want them.
-DEBUG=0
-
-# our name
-PROGRAM="dinstall"
+# And now source our default config
+. "${configdir}/dinstall.variables"
 
-# where do we want mails to go? For example log entries made with error()
-if [ "x$(hostname -s)x" != "xriesx" ]; then
-    # Not our ftpmaster host
-    MAILTO=${MAILTO:-"root"}
-else
-    # Yay, ftpmaster
-    MAILTO=${MAILTO:-"ftpmaster@debian.org"}
-fi
+# Make sure we start out with a sane umask setting
+umask 022
 
-# How many logfiles to keep
-LOGROTATE=${LOGROTATE:-400}
-
-# Marker for dinstall start
-DINSTALLSTART="${lockdir}/dinstallstart"
-# Marker for dinstall end
-DINSTALLEND="${lockdir}/dinstallend"
+# And use one locale, no matter what the caller has set
+export LANG=C
+export LC_ALL=C
 
 touch "${DINSTALLSTART}"
 ts "startup"
-
-# Tell everyone we are doing some work
-NOTICE="$ftpdir/Archive_Maintenance_In_Progress"
-
-# lock cron.unchecked (it immediately exits when this exists)
-LOCK_DAILY="$lockdir/daily.lock"
-
-# Lock cron.unchecked from doing work
-LOCK_ACCEPTED="$lockdir/unchecked.lock"
-
-# Lock process-new from doing work
-LOCK_NEW="$lockdir/processnew.lock"
-
-# This file is simply used to indicate to britney whether or not
-# the Packages file updates completed sucessfully.  It's not a lock
-# from our point of view
-LOCK_BRITNEY="$lockdir/britney.lock"
-
-# If this file exists we exit immediately after the currently running
-# function is done
-LOCK_STOP="$lockdir/archive.stop"
+DINSTALLBEGIN="$(date -u +"%a %b %d %T %Z %Y (%s)")"
+state "Startup"
 
 lockfile -l 3600 "${LOCK_DAILY}"
 trap onerror ERR
@@ -611,14 +171,6 @@ GO=(
 )
 stage $GO
 
-GO=(
-    FUNC="notice"
-    TIME=""
-    ARGS=""
-    ERR="false"
-)
-stage $GO
-
 GO=(
     FUNC="merkel1"
     TIME="init"
@@ -643,22 +195,6 @@ GO=(
 )
 stage $GO &
 
-GO=(
-    FUNC="punew"
-    TIME="p-u-new"
-    ARGS="p-u-new"
-    ERR=""
-)
-stage $GO
-
-GO=(
-    FUNC="opunew"
-    TIME="o-p-u-new"
-    ARGS="o-p-u-new"
-    ERR=""
-)
-stage $GO
-
 GO=(
    FUNC="i18n1"
    TIME="i18n 1"
@@ -671,30 +207,29 @@ lockfile "$LOCK_ACCEPTED"
 lockfile "$LOCK_NEW"
 
 GO=(
-    FUNC="process_unchecked"
-    TIME=""
-    ARGS=""
-    ERR=""
+    FUNC="punew"
+    TIME="p-u-new"
+    ARGS="proposedupdates"
+    ERR="false"
 )
 stage $GO
 
+#GO=(
+#    FUNC="opunew"
+#    TIME="o-p-u-new"
+#    ARGS="oldproposedupdates"
+#    ERR="false"
+#)
+#stage $GO
+
 GO=(
-    FUNC="accepted"
-    TIME="accepted"
+    FUNC="newstage"
+    TIME="newstage"
     ARGS=""
     ERR=""
 )
 stage $GO
 
-GO=(
-    FUNC="buildd_dir"
-    TIME="buildd_dir"
-    ARGS=""
-    ERR="false"
-)
-stage $GO
-
 GO=(
     FUNC="cruft"
     TIME="cruft"
@@ -706,9 +241,19 @@ stage $GO
 rm -f "$LOCK_ACCEPTED"
 rm -f "$LOCK_NEW"
 
+state "indices"
+
 GO=(
-    FUNC="msfl"
-    TIME="make-suite-file-list"
+    FUNC="dominate"
+    TIME="dominate"
+    ARGS=""
+    ERR=""
+)
+stage $GO
+
+GO=(
+    FUNC="filelist"
+    TIME="generate-filelist"
     ARGS=""
     ERR=""
 )
@@ -736,16 +281,20 @@ GO=(
     ARGS=""
     ERR="false"
 )
-stage $GO
+stage $GO &
 
+state "packages/contents"
 GO=(
     FUNC="packages"
     TIME="apt-ftparchive"
     ARGS=""
     ERR=""
 )
+# Careful: When we ever go and remove this monster-long thing, we have to check the backgrounded
+# functions before it. We no longer have a 1.5hour sync point then.
 stage $GO
 
+state "dists/"
 GO=(
     FUNC="pdiff"
     TIME="pdiff"
@@ -771,41 +320,74 @@ GO=(
 )
 stage $GO
 
 GO=(
-    FUNC="buildd"
-    TIME="buildd"
+    FUNC="buildd_dir"
+    TIME="buildd_dir"
     ARGS=""
     ERR=""
 )
 stage $GO
 
+state "scripts"
 GO=(
-    FUNC="scripts"
-    TIME="scripts"
+    FUNC="mkmaintainers"
+    TIME="mkmaintainers"
     ARGS=""
     ERR=""
 )
 stage $GO
 
 GO=(
-    FUNC="mirror"
-    TIME="mirror hardlinks"
+    FUNC="mkuploaders"
+    TIME="mkuploaders"
     ARGS=""
     ERR=""
 )
 stage $GO
 
 GO=(
-    FUNC="wb"
-    TIME="w-b"
+    FUNC="copyoverrides"
+    TIME="copyoverrides"
     ARGS=""
     ERR=""
 )
-stage $GO &
+stage $GO
+
+GO=(
+    FUNC="mklslar"
+    TIME="mklslar"
+    ARGS=""
+    ERR=""
+)
+stage $GO
+
+GO=(
+    FUNC="mkfilesindices"
+    TIME="mkfilesindices"
+    ARGS=""
+    ERR=""
+)
+stage $GO
+
+GO=(
+    FUNC="mkchecksums"
+    TIME="mkchecksums"
+    ARGS=""
+    ERR=""
+)
+stage $GO
+
+GO=(
+    FUNC="mirror"
+    TIME="mirror hardlinks"
+    ARGS=""
+    ERR=""
+)
+stage $GO
 
-rm -f "${NOTICE}"
 rm -f "${LOCK_DAILY}"
 
 ts "locked part finished"
+state "postlock"
 
 GO=(
     FUNC="pgdump_post"
@@ -869,7 +451,7 @@ GO=(
     ARGS=""
     ERR="false"
 )
-stage $GO
+stage $GO &
 
 GO=(
     FUNC="i18n2"
@@ -877,7 +459,7 @@ GO=(
     ARGS=""
     ERR="false"
 )
-stage $GO
+stage $GO &
 
 GO=(
     FUNC="stats"
@@ -893,9 +475,9 @@ GO=(
     ARGS=""
     ERR="false"
 )
-stage $GO
+stage $GO &
 
-rm -f ${LOCK_BRITNEY}
+rm -f "${LOCK_BRITNEY}"
 
 GO=(
     FUNC="pgdakdev"
@@ -905,14 +487,6 @@ GO=(
 )
 stage $GO &
 
-GO=(
-    FUNC="aptftpcleanup"
-    TIME="apt-ftparchive cleanup"
-    ARGS=""
-    ERR="false"
-)
-stage $GO &
-
 GO=(
     FUNC="merkel3"
     TIME="merkel ddaccessible sync"
@@ -927,6 +501,14 @@ GO=(
     ARGS=""
     ERR=""
 )
+stage $GO &
+
+GO=(
+    FUNC="aptftpcleanup"
+    TIME="apt-ftparchive cleanup"
+    ARGS=""
+    ERR="false"
+)
 stage $GO
 
 log "Daily cron scripts successful, all done"
@@ -940,6 +522,7 @@ GO=(
     ERR="false"
 )
 stage $GO
+state "all done"
 
 # Now, at the very (successful) end of dinstall, make sure we remove