# -*- mode:sh -*-

# Timestamp. Used for dinstall stat graphs
function ts() {
    echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
}

# Cleanup actions
function cleanup() {
    rm -f ${LOCK_DAILY}
    rm -f ${LOCK_ACCEPTED}
}

# If we error out, this one is called, *FOLLOWED* by cleanup above
function onerror() {
    ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")

    subject="ATTENTION ATTENTION!"
    if [ "${error}" = "false" ]; then
        subject="${subject} (continued)"
    else
        subject="${subject} (interrupted)"
    fi
    subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"

    cat "${STAGEFILE}.log" | mail -s "${subject}" -a "X-Debian: DAK" cron@backports.debian.org
}
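# Illustration only, not part of this file: the calling cron.dinstall script
# is assumed to wire the handlers above up with traps, so that a failing
# stage first mails its log (onerror) and then drops the lock files
# (cleanup), roughly:
#
#   trap onerror ERR
#   trap cleanup EXIT TERM HUP INT QUIT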
########################################################################
# the actual dinstall functions follow                                 #
########################################################################

# pushing merkel's QA user, part one
function merkel1() {
    log "Telling merkels QA user that we start dinstall"
    ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@merkel.debian.org sleep 1
}

# Create the postgres dump files
function pgdump_pre() {
    log "Creating pre-daily-cron-job backup of $PGDATABASE database..."
    pg_dump > $base/backup/dump_pre_$(date +%Y.%m.%d-%H:%M:%S)
}

function pgdump_post() {
    log "Creating post-daily-cron-job backup of $PGDATABASE database..."
    cd $base/backup
    POSTDUMP=$(date +%Y.%m.%d-%H:%M:%S)
    pg_dump > $base/backup/dump_$POSTDUMP
    #pg_dumpall --globals-only > $base/backup/dumpall_$POSTDUMP
    ln -sf $base/backup/dump_$POSTDUMP current
    #ln -sf $base/backup/dumpall_$POSTDUMP currentall
}

# Load the dak-dev projectb
function pgdakdev() {
    # Make sure to unset any possible psql variables so we don't drop the wrong
    # f****** database by accident
    local PGDATABASE
    unset PGDATABASE
    local PGHOST
    unset PGHOST
    local PGPORT
    unset PGPORT
    local PGUSER
    unset PGUSER
    cd $base/backup
    echo "drop database projectb" | psql -p 5434 template1
    #cat currentall | psql -p 5433 template1
    createdb -p 5434 -T template1 projectb
    fgrep -v '\connect' current | psql -p 5434 projectb
}

# Updating various files
function updates() {
    log "Updating Bugs docu, Mirror list and mailing-lists.txt"
    cd $configdir
    $scriptsdir/update-bugdoctxt
    $scriptsdir/update-mirrorlists
    $scriptsdir/update-mailingliststxt
    $scriptsdir/update-pseudopackages.sh
}

# Process (oldstable)-proposed-updates "NEW" queue
function punew_do() {
    date -u -R >> REPORT
    # mail -e: do not send the mail if the body is empty
    dak process-policy $1 | tee -a REPORT | mail -e -s "NEW changes in $1" team@backports.debian.org
    echo >> REPORT
}

function punew() {
    log "Doing automated p-u-new processing"
    cd "${queuedir}/p-u-new"
    punew_do "$1"
}

function opunew() {
    log "Doing automated o-p-u-new processing"
    cd "${queuedir}/o-p-u-new"
    punew_do "$1"
}

# The first i18n one, syncing new descriptions
function i18n1() {
    log "Synchronizing i18n package descriptions"
    # First sync their newest data
    cd ${scriptdir}/i18nsync
    rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true

    # Now check if we still know about the packages for which they created the files
    # is the timestamp signed by us?
    if gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp; then
        # now read it. As it's signed by us we are sure the content is what we expect, no need
        # to do more here. And we only test -d a directory on it anyway.
        TSTAMP=$(cat timestamp)
        # do we have the dir still?
        if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
            # Let's check!
            if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
                # Yay, worked, let's copy around
                for dir in squeeze sid; do
                    if [ -d dists/${dir}/ ]; then
                        cd dists/${dir}/main/i18n
                        rsync -aq --delete --delete-after . ${ftpdir}/dists/${dir}/main/i18n/.
                    fi
                    cd ${scriptdir}/i18nsync
                done
            else
                echo "ARRRR, bad guys, wrong files, ARRR"
                echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." debian-l10n-devel@lists.alioth.debian.org
            fi
        else
            echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
            echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." debian-l10n-devel@lists.alioth.debian.org
        fi
    else
        echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
        echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -s "You can't keep blaming yourself. Just blame yourself once, and move on." debian-l10n-devel@lists.alioth.debian.org
    fi
}

function cruft() {
    log "Checking for cruft in overrides"
    dak check-overrides
}

function dominate() {
    log "Removing obsolete source and binary associations"
    dak dominate
}

function filelist() {
    log "Generating file lists for apt-ftparchive"
    dak generate-filelist
}

function fingerprints() {
    log "Not updating fingerprints - scripts needs checking"

    log "Updating fingerprints"
    dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg

    OUTFILE=$(mktemp)
    dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"

    if [ -s "${OUTFILE}" ]; then
        /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
To:
Subject: Debian Maintainers Keyring changes
Content-Type: text/plain; charset=utf-8
MIME-Version: 1.0

The following changes to the debian-maintainers keyring have just been activated:

$(cat $OUTFILE)

Debian distribution maintenance software,
on behalf of the Keyring maintainers

EOF
    fi
    rm -f "$OUTFILE"
}

function overrides() {
    log "Writing overrides into text files"
    cd $overridedir
    dak make-overrides

    # FIXME
    rm -f override.sid.all3
    for i in main contrib non-free main.debian-installer; do
        cat override.lenny-backports.$i >> override.sid.all3
    done
}

function mpfm() {
    log "Generating package / file mapping"
    dak make-pkg-file-mapping | bzip2 -9 > $base/ftp/indices/package-file.map.bz2
}

function packages() {
    log "Generating Packages and Sources files"
    cd $configdir
    apt-ftparchive generate apt.conf
    #dak generate-packages-sources
}

function pdiff() {
    log "Generating pdiff files"
    dak generate-index-diffs
}

function release() {
    log "Generating Release files"
    dak generate-releases
}

function dakcleanup() {
    log "Cleanup old packages/files"
    dak clean-suites -m 10000
    dak clean-queues
}

function buildd_dir() {
    # Rebuild the buildd dir to avoid long times of 403
    log "Regenerating the buildd incoming dir"
    STAMP=$(date "+%Y%m%d%H%M")
    make_buildd_dir
}

function mklslar() {
    cd $ftpdir

    FILENAME=ls-lR

    log "Removing any core files ..."
    find -type f -name core -print0 | xargs -0r rm -v

    log "Checking permissions on files in the FTP tree ..."
    find -type f \( \! -perm -444 -o -perm +002 \) -ls
    find -type d \( \! -perm -555 -o -perm +002 \) -ls

    log "Checking symlinks ..."
    symlinks -rd .

    log "Creating recursive directory listing ... "
    rm -f .${FILENAME}.new
    TZ=UTC ls -lR > .${FILENAME}.new

    if [ -r ${FILENAME}.gz ]; then
        mv -f ${FILENAME}.gz ${FILENAME}.old.gz
        mv -f .${FILENAME}.new ${FILENAME}
        rm -f ${FILENAME}.patch.gz
        zcat ${FILENAME}.old.gz | diff -u - ${FILENAME} | gzip -9cfn - >${FILENAME}.patch.gz
        rm -f ${FILENAME}.old.gz
    else
        mv -f .${FILENAME}.new ${FILENAME}
    fi

    gzip -9cfN ${FILENAME} >${FILENAME}.gz
    rm -f ${FILENAME}
}
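# Illustration only, not part of this file: mirrors that already hold an
# uncompressed ls-lR can catch up with the unified diff published above
# instead of refetching the full listing, roughly:
#
#   zcat ls-lR.patch.gz | patch ls-lR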
function mkmaintainers() {
    log 'Creating Maintainers index ... '

    cd $indices
    dak make-maintainers ${scriptdir}/masterfiles/pseudo-packages.maintainers | \
        sed -e "s/~[^ ]*\([ ]\)/\1/" | \
        awk '{printf "%-20s ", $1; for (i=2; i<=NF; i++) printf "%s ", $i; printf "\n";}' > .new-maintainers

    if ! cmp -s .new-maintainers Maintainers || [ ! -f Maintainers ]; then
        log "installing Maintainers ... "
        mv -f .new-maintainers Maintainers
        gzip -9v <Maintainers >.new-maintainers.gz
        mv -f .new-maintainers.gz Maintainers.gz
    else
        rm -f .new-maintainers
    fi
}
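# Illustration only: the awk above pads the first field (the package name)
# to a 20-column field, so each index line comes out roughly like
# (sample data):
#
#   0ad                  Example Maintainer <maint@example.org>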
function mkuploaders() {
    log 'Creating Uploaders index ... '

    cd $indices
    dak make-maintainers -u ${scriptdir}/masterfiles/pseudo-packages.maintainers | \
        sed -e "s/~[^ ]*\([ ]\)/\1/" | \
        awk '{printf "%-20s ", $1; for (i=2; i<=NF; i++) printf "%s ", $i; printf "\n";}' > .new-uploaders

    if ! cmp -s .new-uploaders Uploaders || [ ! -f Uploaders ]; then
        log "installing Uploaders ... "
        mv -f .new-uploaders Uploaders
        gzip -9v <Uploaders >.new-uploaders.gz
        mv -f .new-uploaders.gz Uploaders.gz
    else
        rm -f .new-uploaders
    fi
}

function copyoverrides() {
    log 'Copying override files into public view ...'

    for ofile in $copyoverrides ; do
        cd $overridedir
        chmod g+w override.$ofile

        cd $indices

        newofile=override.$ofile.gz
        rm -f .newover-$ofile.gz
        pc="`gzip 2>&1 -9nv <$overridedir/override.$ofile >.newover-$ofile.gz`"
        if ! cmp -s .newover-$ofile.gz $newofile || [ ! -f $newofile ]; then
            log "  installing new $newofile $pc"
            mv -f .newover-$ofile.gz $newofile
            chmod g+w $newofile
        else
            rm -f .newover-$ofile.gz
        fi
    done
}

function mkfilesindices() {
    umask 002
    cd $base/ftp/indices/files/components
    ARCHLIST=$(tempfile)

    log "Querying postgres..."
    echo 'SELECT l.path, f.filename, a.arch_string FROM location l JOIN files f ON (f.location = l.id) LEFT OUTER JOIN (binaries b JOIN architecture a ON (b.architecture = a.id)) ON (f.id = b.file)' | psql -At | sed 's/|//;s,^/srv/ftp-master.debian.org/ftp,.,' | sort >$ARCHLIST

    # print each path plus all of its parent directories (each only once)
    includedirs () {
        perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
    }

    # reorder a file list so ./pool/ entries come first
    poolfirst () {
        perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
    }

    log "Generating sources list"
    (
        sed -n 's/|$//p' $ARCHLIST
        cd $base/ftp
        find ./dists -maxdepth 1 \! -type d
        find ./dists \! -type d | grep "/source/"
    ) | sort -u | gzip -9 > source.list.gz

    log "Generating arch lists"
    ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
    for a in $ARCHES; do
        (
            sed -n "s/|$a$//p" $ARCHLIST
            sed -n 's/|all$//p' $ARCHLIST
            cd $base/ftp
            find ./dists -maxdepth 1 \! -type d
            find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
        ) | sort -u | gzip -9 > arch-$a.list.gz
    done

    log "Generating suite lists"
    # list all files belonging to suite id $1: first the files of its source
    # packages, then its binary files
    suite_list () {
        printf 'SELECT DISTINCT l.path, f.filename FROM (SELECT sa.source AS source FROM src_associations sa WHERE sa.suite = %d UNION SELECT b.source AS source FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) WHERE ba.suite = %d) s JOIN dsc_files df ON (s.source = df.source) JOIN files f ON (df.file = f.id) JOIN location l ON (f.location = l.id)\n' $1 $1 | psql -F' ' -A -t

        printf 'SELECT l.path, f.filename FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) JOIN files f ON (b.file = f.id) JOIN location l ON (f.location = l.id) WHERE ba.suite = %d\n' $1 | psql -F' ' -A -t
    }

    printf 'SELECT id, suite_name FROM suite\n' | psql -F' ' -At |
    while read id suite; do
        [ -e $base/ftp/dists/$suite ] || continue
        (
            (
                cd $base/ftp
                distname=$(cd dists; readlink $suite || echo $suite)
                find ./dists/$distname \! -type d
                for distdir in ./dists/*; do
                    [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
                done
            )
            suite_list $id | tr -d ' ' | sed 's,^/srv/ftp-master.debian.org/ftp,.,'
        ) | sort -u | gzip -9 > suite-${suite}.list.gz
    done

    log "Finding everything on the ftp site to generate sundries"
    (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST

    rm -f sundries.list
    zcat *.list.gz | cat - *.list | sort -u | diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list

    log "Generating files list"
    for a in $ARCHES; do
        (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
            cat - sundries.list dists.list project.list docs.list indices.list |
            sort -u | poolfirst > ../arch-$a.files
    done

    (
        cd $base/ftp/
        for dist in sid squeeze; do
            find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
        done
    )

    (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-squeeze.list.gz) | sort -u | poolfirst > ../typical.files

    rm -f $ARCHLIST
    log "Done!"
}

function mkchecksums() {
    dsynclist=$dbdir/dsync.list
    md5list=$indices/md5sums

    log -n "Creating md5 / dsync index file ... "

    cd "$ftpdir"
    ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
    ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
    ${bindir}/dsync-flist -q link-dups $dsynclist || true
}

function mirror() {
    log "Regenerating \"public\" mirror/ hardlink fun"
    DATE_SERIAL=$(date +"%Y%m%d01")
    FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} )
    if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
        SERIAL="$DATE_SERIAL"
    else
        SERIAL="$FILESOAPLUS1"
    fi
    date -u > ${TRACEFILE}
    echo "Using dak v1" >> ${TRACEFILE}
    echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
    echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
    cd ${mirrordir}
    rsync -aH --link-dest ${ftpdir} --delete --delete-after --ignore-errors ${ftpdir}/. .
}
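# Illustration only -- after a run, ${TRACEFILE} looks roughly like
# (sample values):
#
#   Sat Jan  1 12:34:56 UTC 2011
#   Using dak v1
#   Running on host: host.example.org
#   Archive serial: 2011010101
#
# which is why the awk in mirror() matches /serial/ and bumps field $3:
# the next serial is max(today's date serial, previous serial + 1).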
function expire() {
    log "Expiring old database dumps..."
    cd $base/backup
    $scriptsdir/expire_dumps -d . -p -f "dump_*"
}

function transitionsclean() {
    log "Removing out of date transitions..."
    cd $base
    dak transitions -c -a
}

function dm() {
    log "Updating DM html page"
    $scriptsdir/dm-monitor >$webdir/dm-uploaders.html
}

function bts() {
    log "Categorizing uncategorized bugs filed against ftp.debian.org"
    dak bts-categorize
}

function merkel2() {
    # Push dak@merkel so it syncs the projectb there. Returns immediately, the sync runs detached
    log "Trigger merkel/flotows $PGDATABASE sync"
    ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_projectb dak@merkel.debian.org sleep 1
    # Also trigger flotow, the ftpmaster test box
    ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_flotow_projectb dak@flotow.debconf.org sleep 1
}

function merkel3() {
    # Push dak@merkel to tell it to sync the dd accessible parts. Returns immediately, the sync runs detached
    log "Trigger merkels dd accessible parts sync"
    ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_ddaccess dak@merkel.debian.org sleep 1
}

function mirrorpush() {
    log "Starting the mirrorpush"
    date -u > /srv/backports-web.debian.org/underlay/mirrorstart
    echo "Using dak v1" >> /srv/backports-web.debian.org/underlay/mirrorstart
    echo "Running on host $(hostname -f)" >> /srv/backports-web.debian.org/underlay/mirrorstart
    sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
}

function i18n2() {
    log "Exporting package data foo for i18n project"
    STAMP=$(date "+%Y%m%d%H%M")
    mkdir -p ${scriptdir}/i18n/${STAMP}
    cd ${scriptdir}/i18n/${STAMP}
    dak control-suite -l stable > lenny
    dak control-suite -l testing > squeeze
    dak control-suite -l unstable > sid
    echo "${STAMP}" > timestamp
    gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 55BE302B --detach-sign -o timestamp.gpg timestamp
    rm -f md5sum
    md5sum * > md5sum
    cd ${webdir}/
    ln -sfT ${scriptdir}/i18n/${STAMP} i18n

    cd ${scriptdir}
    find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
}
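# Illustration only, not part of this file: consumers of this export can
# verify the signed timestamp the same way i18n1 above checks the data
# coming back from them, using the matching public keyring:
#
#   gpgv --keyring pubring.gpg timestamp.gpg timestamp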
function stats() {
    log "Updating stats data"
    cd $configdir
    #$scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
    #R --slave --vanilla < $base/misc/ftpstats.R
    dak stats arch-space > $webdir/arch-space
    dak stats pkg-nums > $webdir/pkg-nums
}

function aptftpcleanup() {
    log "Clean up apt-ftparchive's databases"
    cd $configdir
    apt-ftparchive -q clean apt.conf
}

function compress() {
    log "Compress old psql backups"
    cd $base/backup/
    find -maxdepth 1 -mindepth 1 -type f -name 'dump_pre_*' -mtime +2 -print0 | xargs -0 --no-run-if-empty rm

    # compress plain dumps older than 12 hours (720 minutes)
    find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
    while read dumpname; do
        echo "Compressing $dumpname"
        bzip2 -9fv "$dumpname"
    done
    find -maxdepth 1 -mindepth 1 -type f -name "dumpall_*" \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
    while read dumpname; do
        echo "Compressing $dumpname"
        bzip2 -9fv "$dumpname"
    done
    finddup -l -d $base/backup
}

function logstats() {
    $masterdir/tools/logs.py "$1"
}

# save timestamp when we start
function savetimestamp() {
    NOW=`date "+%Y.%m.%d-%H:%M:%S"`
    echo ${NOW} > "${dbdir}/dinstallstart"
}

function maillogfile() {
    cat "$LOGFILE" | mail -s "Log for dinstall run of ${NOW}" cron@ftp-master.debian.org
}

function renamelogfile() {
    if [ -f "${dbdir}/dinstallstart" ]; then
        NOW=$(cat "${dbdir}/dinstallstart")
        # maillogfile
        mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
        # logstats "$logdir/dinstall_${NOW}.log"
        bzip2 -9 "$logdir/dinstall_${NOW}.log"
    else
        error "Problem, I don't know when dinstall started, unable to do log statistics."
        NOW=`date "+%Y.%m.%d-%H:%M:%S"`
        # maillogfile
        mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
        bzip2 -9 "$logdir/dinstall_${NOW}.log"
    fi
}

function testingsourcelist() {
    dak ls -s testing -f heidi -r . | egrep 'source$' > ${webdir}/testing.list
}

# do a last run of process-unchecked before dinstall is on.
function process_unchecked() {
    log "Processing the unchecked queue"
    UNCHECKED_WITHOUT_LOCK="-p"
    do_unchecked
    sync_debbugs
}

# do a run of newstage only before dinstall is on.
function newstage() {
    log "Processing the newstage queue"
    UNCHECKED_WITHOUT_LOCK="-p"
    do_newstage
}

# Function to update a "statefile" telling people what we are doing
# (more or less).
#
# This should be called with the argument(s)
# - Status name we want to show.
#
function state() {
    RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
    cat >"${DINSTALLSTATE}" <<EOF
Dinstall start: ${DINSTALLBEGIN}
Current action: ${1}
Action start: ${RIGHTNOW}
EOF
}

# extract changelogs and stuff
function changelogs() {
    log "Extracting changelogs from source packages"
    dak make-changelog -e
    mkdir -p ${exportpublic}/changelogs
    cd ${exportpublic}/changelogs
    rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
    sudo -H -u archvsync /home/archvsync/runmirrors metadata > ~dak/runmirrors-metadata.log 2>&1 &
}
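# Illustration only, not part of this file: the stage driver is assumed to
# bracket each step with state(), e.g.
#
#   state "i18n 1"
#   i18n1
#   state "all done"
#
# which leaves ${DINSTALLSTATE} looking roughly like (sample values):
#
#   Dinstall start: Sat Jan  1 11:00:00 UTC 2011 (1293879600)
#   Current action: i18n 1
#   Action start: Sat Jan  1 12:34:56 UTC 2011 (1293885296)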