X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=config%2Fdebian%2Fdinstall.functions;h=567516d7206f44aca107c088328436ff196a66ba;hb=07811af814a433ff071d9adb85440c376f7885c3;hp=57c67c791984f8335a3a2f8309fd48265a7c5147;hpb=ca19ea22806872ba8360086b121c468689fe98df;p=dak.git

diff --git a/config/debian/dinstall.functions b/config/debian/dinstall.functions
index 57c67c79..567516d7 100644
--- a/config/debian/dinstall.functions
+++ b/config/debian/dinstall.functions
@@ -1,3 +1,4 @@
+# -*- mode:sh -*-
 # Timestamp. Used for dinstall stat graphs
 function ts() {
         echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
 }
@@ -36,27 +37,37 @@ function merkel1() {
 
 # Create the postgres dump files
 function pgdump_pre() {
-    log "Creating pre-daily-cron-job backup of projectb database..."
-    pg_dump projectb > $base/backup/dump_pre_$(date +%Y.%m.%d-%H:%M:%S)
+    log "Creating pre-daily-cron-job backup of $PGDATABASE database..."
+    pg_dump > $base/backup/dump_pre_$(date +%Y.%m.%d-%H:%M:%S)
 }
 
 function pgdump_post() {
-    log "Creating post-daily-cron-job backup of projectb database..."
+    log "Creating post-daily-cron-job backup of $PGDATABASE database..."
     cd $base/backup
     POSTDUMP=$(date +%Y.%m.%d-%H:%M:%S)
-    pg_dump projectb > $base/backup/dump_$POSTDUMP
-    pg_dumpall --globals-only > $base/backup/dumpall_$POSTDUMP
+    pg_dump > $base/backup/dump_$POSTDUMP
+    #pg_dumpall --globals-only > $base/backup/dumpall_$POSTDUMP
     ln -sf $base/backup/dump_$POSTDUMP current
-    ln -sf $base/backup/dumpall_$POSTDUMP currentall
+    #ln -sf $base/backup/dumpall_$POSTDUMP currentall
 }
 
 # Load the dak-dev projectb
 function pgdakdev() {
+    # Make sure to unset any possible psql variables so we don't drop the wrong
+    # f****** database by accident
+    local PGDATABASE
+    unset PGDATABASE
+    local PGHOST
+    unset PGHOST
+    local PGPORT
+    unset PGPORT
+    local PGUSER
+    unset PGUSER
     cd $base/backup
-    echo "drop database projectb" | psql -p 5433 template1
-    cat currentall | psql -p 5433 template1
-    createdb -p 5433 -T template0 projectb
-    fgrep -v '\connect' current | psql -p 5433 projectb
+    echo "drop database projectb" | psql -p 5434 template1
+    #cat currentall | psql -p 5433 template1
+    createdb -p 5434 -T template1 projectb
+    fgrep -v '\connect' current | psql -p 5434 projectb
 }
 
 # Updating various files
@@ -71,17 +82,18 @@ function updates() {
 # Process (oldstable)-proposed-updates "NEW" queue
 function punew_do() {
-    cd "${queuedir}/${1}"
     date -u -R >> REPORT
-    dak process-new -a -C COMMENTS >> REPORT || true
+    dak process-policy $1 | tee -a REPORT | mail -e -s "NEW changes in $1" debian-release@lists.debian.org
     echo >> REPORT
 }
 
 function punew() {
     log "Doing automated p-u-new processing"
+    cd "${queuedir}/p-u-new"
     punew_do "$1"
 }
 
 function opunew() {
     log "Doing automated o-p-u-new processing"
+    cd "${queuedir}/o-p-u-new"
     punew_do "$1"
 }
 
@@ -94,7 +106,7 @@ function i18n1() {
 
     # Now check if we still know about the packages for which they created the files
     # is the timestamp signed by us?
-    if $(gpgv --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
+    if $(gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
         # now read it. As its signed by us we are sure the content is what we expect, no need
         # to do more here. And we only test -d a directory on it anyway.
         TSTAMP=$(cat timestamp)
@@ -129,9 +141,9 @@ function cruft() {
     dak check-overrides
 }
 
-function msfl() {
-    log "Generating suite file lists for apt-ftparchive"
-    dak make-suite-file-list
+function dominate() {
+    log "Removing obsolete source and binary associations"
+    dak dominate
 }
 
 function filelist() {
@@ -142,30 +154,30 @@ function filelist() {
 
 function fingerprints() {
     log "Not updating fingerprints - scripts needs checking"
-#    log "Updating fingerprints"
-#    dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
+    log "Updating fingerprints"
+    dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
 
-#    OUTFILE=$(mktemp)
-#    dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
+    OUTFILE=$(mktemp)
+    dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
 
-#    if [ -s "${OUTFILE}" ]; then
-#        /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
-#To:
-#Subject: Debian Maintainers Keyring changes
-#Content-Type: text/plain; charset=utf-8
-#MIME-Version: 1.0
-#
-#The following changes to the debian-maintainers keyring have just been activated:
-#
-#$(cat $OUTFILE)
-#
-#Debian distribution maintenance software,
-#on behalf of the Keyring maintainers
-#
-#EOF
-#    fi
-#    rm -f "$OUTFILE"
+    if [ -s "${OUTFILE}" ]; then
+        /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
+To:
+Subject: Debian Maintainers Keyring changes
+Content-Type: text/plain; charset=utf-8
+MIME-Version: 1.0
+
+The following changes to the debian-maintainers keyring have just been activated:
+
+$(cat $OUTFILE)
+
+Debian distribution maintenance software,
+on behalf of the Keyring maintainers
+
+EOF
+    fi
+    rm -f "$OUTFILE"
 }
 
 function overrides() {
@@ -206,15 +218,6 @@ function dakcleanup() {
     dak clean-queues
 }
 
-function buildd() {
-    # Needs to be rebuilt, as files have moved. Due to unaccepts, we need to
-    # update this before wanna-build is updated.
-    log "Regenerating wanna-build/buildd information"
-    psql projectb -A -t -q -c "SELECT build_queue.path || '/' || build_queue_files.filename FROM build_queue_files LEFT JOIN build_queue ON (build_queue.id =build_queue_files.build_queue_id) WHERE queue_name = 'buildd' AND filename ~ 'd(sc|eb)$'" > $dbdir/dists/unstable_accepted.list
-    symlinks -d /srv/incoming.debian.org/buildd > /dev/null
-    apt-ftparchive generate apt.conf.buildd
-}
-
 function buildd_dir() {
     # Rebuilt the buildd dir to avoid long times of 403
     log "Regenerating the buildd incoming dir"
@@ -256,54 +259,59 @@ function mklslar() {
 }
 
 function mkmaintainers() {
-    log -n 'Creating Maintainers index ... '
+    log 'Creating Maintainers index ... '
 
     cd $indices
     dak make-maintainers ${scriptdir}/masterfiles/pseudo-packages.maintainers | \
-        sed -e "s/~[^ ]*\([ ]\)/\1/" | awk '{printf "%-20s ", $1; for (i=2; i<=NF; i++) printf "%s ", $i; printf "\n";}' > .new-maintainers
-
-    set +e
-    cmp .new-maintainers Maintainers >/dev/null
-    rc=$?
-    set -e
-    if [ $rc = 1 ] || [ ! -f Maintainers ] ; then
-        log -n "installing Maintainers ... "
+        sed -e "s/~[^ ]*\([ ]\)/\1/" | \
+        awk '{printf "%-20s ", $1; for (i=2; i<=NF; i++) printf "%s ", $i; printf "\n";}' > .new-maintainers
+
+    if ! cmp -s .new-maintainers Maintainers || [ ! -f Maintainers ]; then
+        log "installing Maintainers ... "
         mv -f .new-maintainers Maintainers
         gzip --rsyncable -9v <Maintainers >.new-maintainers.gz
         mv -f .new-maintainers.gz Maintainers.gz
-    elif [ $rc = 0 ] ; then
-        log '(same as before)'
-        rm -f .new-maintainers
     else
-        log cmp returned $rc
-        false
+        rm -f .new-maintainers
+    fi
+}
+
+function mkuploaders() {
+    log 'Creating Uploaders index ... '
+
+    cd $indices
+    dak make-maintainers -u ${scriptdir}/masterfiles/pseudo-packages.maintainers | \
+        sed -e "s/~[^ ]*\([ ]\)/\1/" | \
+        awk '{printf "%-20s ", $1; for (i=2; i<=NF; i++) printf "%s ", $i; printf "\n";}' > .new-uploaders
+
+    if ! cmp -s .new-uploaders Uploaders || [ ! -f Uploaders ]; then
+        log "installing Uploaders ... "
+        mv -f .new-uploaders Uploaders
+        gzip --rsyncable -9v <Uploaders >.new-uploaders.gz
+        mv -f .new-uploaders.gz Uploaders.gz
+    else
+        rm -f .new-uploaders
     fi
 }
 
 function copyoverrides() {
     log 'Copying override files into public view ...'
 
-    for f in $copyoverrides ; do
+    for ofile in $copyoverrides ; do
        cd $overridedir
-       chmod g+w override.$f
+       chmod g+w override.$ofile
 
        cd $indices
-       rm -f .newover-$f.gz
-       pc="`gzip 2>&1 -9nv <$overridedir/override.$f >.newover-$f.gz`"
-       set +e
-       nf=override.$f.gz
-       cmp -s .newover-$f.gz $nf
-       rc=$?
-       set -e
-       if [ $rc = 0 ]; then
-           rm -f .newover-$f.gz
-       elif [ $rc = 1 -o ! -f $nf ]; then
-           log " installing new $nf $pc"
-           mv -f .newover-$f.gz $nf
-           chmod g+w $nf
-       else
-           log $? $pc
-           exit 1
+
+       newofile=override.$ofile.gz
+       rm -f .newover-$ofile.gz
+       pc="`gzip 2>&1 -9nv <$overridedir/override.$ofile >.newover-$ofile.gz`"
+       if ! cmp -s .newover-$ofile.gz $newofile || [ ! -f $newofile ]; then
+           log " installing new $newofile $pc"
+           mv -f .newover-$ofile.gz $newofile
+           chmod g+w $newofile
+       else
+           rm -f .newover-$ofile.gz
        fi
    done
 }
@@ -314,8 +322,8 @@ function mkfilesindices() {
     ARCHLIST=$(tempfile)
 
-    log "Querying projectb..."
-    echo 'SELECT l.path, f.filename, a.arch_string FROM location l JOIN files f ON (f.location = l.id) LEFT OUTER JOIN (binaries b JOIN architecture a ON (b.architecture = a.id)) ON (f.id = b.file)' | psql projectb -At | sed 's/|//;s,^/srv/ftp.debian.org/ftp,.,' | sort >$ARCHLIST
+    log "Querying $PGDATABASE..."
+    echo 'SELECT l.path, f.filename, a.arch_string FROM location l JOIN files f ON (f.location = l.id) LEFT OUTER JOIN (binaries b JOIN architecture a ON (b.architecture = a.id)) ON (f.id = b.file)' | psql -At | sed 's/|//;s,^/srv/ftp-master.debian.org/ftp,.,' | sort >$ARCHLIST
 
     includedirs () {
         perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
@@ -324,7 +332,7 @@ function mkfilesindices() {
         perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
     }
 
-    log "Generating sources list
+    log "Generating sources list"
     (
         sed -n 's/|$//p' $ARCHLIST
         cd $base/ftp
@@ -332,7 +340,7 @@ function mkfilesindices() {
         find ./dists \! -type d | grep "/source/"
     ) | sort -u | gzip --rsyncable -9 > source.list.gz
 
-    log "Generating arch lists
+    log "Generating arch lists"
     ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
     for a in $ARCHES; do
@@ -348,12 +356,12 @@ function mkfilesindices() {
 
     log "Generating suite lists"
     suite_list () {
-        printf 'SELECT DISTINCT l.path, f.filename FROM (SELECT sa.source AS source FROM src_associations sa WHERE sa.suite = %d UNION SELECT b.source AS source FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) WHERE ba.suite = %d) s JOIN dsc_files df ON (s.source = df.source) JOIN files f ON (df.file = f.id) JOIN location l ON (f.location = l.id)\n' $1 $1 | psql -F' ' -A -t projectb
+        printf 'SELECT DISTINCT l.path, f.filename FROM (SELECT sa.source AS source FROM src_associations sa WHERE sa.suite = %d UNION SELECT b.source AS source FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) WHERE ba.suite = %d) s JOIN dsc_files df ON (s.source = df.source) JOIN files f ON (df.file = f.id) JOIN location l ON (f.location = l.id)\n' $1 $1 | psql -F' ' -A -t
 
-        printf 'SELECT l.path, f.filename FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) JOIN files f ON (b.file = f.id) JOIN location l ON (f.location = l.id) WHERE ba.suite = %d\n' $1 | psql -F' ' -A -t projectb
+        printf 'SELECT l.path, f.filename FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) JOIN files f ON (b.file = f.id) JOIN location l ON (f.location = l.id) WHERE ba.suite = %d\n' $1 | psql -F' ' -A -t
     }
 
-    printf 'SELECT id, suite_name FROM suite\n' | psql -F' ' -At projectb |
+    printf 'SELECT id, suite_name FROM suite\n' | psql -F' ' -At |
     while read id suite; do
         [ -e $base/ftp/dists/$suite ] || continue
         (
@@ -364,12 +372,11 @@ function mkfilesindices() {
                 [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
             done
         )
-        suite_list $id | tr -d ' ' | sed 's,^/srv/ftp.debian.org/ftp,.,'
+        suite_list $id | tr -d ' ' | sed 's,^/srv/ftp-master.debian.org/ftp,.,'
         ) | sort -u | gzip --rsyncable -9 > suite-${suite}.list.gz
     done
 
     log "Finding everything on the ftp site to generate sundries"
-
     (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
 
     rm -f sundries.list
@@ -390,7 +397,7 @@ function mkfilesindices() {
         done
     )
 
-    (cat ../arch-i386.files ../arch-amd64.files; zcat suite-oldstable.list.gz suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-squeeze.list.gz) |
+    (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-squeeze.list.gz) |
     sort -u | poolfirst > ../typical.files
 
     rm -f $ARCHLIST
@@ -409,26 +416,23 @@ function mkchecksums() {
     ${bindir}/dsync-flist -q link-dups $dsynclist || true
 }
 
-function scripts() {
-    log "Running various scripts from $scriptsdir"
-    mkmaintainers
-    copyoverrides
-    mklslar
-    mkfilesindices
-    mkchecksums
-}
-
 function mirror() {
     log "Regenerating \"public\" mirror/ hardlink fun"
+    DATE_SERIAL=$(date +"%Y%m%d01")
+    FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} )
+    if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
+        SERIAL="$DATE_SERIAL"
+    else
+        SERIAL="$FILESOAPLUS1"
+    fi
+    date -u > ${TRACEFILE}
+    echo "Using dak v1" >> ${TRACEFILE}
+    echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
+    echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
     cd ${mirrordir}
     rsync -aH --link-dest ${ftpdir} --delete --delete-after --ignore-errors ${ftpdir}/. .
 }
 
-function wb() {
-    log "Trigger daily wanna-build run"
-    ssh -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 wbadm@buildd /org/wanna-build/trigger.daily || echo "W-B trigger.daily failed" | mail -s "W-B Daily trigger failed" ftpmaster@ftp-master.debian.org
-}
-
 function expire() {
     log "Expiring old database dumps..."
     cd $base/backup
@@ -464,7 +468,7 @@ function bts() {
 
 function merkel2() {
     # Push dak@merkel so it syncs the projectb there. Returns immediately, the sync runs detached
-    log "Trigger merkel/flotows projectb sync"
+    log "Trigger merkel/flotows $PGDATABASE sync"
     ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_projectb dak@merkel.debian.org sleep 1
     # Also trigger flotow, the ftpmaster test box
     ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_flotow_projectb dak@flotow.debconf.org sleep 1
@@ -493,7 +497,7 @@ function i18n2() {
     dak control-suite -l testing > squeeze
     dak control-suite -l unstable > sid
     echo "${STAMP}" > timestamp
-    gpg --secret-keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 55BE302B --detach-sign -o timestamp.gpg timestamp
+    gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 55BE302B --detach-sign -o timestamp.gpg timestamp
     rm -f md5sum
     md5sum * > md5sum
     cd ${webdir}/
@@ -577,3 +581,25 @@ function process_unchecked() {
     do_unchecked
     sync_debbugs
 }
+
+# do a run of newstage only before dinstall is on.
+function newstage() {
+    log "Processing the newstage queue"
+    UNCHECKED_WITHOUT_LOCK="-p"
+    do_newstage
+}
+
+# Function to update a "statefile" telling people what we are doing
+# (more or less).
+#
+# This should be called with the argument(s)
+# - Status name we want to show.
+#
+function state() {
+    RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
+    cat >"${DINSTALLSTATE}" <<EOF