X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=config%2Fdebian%2Fdinstall.functions;h=f3ab3816cab27390a13f6fd0c35782e5c29665cf;hb=ec257c02a5d62fd27844c70814acd9616b24b4c8;hp=463dec3799a5d227692d43bf3ee9c3f8622d4137;hpb=4fbe76ca4e30a4515d2111a27d3067259f890355;p=dak.git

diff --git a/config/debian/dinstall.functions b/config/debian/dinstall.functions
index 463dec37..f3ab3816 100644
--- a/config/debian/dinstall.functions
+++ b/config/debian/dinstall.functions
@@ -4,10 +4,14 @@ function ts() {
     echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
 }
 
-# Cleanup actions
+# Remove all locks
+function remove_locks() {
+    rm -f $LOCK_DAILY $LOCK_ACCEPTED $LOCK_NEW
+}
+
+# trap handler for cleaning up on signal / error
 function cleanup() {
-    rm -f ${LOCK_DAILY}
-    rm -f ${LOCK_ACCEPTED}
+    remove_locks
 }
 
 # If we error out this one is called, *FOLLOWED* by cleanup above
@@ -22,7 +26,11 @@ function onerror() {
     fi
     subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
 
-    cat "${STAGEFILE}.log" | mail -s "${subject}" -a "X-Debian: DAK" cron@ftp-master.debian.org
+    if [ -r "${STAGEFILE}.log" ]; then
+        cat "${STAGEFILE}.log"
+    else
+        echo "file ${STAGEFILE}.log does not exist, sorry"
+    fi | mail -s "${subject}" -a "X-Debian: DAK" cron@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
 }
 
 ########################################################################
@@ -32,32 +40,42 @@ function onerror() {
 # pushing merkels QA user, part one
 function merkel1() {
     log "Telling merkels QA user that we start dinstall"
-    ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@merkel.debian.org sleep 1
+    ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1
 }
 
 # Create the postgres dump files
 function pgdump_pre() {
-    log "Creating pre-daily-cron-job backup of projectb database..."
-    pg_dump projectb > $base/backup/dump_pre_$(date +%Y.%m.%d-%H:%M:%S)
+    log "Creating pre-daily-cron-job backup of $PGDATABASE database..."
+    pg_dump > $base/backup/dump_pre_$(date +%Y.%m.%d-%H:%M:%S)
 }
 
 function pgdump_post() {
-    log "Creating post-daily-cron-job backup of projectb database..."
+    log "Creating post-daily-cron-job backup of $PGDATABASE database..."
     cd $base/backup
     POSTDUMP=$(date +%Y.%m.%d-%H:%M:%S)
-    pg_dump projectb > $base/backup/dump_$POSTDUMP
-    pg_dumpall --globals-only > $base/backup/dumpall_$POSTDUMP
+    pg_dump > $base/backup/dump_$POSTDUMP
+    #pg_dumpall --globals-only > $base/backup/dumpall_$POSTDUMP
     ln -sf $base/backup/dump_$POSTDUMP current
-    ln -sf $base/backup/dumpall_$POSTDUMP currentall
+    #ln -sf $base/backup/dumpall_$POSTDUMP currentall
 }
 
 # Load the dak-dev projectb
 function pgdakdev() {
+    # Make sure to unset any possible psql variables so we don't drop the wrong
+    # f****** database by accident
+    local PGDATABASE
+    unset PGDATABASE
+    local PGHOST
+    unset PGHOST
+    local PGPORT
+    unset PGPORT
+    local PGUSER
+    unset PGUSER
     cd $base/backup
-    echo "drop database projectb" | psql -p 5433 template1
-    cat currentall | psql -p 5433 template1
-    createdb -p 5433 -T template0 projectb
-    fgrep -v '\connect' current | psql -p 5433 projectb
+    echo "drop database projectb" | psql -p 5434 template1
+    #cat currentall | psql -p 5433 template1
+    createdb -p 5434 -T template1 projectb
+    fgrep -v '\connect' current | psql -p 5434 projectb
 }
 
 # Updating various files
@@ -73,7 +91,7 @@ function updates() {
 # Process (oldstable)-proposed-updates "NEW" queue
 function punew_do() {
     date -u -R >> REPORT
-    dak process-policy $1 | tee -a REPORT | mail -e -s "NEW changes in $1" debian-release@lists.debian.org
+    dak process-policy $1 | tee -a REPORT | mail -a "X-Debian: DAK" -e -s "NEW changes in $1" debian-release@lists.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
     echo >> REPORT
 }
 function punew() {
@@ -96,7 +114,7 @@ function i18n1() {
 
     # Now check if we still know about the packages for which they created the files
     # is the timestamp signed by us?
-    if $(gpgv --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
+    if $(gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
         # now read it. As its signed by us we are sure the content is what we expect, no need
         # to do more here. And we only test -d a directory on it anyway.
         TSTAMP=$(cat timestamp)
@@ -114,15 +132,15 @@ function i18n1() {
                 done
             else
                 echo "ARRRR, bad guys, wrong files, ARRR"
-                echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." debian-l10n-devel@lists.alioth.debian.org
+                echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
             fi
         else
             echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
-            echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." debian-l10n-devel@lists.alioth.debian.org
+            echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
         fi
     else
         echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
-        echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -s "You can't keep blaming yourself. Just blame yourself once, and move on." debian-l10n-devel@lists.alioth.debian.org
+        echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -a "X-Debian: DAK" -s "You can't keep blaming yourself. Just blame yourself once, and move on." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
     fi
 }
 
@@ -156,6 +174,7 @@ From: Debian FTP Masters
 To:
 Subject: Debian Maintainers Keyring changes
 Content-Type: text/plain; charset=utf-8
+X-Debian: DAK
 MIME-Version: 1.0
 
 The following changes to the debian-maintainers keyring have just been activated:
@@ -188,8 +207,8 @@ function mpfm() {
 function packages() {
     log "Generating Packages and Sources files"
     cd $configdir
-    GZIP='--rsyncable' ; export GZIP
-    apt-ftparchive generate apt.conf
+    #apt-ftparchive generate apt.conf
+    dak generate-packages-sources
 }
 
 function pdiff() {
@@ -238,13 +257,13 @@ function mklslar() {
         mv -f ${FILENAME}.gz ${FILENAME}.old.gz
         mv -f .${FILENAME}.new ${FILENAME}
         rm -f ${FILENAME}.patch.gz
-        zcat ${FILENAME}.old.gz | diff -u - ${FILENAME} | gzip --rsyncable -9cfn - >${FILENAME}.patch.gz
+        zcat ${FILENAME}.old.gz | diff -u - ${FILENAME} | gzip -9cfn - >${FILENAME}.patch.gz
         rm -f ${FILENAME}.old.gz
     else
        mv -f .${FILENAME}.new ${FILENAME}
     fi
 
-    gzip --rsyncable -9cfN ${FILENAME} >${FILENAME}.gz
+    gzip -9cfN ${FILENAME} >${FILENAME}.gz
     rm -f ${FILENAME}
 }
 
@@ -259,13 +278,31 @@ function mkmaintainers() {
     if ! cmp -s .new-maintainers Maintainers || [ ! -f Maintainers ]; then
            log "installing Maintainers ... "
            mv -f .new-maintainers Maintainers
-           gzip --rsyncable -9v .new-maintainers.gz
+           gzip -9v .new-maintainers.gz
            mv -f .new-maintainers.gz Maintainers.gz
     else
         rm -f .new-maintainers
     fi
 }
 
+function mkuploaders() {
+    log 'Creating Uploaders index ... '
+
+    cd $indices
+    dak make-maintainers -u ${scriptdir}/masterfiles/pseudo-packages.maintainers | \
+        sed -e "s/~[^ ]*\([ ]\)/\1/" | \
+        awk '{printf "%-20s ", $1; for (i=2; i<=NF; i++) printf "%s ", $i; printf "\n";}' > .new-uploaders
+
+    if ! cmp -s .new-uploaders Uploaders || [ ! -f Uploaders ]; then
+           log "installing Uploaders ... "
+           mv -f .new-uploaders Uploaders
+           gzip -9v .new-uploaders.gz
+           mv -f .new-uploaders.gz Uploaders.gz
+    else
+        rm -f .new-uploaders
+    fi
+}
+
 function copyoverrides() {
    log 'Copying override files into public view ...'
 
@@ -294,8 +331,8 @@ function mkfilesindices() {
 
    ARCHLIST=$(tempfile)
 
-   log "Querying projectb..."
-   echo 'SELECT l.path, f.filename, a.arch_string FROM location l JOIN files f ON (f.location = l.id) LEFT OUTER JOIN (binaries b JOIN architecture a ON (b.architecture = a.id)) ON (f.id = b.file)' | psql projectb -At | sed 's/|//;s,^/srv/ftp.debian.org/ftp,.,' | sort >$ARCHLIST
+   log "Querying $PGDATABASE..."
+   echo 'SELECT l.path, f.filename, a.arch_string FROM location l JOIN files f ON (f.location = l.id) LEFT OUTER JOIN (binaries b JOIN architecture a ON (b.architecture = a.id)) ON (f.id = b.file)' | psql -At | sed 's/|//;s,^/srv/ftp-master.debian.org/ftp,.,' | sort >$ARCHLIST
 
    includedirs () {
        perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
@@ -310,7 +347,7 @@ function mkfilesindices() {
        cd $base/ftp
        find ./dists -maxdepth 1 \! -type d
        find ./dists \! -type d | grep "/source/"
-   ) | sort -u | gzip --rsyncable -9 > source.list.gz
+   ) | sort -u | gzip -9 > source.list.gz
 
    log "Generating arch lists"
 
@@ -322,18 +359,18 @@ function mkfilesindices() {
         cd $base/ftp
         find ./dists -maxdepth 1 \! -type d
         find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
-       ) | sort -u | gzip --rsyncable -9 > arch-$a.list.gz
+       ) | sort -u | gzip -9 > arch-$a.list.gz
    done
 
    log "Generating suite lists"
 
   suite_list () {
-       printf 'SELECT DISTINCT l.path, f.filename FROM (SELECT sa.source AS source FROM src_associations sa WHERE sa.suite = %d UNION SELECT b.source AS source FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) WHERE ba.suite = %d) s JOIN dsc_files df ON (s.source = df.source) JOIN files f ON (df.file = f.id) JOIN location l ON (f.location = l.id)\n' $1 $1 | psql -F' ' -A -t projectb
+       printf 'SELECT DISTINCT l.path, f.filename FROM (SELECT sa.source AS source FROM src_associations sa WHERE sa.suite = %d UNION SELECT b.source AS source FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) WHERE ba.suite = %d) s JOIN dsc_files df ON (s.source = df.source) JOIN files f ON (df.file = f.id) JOIN location l ON (f.location = l.id)\n' $1 $1 | psql -F' ' -A -t
 
-       printf 'SELECT l.path, f.filename FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) JOIN files f ON (b.file = f.id) JOIN location l ON (f.location = l.id) WHERE ba.suite = %d\n' $1 | psql -F' ' -A -t projectb
+       printf 'SELECT l.path, f.filename FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) JOIN files f ON (b.file = f.id) JOIN location l ON (f.location = l.id) WHERE ba.suite = %d\n' $1 | psql -F' ' -A -t
    }
 
-   printf 'SELECT id, suite_name FROM suite\n' | psql -F' ' -At projectb |
+   printf 'SELECT id, suite_name FROM suite\n' | psql -F' ' -At |
    while read id suite; do
        [ -e $base/ftp/dists/$suite ] || continue
        (
@@ -344,8 +381,8 @@ function mkfilesindices() {
                [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
            done
        )
-       suite_list $id | tr -d ' ' | sed 's,^/srv/ftp.debian.org/ftp,.,'
-   ) | sort -u | gzip --rsyncable -9 > suite-${suite}.list.gz
+       suite_list $id | tr -d ' ' | sed 's,^/srv/ftp-master.debian.org/ftp,.,'
+   ) | sort -u | gzip -9 > suite-${suite}.list.gz
    done
 
    log "Finding everything on the ftp site to generate sundries"
@@ -365,11 +402,11 @@ function mkfilesindices() {
 
    (cd $base/ftp/
    for dist in sid squeeze; do
-       find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip --rsyncable -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
+       find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
    done
    )
 
-   (cat ../arch-i386.files ../arch-amd64.files; zcat suite-oldstable.list.gz suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-squeeze.list.gz) |
+   (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-squeeze.list.gz) |
    sort -u | poolfirst > ../typical.files
 
    rm -f $ARCHLIST
@@ -384,15 +421,23 @@ function mkchecksums() {
 
    cd "$ftpdir"
    ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
-   ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n --rsyncable > ${md5list}.gz
+   ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
    ${bindir}/dsync-flist -q link-dups $dsynclist || true
 }
 
 function mirror() {
    log "Regenerating \"public\" mirror/ hardlink fun"
-   date -u > ${ftpdir}/project/trace/ftp-master.debian.org
-   echo "Using dak v1" >> ${ftpdir}/project/trace/ftp-master.debian.org
-   echo "Running on host: $(hostname -f)" >> ${ftpdir}/project/trace/ftp-master.debian.org
+   DATE_SERIAL=$(date +"%Y%m%d01")
+   FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} )
+   if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
+       SERIAL="$DATE_SERIAL"
+   else
+       SERIAL="$FILESOAPLUS1"
+   fi
+   date -u > ${TRACEFILE}
+   echo "Using dak v1" >> ${TRACEFILE}
+   echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
+   echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
    cd ${mirrordir}
    rsync -aH --link-dest ${ftpdir} --delete --delete-after --ignore-errors ${ftpdir}/. .
 }
@@ -409,17 +454,6 @@ function transitionsclean() {
    dak transitions -c -a
 }
 
-function reports() {
-    # Send a report on NEW/BYHAND packages
-    log "Nagging ftpteam about NEW/BYHAND packages"
-    dak queue-report | mail -e -s "NEW and BYHAND on $(date +%D)" ftpmaster@ftp-master.debian.org
-    # and one on crufty packages
-    log "Sending information about crufty packages"
-    dak cruft-report > $webdir/cruft-report-daily.txt
-    dak cruft-report -s experimental >> $webdir/cruft-report-daily.txt
-    cat $webdir/cruft-report-daily.txt | mail -e -s "Debian archive cruft report for $(date +%D)" ftpmaster@ftp-master.debian.org
-}
-
 function dm() {
    log "Updating DM html page"
    $scriptsdir/dm-monitor >$webdir/dm-uploaders.html
@@ -432,7 +466,7 @@ function bts() {
 
 function merkel2() {
    # Push dak@merkel so it syncs the projectb there. Returns immediately, the sync runs detached
-   log "Trigger merkel/flotows projectb sync"
+   log "Trigger merkel/flotows $PGDATABASE sync"
    ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_projectb dak@merkel.debian.org sleep 1
    # Also trigger flotow, the ftpmaster test box
    ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_flotow_projectb dak@flotow.debconf.org sleep 1
@@ -461,7 +495,7 @@ function i18n2() {
    dak control-suite -l testing > squeeze
    dak control-suite -l unstable > sid
    echo "${STAMP}" > timestamp
-   gpg --secret-keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 55BE302B --detach-sign -o timestamp.gpg timestamp
+   gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 55BE302B --detach-sign -o timestamp.gpg timestamp
    rm -f md5sum
    md5sum * > md5sum
    cd ${webdir}/
@@ -515,7 +549,7 @@ function savetimestamp() {
 }
 
 function maillogfile() {
-    cat "$LOGFILE" | mail -s "Log for dinstall run of ${NOW}" cron@ftp-master.debian.org
+    cat "$LOGFILE" | mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" cron@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
 }
 
 function renamelogfile() {
@@ -567,3 +601,13 @@ Current action: ${1}
 Action start: ${RIGHTNOW}
 EOF
 }
+
+# extract changelogs and stuff
+function changelogs() {
+    log "Extracting changelogs"
+    dak make-changelog -e
+    mkdir -p ${exportpublic}/changelogs
+    cd ${exportpublic}/changelogs
+    rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
+    sudo -H -u archvsync /home/archvsync/runmirrors metaftpdo > ~dak/runmirrors-metadata.log 2>&1 &
+}
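
The onerror() hunk above pipes the output of a whole if/else block into a single mail invocation. A minimal standalone sketch of that idiom follows; the stage-file path and recipient are made up for illustration and are not part of the patch:

#!/bin/bash
# Sketch of the "if ... fi | mail" pattern introduced in onerror() above.
# The stdout of the entire compound command is what gets piped, so exactly
# one mail is sent whether or not the log file is readable.
STAGEFILE=/tmp/example-stage            # hypothetical path, illustration only
if [ -r "${STAGEFILE}.log" ]; then
    cat "${STAGEFILE}.log"
else
    echo "file ${STAGEFILE}.log does not exist, sorry"
fi | mail -s "dinstall error report (example)" somebody@example.org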