X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=config%2Fdebian%2Fdinstall.functions;h=cac7c7c5ecef56db68f79c36f7b45585d8315d27;hb=8e32a7b21801472eb15b034e407ea04de053feb9;hp=0ad674ea1d39a5cbe2a8a8a8fbbcc888ebebb154;hpb=64f709a508e27db13034026b86f0c794b0574da3;p=dak.git diff --git a/config/debian/dinstall.functions b/config/debian/dinstall.functions index 0ad674ea..cac7c7c5 100644 --- a/config/debian/dinstall.functions +++ b/config/debian/dinstall.functions @@ -4,10 +4,14 @@ function ts() { echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)" } -# Cleanup actions -function cleanup() { - rm -f ${LOCK_DAILY} - rm -f ${LOCK_ACCEPTED} +# Remove daily lock +function remove_daily_lock() { + rm -f $LOCK_DAILY +} + +# Remove all locks +function remove_all_locks() { + rm -f $LOCK_DAILY $LOCK_ACCEPTED $LOCK_NEW } # If we error out this one is called, *FOLLOWED* by cleanup above @@ -22,7 +26,11 @@ function onerror() { fi subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)" - cat "${STAGEFILE}.log" | mail -s "${subject}" -a "X-Debian: DAK" cron@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org + if [ -r "${STAGEFILE}.log" ]; then + cat "${STAGEFILE}.log" + else + echo "file ${STAGEFILE}.log does not exist, sorry" + fi | mail -s "${subject}" -a "X-Debian: DAK" cron@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org } ######################################################################## @@ -30,46 +38,11 @@ function onerror() { ######################################################################## # pushing merkels QA user, part one -function merkel1() { - log "Telling merkels QA user that we start dinstall" +function qa1() { + log "Telling QA user that we start dinstall" ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1 } -# Create the postgres dump files -function pgdump_pre() { - log "Creating pre-daily-cron-job backup of $PGDATABASE database..." - pg_dump > $base/backup/dump_pre_$(date +%Y.%m.%d-%H:%M:%S) -} - -function pgdump_post() { - log "Creating post-daily-cron-job backup of $PGDATABASE database..." - cd $base/backup - POSTDUMP=$(date +%Y.%m.%d-%H:%M:%S) - pg_dump > $base/backup/dump_$POSTDUMP - #pg_dumpall --globals-only > $base/backup/dumpall_$POSTDUMP - ln -sf $base/backup/dump_$POSTDUMP current - #ln -sf $base/backup/dumpall_$POSTDUMP currentall -} - -# Load the dak-dev projectb -function pgdakdev() { - # Make sure to unset any possible psql variables so we don't drop the wrong - # f****** database by accident - local PGDATABASE - unset PGDATABASE - local PGHOST - unset PGHOST - local PGPORT - unset PGPORT - local PGUSER - unset PGUSER - cd $base/backup - echo "drop database projectb" | psql -p 5434 template1 - #cat currentall | psql -p 5433 template1 - createdb -p 5434 -T template1 projectb - fgrep -v '\connect' current | psql -p 5434 projectb -} - # Updating various files function updates() { log "Updating Bugs docu, Mirror list and mailing-lists.txt" @@ -115,7 +88,7 @@ function i18n1() { # Lets check! if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then # Yay, worked, lets copy around - for dir in squeeze sid; do + for dir in wheezy sid; do if [ -d dists/${dir}/ ]; then cd dists/${dir}/main/i18n rsync -aq --delete --delete-after . ${ftpdir}/dists/${dir}/main/i18n/. 
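Note on the onerror hunk above: the rewritten handler pipes a whole if/else
block into a single mail invocation, so the fallback message and the real log
travel through the same pipeline and one mail is sent either way. A minimal
standalone sketch of that construct (not from dak.git; the file name is
generic and tr stands in for the patch's mail command):

    #!/bin/bash
    # Both branches of the compound statement feed the same pipe.
    logfile="/tmp/does-not-exist.log"   # stand-in for "${STAGEFILE}.log"
    if [ -r "${logfile}" ]; then
        cat "${logfile}"
    else
        echo "file ${logfile} does not exist, sorry"
    fi | tr 'a-z' 'A-Z'                 # stand-in for the mail pipeline
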
@@ -152,8 +125,6 @@ function filelist() {
 }
 
 function fingerprints() {
- log "Not updating fingerprints - scripts needs checking"
-
 log "Updating fingerprints"
 dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
 
@@ -199,9 +170,9 @@ function mpfm() {
 function packages() {
 log "Generating Packages and Sources files"
 cd $configdir
- GZIP='--rsyncable' ; export GZIP
 #apt-ftparchive generate apt.conf
 dak generate-packages-sources
+ dak contents generate
 }
 
 function pdiff() {
@@ -250,13 +221,13 @@ function mklslar() {
 mv -f ${FILENAME}.gz ${FILENAME}.old.gz
 mv -f .${FILENAME}.new ${FILENAME}
 rm -f ${FILENAME}.patch.gz
- zcat ${FILENAME}.old.gz | diff -u - ${FILENAME} | gzip --rsyncable -9cfn - >${FILENAME}.patch.gz
+ zcat ${FILENAME}.old.gz | diff -u - ${FILENAME} | gzip -9cfn - >${FILENAME}.patch.gz
 rm -f ${FILENAME}.old.gz
 else
 mv -f .${FILENAME}.new ${FILENAME}
 fi
 
- gzip --rsyncable -9cfN ${FILENAME} >${FILENAME}.gz
+ gzip -9cfN ${FILENAME} >${FILENAME}.gz
 rm -f ${FILENAME}
 }
 
@@ -271,7 +242,7 @@ function mkmaintainers() {
 if ! cmp -s .new-maintainers Maintainers || [ ! -f Maintainers ]; then
 log "installing Maintainers ... "
 mv -f .new-maintainers Maintainers
- gzip --rsyncable -9v .new-maintainers.gz
+ gzip -9v .new-maintainers.gz
 mv -f .new-maintainers.gz Maintainers.gz
 else
 rm -f .new-maintainers
@@ -289,7 +260,7 @@ function mkuploaders() {
 if ! cmp -s .new-uploaders Uploaders || [ ! -f Uploaders ]; then
 log "installing Uploaders ... "
 mv -f .new-uploaders Uploaders
- gzip --rsyncable -9v .new-uploaders.gz
+ gzip -9v .new-uploaders.gz
 mv -f .new-uploaders.gz Uploaders.gz
 else
 rm -f .new-uploaders
@@ -324,7 +295,7 @@ function mkfilesindices() {
 
 ARCHLIST=$(tempfile)
 
- log "Querying $PGDATABASE..."
+ log "Querying postgres"
 echo 'SELECT l.path, f.filename, a.arch_string FROM location l JOIN files f ON (f.location = l.id) LEFT OUTER JOIN (binaries b JOIN architecture a ON (b.architecture = a.id)) ON (f.id = b.file)' | psql -At | sed 's/|//;s,^/srv/ftp-master.debian.org/ftp,.,' | sort >$ARCHLIST
 
 includedirs () {
@@ -340,7 +311,7 @@ function mkfilesindices() {
 cd $base/ftp
 find ./dists -maxdepth 1 \! -type d
 find ./dists \! -type d | grep "/source/"
- ) | sort -u | gzip --rsyncable -9 > source.list.gz
+ ) | sort -u | gzip -9 > source.list.gz
 
 log "Generating arch lists"
 
@@ -352,7 +323,7 @@ function mkfilesindices() {
 cd $base/ftp
 find ./dists -maxdepth 1 \! -type d
 find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
- ) | sort -u | gzip --rsyncable -9 > arch-$a.list.gz
+ ) | sort -u | gzip -9 > arch-$a.list.gz
 done
 
 log "Generating suite lists"
@@ -375,7 +346,7 @@ function mkfilesindices() {
 done
 )
 suite_list $id | tr -d ' ' | sed 's,^/srv/ftp-master.debian.org/ftp,.,'
- ) | sort -u | gzip --rsyncable -9 > suite-${suite}.list.gz
+ ) | sort -u | gzip -9 > suite-${suite}.list.gz
 done
 
 log "Finding everything on the ftp site to generate sundries"
@@ -394,12 +365,12 @@ function mkfilesindices() {
 done
 
 (cd $base/ftp/
- for dist in sid squeeze; do
- find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip --rsyncable -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
+ for dist in sid wheezy; do
+ find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
 done
 )
 
- (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-squeeze.list.gz) |
+ (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-wheezy.list.gz) |
 sort -u | poolfirst > ../typical.files
 
 rm -f $ARCHLIST
@@ -414,7 +385,7 @@ function mkchecksums() {
 
 cd "$ftpdir"
 ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
- ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n --rsyncable > ${md5list}.gz
+ ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
 ${bindir}/dsync-flist -q link-dups $dsynclist || true
 }
 
@@ -432,7 +403,7 @@ function mirror() {
 echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
 echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
 cd ${mirrordir}
- rsync -aH --link-dest ${ftpdir} --delete --delete-after --ignore-errors ${ftpdir}/. .
+ rsync -aH --link-dest ${ftpdir} --delete --delete-after --delete-excluded --exclude Packages.*.new --exclude Sources.*.new --ignore-errors ${ftpdir}/. .
 }
 
 function expire() {
@@ -457,18 +428,10 @@ function bts() {
 dak bts-categorize
 }
 
-function merkel2() {
- # Push dak@merkel so it syncs the projectb there. Returns immediately, the sync runs detached
- log "Trigger merkel/flotows $PGDATABASE sync"
- ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_projectb dak@merkel.debian.org sleep 1
- # Also trigger flotow, the ftpmaster test box
- ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_flotow_projectb dak@flotow.debconf.org sleep 1
-}
-
-function merkel3() {
- # Push dak@merkel to tell it to sync the dd accessible parts. Returns immediately, the sync runs detached
- log "Trigger merkels dd accessible parts sync"
- ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_ddaccess dak@merkel.debian.org sleep 1
+function ddaccess() {
+ # Tell our dd accessible mirror to sync itself up. Including ftp dir.
+ log "Trigger dd accessible parts sync including ftp dir"
+ ssh -o Batchmode=yes -o ConnectTimeout=30 -o SetupTimeout=30 -2 -i ${base}/s3kr1t/pushddmirror dak@ries.debian.org pool
 }
 
 function mirrorpush() {
@@ -484,8 +447,8 @@ function i18n2() {
 STAMP=$(date "+%Y%m%d%H%M")
 mkdir -p ${scriptdir}/i18n/${STAMP}
 cd ${scriptdir}/i18n/${STAMP}
- dak control-suite -l stable > lenny
- dak control-suite -l testing > squeeze
+ dak control-suite -l stable > squeeze
+ dak control-suite -l testing > wheezy
 dak control-suite -l unstable > sid
 echo "${STAMP}" > timestamp
 gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 55BE302B --detach-sign -o timestamp.gpg timestamp
@@ -513,22 +476,10 @@ function aptftpcleanup() {
 apt-ftparchive -q clean apt.conf
 }
 
-function compress() {
- log "Compress old psql backups"
+function cleantransactions() {
+ log "Cleanup transaction ids older than 3 months"
 cd $base/backup/
- find -maxdepth 1 -mindepth 1 -type f -name 'dump_pre_*' -mtime +2 -print0 | xargs -0 --no-run-if-empty rm
-
- find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
- while read dumpname; do
- echo "Compressing $dumpname"
- bzip2 -9fv "$dumpname"
- done
- find -maxdepth 1 -mindepth 1 -type f -name "dumpall_*" \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
- while read dumpname; do
- echo "Compressing $dumpname"
- bzip2 -9fv "$dumpname"
- done
- finddup -l -d $base/backup
+ find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -print0 | xargs -0 --no-run-if-empty rm
 }
 
 function logstats() {
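
Note on the final hunk: the new cleantransactions keeps the
find ... -print0 | xargs -0 --no-run-if-empty rm idiom from the dropped
compress function. NUL separation keeps filenames containing whitespace
intact, and --no-run-if-empty (GNU xargs) skips rm entirely when nothing
matches. A hedged sketch of the same idiom against a scratch directory
(the patch's -mtime +90 age filter is omitted here so the fresh files match):

    #!/bin/bash
    # Same deletion idiom as cleantransactions(), on throwaway files.
    dir=$(mktemp -d)
    touch "${dir}/txid_one" "${dir}/txid_two with space"
    find "${dir}" -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -print0 \
        | xargs -0 --no-run-if-empty rm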