X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=config%2Fdebian%2Fdinstall.functions;h=faa063330c11bb5ce10a2f5ed6996a2bc4fadb01;hb=511e1c01bb858cb49a272a65a3cd31cf82d73ac1;hp=cac7c7c5ecef56db68f79c36f7b45585d8315d27;hpb=b8690d6b1256a4a95198b874d9eb1aab62a825fb;p=dak.git

diff --git a/config/debian/dinstall.functions b/config/debian/dinstall.functions
old mode 100644
new mode 100755
index cac7c7c5..9ca44338
--- a/config/debian/dinstall.functions
+++ b/config/debian/dinstall.functions
@@ -1,73 +1,99 @@
 # -*- mode:sh -*-
-# Timestamp. Used for dinstall stat graphs
-function ts() {
-    echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
-}
 
-# Remove daily lock
-function remove_daily_lock() {
-    rm -f $LOCK_DAILY
+# Remove changelog lock
+function remove_changelog_lock() {
+    rm -f $LOCK_CHANGELOG
 }
 
 # Remove all locks
 function remove_all_locks() {
-    rm -f $LOCK_DAILY $LOCK_ACCEPTED $LOCK_NEW
+    rm -f $LOCK_DAILY $LOCK_ACCEPTED
+}
+
+# Get rid of all locks and unset the trap
+function remove_locks {
+    remove_all_locks
+    trap - EXIT TERM HUP INT QUIT
+    ts "locked part finished"
 }
 
-# If we error out this one is called, *FOLLOWED* by cleanup above
+# Lock accepted
+function lockaccepted {
+    lockfile "$LOCK_ACCEPTED"
+    trap remove_all_locks EXIT TERM HUP INT QUIT
+}
+
+# If we error out this one is called, *FOLLOWED* by cleanup in common
 function onerror() {
     ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
 
     subject="ATTENTION ATTENTION!"
-    if [ "${error}" = "false" ]; then
+    if [[ ${error} = false ]]; then
         subject="${subject} (continued)"
     else
         subject="${subject} (interrupted)"
     fi
     subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
 
-    if [ -r "${STAGEFILE}.log" ]; then
+    if [[ -r ${STAGEFILE}.log ]]; then
         cat "${STAGEFILE}.log"
     else
         echo "file ${STAGEFILE}.log does not exist, sorry"
-    fi | mail -s "${subject}" -a "X-Debian: DAK" cron@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
+    fi | mail -s "${subject}" -a "X-Debian: DAK" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
 }
 
 ########################################################################
-# the actual dinstall functions follow                                 #
+# the actual functions follow                                          #
 ########################################################################
 
 # pushing merkels QA user, part one
 function qa1() {
     log "Telling QA user that we start dinstall"
-    ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1
+    ssh -n -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1
 }
 
-# Updating various files
-function updates() {
-    log "Updating Bugs docu, Mirror list and mailing-lists.txt"
-    cd $configdir
-    $scriptsdir/update-bugdoctxt
-    $scriptsdir/update-mirrorlists
-    $scriptsdir/update-mailingliststxt
-    $scriptsdir/update-pseudopackages.sh
+function mirrorlists() {
+    local mldir="${scriptdir}/mirrorlist"
+    local masterlist=${mldir}/Mirrors.masterlist
+
+    cd ${mldir}
+
+    [[ -f ${HOME}/.cvspass ]] || touch ${HOME}/.cvspass
+
+    grep -q "anonscm.debian.org:/cvs/webwml" ~/.cvspass || \
+        echo ":pserver:anonymous@anonscm.debian.org:/cvs/webwml A" >> ${HOME}/.cvspass
+
+    cvs update
+
+    if [[ ! -f ${ftpdir}/README.mirrors.html ]] || [[ ${masterlist} -nt ${ftpdir}/README.mirrors.html ]]; then
+        rm -f ${ftpdir}/README.mirrors.{html,txt}
+        ${mldir}/mirror_list.pl -m ${masterlist} -t html > ${ftpdir}/README.mirrors.html
+        ${mldir}/mirror_list.pl -m ${masterlist} -t text > ${ftpdir}/README.mirrors.txt
+        log Updated archive version of mirrors file
+    fi
 }
 
-# Process (oldstable)-proposed-updates "NEW" queue
-function punew_do() {
-    date -u -R >> REPORT
-    dak process-policy $1 | tee -a REPORT | mail -a "X-Debian: DAK" -e -s "NEW changes in $1" debian-release@lists.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
-    echo >> REPORT
+function mailingliststxt() {
+    cd ${ftpdir}/doc
+
+    log "Updating archive version of mailing-lists.txt"
+    wget ${wgetopts} https://www.debian.org/misc/mailing-lists.txt
 }
-function punew() {
-    log "Doing automated p-u-new processing"
-    cd "${queuedir}/p-u-new"
-    punew_do "$1"
+
+function pseudopackages() {
+    cd ${scriptdir}/masterfiles
+
+    log "Updating archive version of pseudo-packages"
+    for file in maintainers description; do
+        wget ${wgetopts} https://bugs.debian.org/pseudopackages/pseudo-packages.${file}
+    done
 }
-function opunew() {
-    log "Doing automated o-p-u-new processing"
-    cd "${queuedir}/o-p-u-new"
-    punew_do "$1"
+
+# Updating various files
+function bugdoctxt() {
+    log "Updating Bugs docu"
+    cd $configdir
+    $scriptsdir/update-bugdoctxt
 }
 
 # The first i18n one, syncing new descriptions
@@ -79,33 +105,61 @@ function i18n1() {
 
     # Now check if we still know about the packages for which they created the files
     # is the timestamp signed by us?
-    if $(gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
+    if gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp; then
         # now read it. As its signed by us we are sure the content is what we expect, no need
         # to do more here. And we only test -d a directory on it anyway.
         TSTAMP=$(cat timestamp)
         # do we have the dir still?
-        if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
+        if [[ -d ${scriptdir}/i18n/${TSTAMP} ]]; then
             # Lets check!
             if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
                 # Yay, worked, lets copy around
-                for dir in wheezy sid; do
-                    if [ -d dists/${dir}/ ]; then
+                for dir in ${extimportdists}; do
+                    if [[ -d dists/${dir}/ ]]; then
                         cd dists/${dir}/main/i18n
-                        rsync -aq --delete --delete-after . ${ftpdir}/dists/${dir}/main/i18n/.
+                        rsync -aq --delete --delete-after --exclude Translation-en.bz2 --exclude Translation-*.diff/ . ${ftpdir}/dists/${dir}/main/i18n/.
                     fi
                     cd ${scriptdir}/i18nsync
                 done
             else
                 echo "ARRRR, bad guys, wrong files, ARRR"
-                echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
+                echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
             fi
         else
             echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
-            echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
+            echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
         fi
     else
         echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
-        echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -a "X-Debian: DAK" -s "You can't keep blaming yourself. Just blame yourself once, and move on." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
+        echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -a "X-Debian: DAK" -s "You can't keep blaming yourself. Just blame yourself once, and move on." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
     fi
 }
+
+# Syncing AppStream/DEP-11 data
+function dep11() {
+    log "Synchronizing AppStream metadata"
+    # First sync their newest data
+    local dep11dir="${scriptdir}/dep11"
+    mkdir -p ${dep11dir}
+    cd ${dep11dir}
+    rsync -aq --delete --delete-after dep11-sync:/does/not/matter . || true
+
+    # Lets check!
+    if ${scriptsdir}/dep11-basic-validate.py . ${dep11dir}/; then
+        # Yay, worked, lets copy around
+        for dir in ${extimportdists}; do
+            if [[ -d ${dir}/ ]]; then
+                for comp in main contrib non-free; do
+                    mkdir -p ${ftpdir}/dists/${dir}/${comp}/dep11
+                    cd ${dir}/${comp}
+                    rsync -aq --delete --delete-after --exclude ./*.tmp . ${ftpdir}/dists/${dir}/${comp}/dep11/.
+                    cd ${dep11dir}
+                done
+            fi
+        done
+    else
+        echo "ARRRR, bad guys, wrong files, ARRR"
+        echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" mak@debian.org
-a "From: Debian FTP Masters " mak@debian.org fi } @@ -117,21 +171,23 @@ function cruft() { function dominate() { log "Removing obsolete source and binary associations" dak dominate + dak manage-debug-suites unstable-debug experimental-debug } -function filelist() { - log "Generating file lists for apt-ftparchive" - dak generate-filelist +function autocruft() { + log "Check for obsolete binary packages" + dak auto-decruft -s unstable + dak auto-decruft -s experimental --if-newer-version-in unstable --if-newer-version-in-rm-msg "NVIU" } function fingerprints() { log "Updating fingerprints" dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg - OUTFILE=$(mktemp) + OUTFILE=$( gettempfile ) dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}" - if [ -s "${OUTFILE}" ]; then + if [[ -s ${OUTFILE} ]]; then /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org < To: @@ -156,23 +212,26 @@ function overrides() { log "Writing overrides into text files" cd $overridedir dak make-overrides - - # FIXME - rm -f override.sid.all3 - for i in main contrib non-free main.debian-installer; do cat override.sid.$i >> override.sid.all3; done } function mpfm() { + local archiveroot + log "Generating package / file mapping" - dak make-pkg-file-mapping | bzip2 -9 > $base/ftp/indices/package-file.map.bz2 + for archive in "${public_archives[@]}"; do + log " archive: ${archive}" + archiveroot="$(get_archiveroot "${archive}")" + dak make-pkg-file-mapping "${archive}" | bzip2 -9 > "${archiveroot}/indices/package-file.map.bz2" + done } function packages() { - log "Generating Packages and Sources files" - cd $configdir - #apt-ftparchive generate apt.conf - dak generate-packages-sources - dak contents generate + for archive in "${public_archives[@]}"; do + log " Generating Packages/Sources for ${archive}" + dak generate-packages-sources2 -a "${archive}" + log " Generating Contents for ${archive}" + dak contents generate -a "${archive}" + done } function pdiff() { @@ -182,121 +241,93 @@ function pdiff() { function release() { log "Generating Release files" - dak generate-releases + for archive in "${public_archives[@]}"; do + log " archive: ${archive}" + dak generate-releases -a "${archive}" + done } function dakcleanup() { log "Cleanup old packages/files" dak clean-suites -m 10000 - dak clean-queues -} - -function buildd_dir() { - # Rebuilt the buildd dir to avoid long times of 403 - log "Regenerating the buildd incoming dir" - STAMP=$(date "+%Y%m%d%H%M") - make_buildd_dir + dak clean-queues -i "$unchecked" } function mklslar() { - cd $ftpdir - - FILENAME=ls-lR + local archiveroot + local FILENAME=ls-lR - log "Removing any core files ..." - find -type f -name core -print0 | xargs -0r rm -v + for archive in "${public_archives[@]}"; do + archiveroot="$(get_archiveroot "${archive}")" + cd "${archiveroot}" - log "Checking permissions on files in the FTP tree ..." - find -type f \( \! -perm -444 -o -perm +002 \) -ls - find -type d \( \! -perm -555 -o -perm +002 \) -ls + log "Removing any core files ..." + find -type f -name core -print -delete - log "Checking symlinks ..." - symlinks -rd . + log "Checking symlinks ..." + symlinks -rd . - log "Creating recursive directory listing ... 
" - rm -f .${FILENAME}.new - TZ=UTC ls -lR > .${FILENAME}.new - - if [ -r ${FILENAME}.gz ] ; then - mv -f ${FILENAME}.gz ${FILENAME}.old.gz - mv -f .${FILENAME}.new ${FILENAME} - rm -f ${FILENAME}.patch.gz - zcat ${FILENAME}.old.gz | diff -u - ${FILENAME} | gzip -9cfn - >${FILENAME}.patch.gz - rm -f ${FILENAME}.old.gz - else - mv -f .${FILENAME}.new ${FILENAME} - fi - - gzip -9cfN ${FILENAME} >${FILENAME}.gz - rm -f ${FILENAME} + log "Creating recursive directory listing ... " + rm -f ${FILENAME}.gz + TZ=UTC ls -lR | gzip -9c --rsyncable > ${FILENAME}.gz + done } function mkmaintainers() { - log 'Creating Maintainers index ... ' - - cd $indices - dak make-maintainers ${scriptdir}/masterfiles/pseudo-packages.maintainers | \ - sed -e "s/~[^ ]*\([ ]\)/\1/" | \ - awk '{printf "%-20s ", $1; for (i=2; i<=NF; i++) printf "%s ", $i; printf "\n";}' > .new-maintainers - - if ! cmp -s .new-maintainers Maintainers || [ ! -f Maintainers ]; then - log "installing Maintainers ... " - mv -f .new-maintainers Maintainers - gzip -9v .new-maintainers.gz - mv -f .new-maintainers.gz Maintainers.gz - else - rm -f .new-maintainers - fi -} - -function mkuploaders() { - log 'Creating Uploaders index ... ' + local archiveroot + local indices - cd $indices - dak make-maintainers -u ${scriptdir}/masterfiles/pseudo-packages.maintainers | \ - sed -e "s/~[^ ]*\([ ]\)/\1/" | \ - awk '{printf "%-20s ", $1; for (i=2; i<=NF; i++) printf "%s ", $i; printf "\n";}' > .new-uploaders + log 'Creating Maintainers index ... ' - if ! cmp -s .new-uploaders Uploaders || [ ! -f Uploaders ]; then - log "installing Uploaders ... " - mv -f .new-uploaders Uploaders - gzip -9v .new-uploaders.gz - mv -f .new-uploaders.gz Uploaders.gz - else - rm -f .new-uploaders - fi + for archive in "${public_archives[@]}"; do + archiveroot="$(get_archiveroot "${archive}")" + indices="${archiveroot}/indices" + if ! [[ -d ${indices} ]]; then + mkdir "${indices}" + fi + cd "${indices}" + + dak make-maintainers -a "${archive}" ${scriptdir}/masterfiles/pseudo-packages.maintainers + gzip -9v --rsyncable Maintainers.gz + gzip -9v --rsyncable Uploaders.gz + done } function copyoverrides() { log 'Copying override files into public view ...' - for ofile in $copyoverrides ; do - cd $overridedir - chmod g+w override.$ofile - - cd $indices - - newofile=override.$ofile.gz - rm -f .newover-$ofile.gz - pc="`gzip 2>&1 -9nv <$overridedir/override.$ofile >.newover-$ofile.gz`" - if ! cmp -s .newover-$ofile.gz $newofile || [ ! 
-                    log "  installing new $newofile $pc"
-                    mv -f .newover-$ofile.gz $newofile
-                    chmod g+w $newofile
-            else
-                    rm -f .newover-$ofile.gz
-            fi
-    done
+    (
+        shopt -s nullglob
+        for ofile in ${overridedir}/override.{squeeze,wheezy,jessie,stretch,sid}.{,extra.}{main,contrib,non-free}*; do
+            bname=${ofile##*/}
+            gzip -9cv --rsyncable ${ofile} > ${indices}/${bname}.gz
+            chmod g+w ${indices}/${bname}.gz
+        done
+    )
 }
 
 function mkfilesindices() {
+    set +o pipefail
     umask 002
     cd $base/ftp/indices/files/components
 
     ARCHLIST=$(tempfile)
 
     log "Querying postgres"
-    echo 'SELECT l.path, f.filename, a.arch_string FROM location l JOIN files f ON (f.location = l.id) LEFT OUTER JOIN (binaries b JOIN architecture a ON (b.architecture = a.id)) ON (f.id = b.file)' | psql -At | sed 's/|//;s,^/srv/ftp-master.debian.org/ftp,.,' | sort >$ARCHLIST
+    local query="
+      SELECT './pool/' || c.name || '/' || f.filename AS path, a.arch_string AS arch_string
+      FROM files f
+      JOIN files_archive_map af ON f.id = af.file_id
+      JOIN component c ON af.component_id = c.id
+      JOIN archive ON af.archive_id = archive.id
+      LEFT OUTER JOIN
+        (binaries b
+         JOIN architecture a ON b.architecture = a.id)
+        ON f.id = b.file
+      WHERE archive.name = 'ftp-master'
+      ORDER BY path, arch_string
+    "
+    psql -At -c "$query" >$ARCHLIST
 
     includedirs () {
         perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
@@ -318,128 +349,261 @@ function mkfilesindices() {
     ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
     for a in $ARCHES; do
         (sed -n "s/|$a$//p" $ARCHLIST
-        sed -n 's/|all$//p' $ARCHLIST
+         sed -n 's/|all$//p' $ARCHLIST
 
-        cd $base/ftp
-        find ./dists -maxdepth 1 \! -type d
-        find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
+         cd $base/ftp
+         find ./dists -maxdepth 1 \! -type d
+         find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
         ) | sort -u | gzip -9 > arch-$a.list.gz
     done
 
     log "Generating suite lists"
 
     suite_list () {
-        printf 'SELECT DISTINCT l.path, f.filename FROM (SELECT sa.source AS source FROM src_associations sa WHERE sa.suite = %d UNION SELECT b.source AS source FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) WHERE ba.suite = %d) s JOIN dsc_files df ON (s.source = df.source) JOIN files f ON (df.file = f.id) JOIN location l ON (f.location = l.id)\n' $1 $1 | psql -F' ' -A -t
-
-        printf 'SELECT l.path, f.filename FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) JOIN files f ON (b.file = f.id) JOIN location l ON (f.location = l.id) WHERE ba.suite = %d\n' $1 | psql -F' ' -A -t
+        local suite_id="$(printf %d $1)"
+        local query
+        query="
+          SELECT DISTINCT './pool/' || c.name || '/' || f.filename
+          FROM
+            (SELECT sa.source AS source
+               FROM src_associations sa
+              WHERE sa.suite = $suite_id
+             UNION
+             SELECT esr.src_id
+               FROM extra_src_references esr
+               JOIN bin_associations ba ON esr.bin_id = ba.bin
+              WHERE ba.suite = $suite_id
+             UNION
+             SELECT b.source AS source
+               FROM bin_associations ba
+               JOIN binaries b ON ba.bin = b.id WHERE ba.suite = $suite_id) s
+            JOIN dsc_files df ON s.source = df.source
+            JOIN files f ON df.file = f.id
+            JOIN files_archive_map af ON f.id = af.file_id
+            JOIN component c ON af.component_id = c.id
+            JOIN archive ON af.archive_id = archive.id
+          WHERE archive.name = 'ftp-master'
+        "
+        psql -F' ' -A -t -c "$query"
+
+        query="
+          SELECT './pool/' || c.name || '/' || f.filename
+          FROM bin_associations ba
+          JOIN binaries b ON ba.bin = b.id
+          JOIN files f ON b.file = f.id
+          JOIN files_archive_map af ON f.id = af.file_id
+          JOIN component c ON af.component_id = c.id
+          JOIN archive ON af.archive_id = archive.id
+          WHERE ba.suite = $suite_id AND archive.name = 'ftp-master'
+        "
+        psql -F' ' -A -t -c "$query"
     }
 
-    printf 'SELECT id, suite_name FROM suite\n' | psql -F' ' -At |
-    while read id suite; do
-        [ -e $base/ftp/dists/$suite ] || continue
-        (
-            (cd $base/ftp
-                distname=$(cd dists; readlink $suite || echo $suite)
-                find ./dists/$distname \! -type d
-                for distdir in ./dists/*; do
-                    [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
-                done
-            )
-            suite_list $id | tr -d ' ' | sed 's,^/srv/ftp-master.debian.org/ftp,.,'
-        ) | sort -u | gzip -9 > suite-${suite}.list.gz
-    done
+    psql -F' ' -At -c "SELECT id, suite_name FROM suite" |
+        while read id suite; do
+            [[ -e $base/ftp/dists/$suite ]] || continue
+            (
+                (cd $base/ftp
+                    distname=$(cd dists; readlink $suite || echo $suite)
+                    find ./dists/$distname \! -type d
+                    for distdir in ./dists/*; do
+                        [[ $(readlink $distdir) != $distname ]] || echo $distdir
+                    done
+                )
+                suite_list $id
+            ) | sort -u | gzip -9 > suite-${suite}.list.gz
+        done
 
     log "Finding everything on the ftp site to generate sundries"
     (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
 
     rm -f sundries.list
     zcat *.list.gz | cat - *.list | sort -u |
-    diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
+        diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
 
     log "Generating files list"
 
     for a in $ARCHES; do
         (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
-        cat - sundries.list dists.list project.list docs.list indices.list |
-        sort -u | poolfirst > ../arch-$a.files
+            cat - sundries.list dists.list project.list docs.list indices.list |
+            sort -u | poolfirst > ../arch-$a.files
     done
 
     (cd $base/ftp/
-            for dist in sid wheezy; do
-                    find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
-            done
+        for dist in sid jessie stretch; do
+            find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
+        done
     )
 
-    (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-wheezy.list.gz) |
-    sort -u | poolfirst > ../typical.files
+    (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-jessie.list.gz ; zcat translation-stretch.list.gz) |
+        sort -u | poolfirst > ../typical.files
 
     rm -f $ARCHLIST
     log "Done!"
+    set -o pipefail
 }
 
 function mkchecksums() {
-    dsynclist=$dbdir/dsync.list
-    md5list=$indices/md5sums
+    local archiveroot dsynclist md5list
 
-    log -n "Creating md5 / dsync index file ... "
+    for archive in "${public_archives[@]}"; do
+        archiveroot="$(get_archiveroot "${archive}")"
+        dsynclist=$dbdir/dsync.${archive}.list
+        md5list=${archiveroot}/indices/md5sums
 
-    cd "$ftpdir"
-    ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
-    ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
-    ${bindir}/dsync-flist -q link-dups $dsynclist || true
+        log -n "Creating md5 / dsync index file for ${archive}... "
+
+        cd "$archiveroot"
+        ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
+        ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
+        ${bindir}/dsync-flist -q link-dups $dsynclist || true
+    done
 }
 
 function mirror() {
-    log "Regenerating \"public\" mirror/ hardlink fun"
-    DATE_SERIAL=$(date +"%Y%m%d01")
-    FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} )
-    if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
-        SERIAL="$DATE_SERIAL"
-    else
-        SERIAL="$FILESOAPLUS1"
-    fi
-    date -u > ${TRACEFILE}
-    echo "Using dak v1" >> ${TRACEFILE}
-    echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
-    echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
-    cd ${mirrordir}
-    rsync -aH --link-dest ${ftpdir} --delete --delete-after --delete-excluded --exclude Packages.*.new --exclude Sources.*.new --ignore-errors ${ftpdir}/. .
+    local archiveroot targetpath TRACEFILE
+
+    for archive in "${public_archives[@]}"; do
+        archiveroot="$(get_archiveroot "${archive}")"
+        targetpath="${mirrordir}/${archive}"
+        TRACEFILE="${archiveroot}/project/trace/ftp-master.debian.org"
+        mkdir -p "${archiveroot}/project/trace/"
+
+        log "Regenerating \"public\" mirror/${archive} hardlink fun"
+        DATE_SERIAL=$(date +"%Y%m%d01")
+        FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} || echo ${DATE_SERIAL} )
+        if [[ ${DATE_SERIAL} -gt ${FILESOAPLUS1} ]]; then
+            SERIAL="${DATE_SERIAL}"
+        else
+            SERIAL="${FILESOAPLUS1}"
+        fi
+        RFC822DATE=$(LC_ALL=POSIX LANG=POSIX date -u -R)
+        date -u > ${TRACEFILE}
+        {
+            echo "Using dak v1"
+            echo "Running on host: $(hostname -f)"
+            echo "Archive serial: ${SERIAL}"
+            echo "Date: ${RFC822DATE}"
+        } >> ${TRACEFILE}
+        # Now make it accessible via one name, no matter on which host we run
+        cd ${archiveroot}/project/trace/
+        ln -sf ftp-master.debian.org master
+
+        mkdir -p ${targetpath}
+        cd ${targetpath}
+        rsync -aH --link-dest ${archiveroot} --delete --delete-after --delete-excluded --exclude Packages.*.new --exclude Sources.*.new --ignore-errors ${archiveroot}/. .
+    done
 }
 
 function expire() {
     log "Expiring old database dumps..."
-    cd $base/backup
-    $scriptsdir/expire_dumps -d . -p -f "dump_*"
+    cd ${base}/backup
+    ${scriptsdir}/expire_dumps -d . -p -f "dump_*"
 }
 
 function transitionsclean() {
     log "Removing out of date transitions..."
-    cd $base
+    cd ${base}
     dak transitions -c -a
 }
 
 function dm() {
-    log "Updating DM html page"
-    $scriptsdir/dm-monitor >$webdir/dm-uploaders.html
+    log "Updating DM permissions page"
+    dak acl export-per-source dm >${exportdir}/dm.txt
 }
 
 function bts() {
     log "Categorizing uncategorized bugs filed against ftp.debian.org"
-    dak bts-categorize
+    sudo -u dak-unpriv dak bts-categorize
 }
 
-function ddaccess() {
-    # Tell our dd accessible mirror to sync itself up. Including ftp dir.
-    log "Trigger dd accessible parts sync including ftp dir"
-    ssh -o Batchmode=yes -o ConnectTimeout=30 -o SetupTimeout=30 -2 -i ${base}/s3kr1t/pushddmirror dak@ries.debian.org pool
+function mirrorpush() {
+    log "Checking the public archive copies..."
+
+    local archiveroot targetpath
+
+    for archive in "${public_archives[@]}"; do
+        log "... archive: ${archive}"
+        archiveroot="$(get_archiveroot "${archive}")"
+        targetpath="${mirrordir}/${archive}"
+        cd ${archiveroot}/dists
+
+        broken=0
+        for release in $(find . -name "InRelease"); do
+            echo "Processing: ${release}"
+            subdir=${release%/InRelease}
+            while read SHASUM SIZE NAME; do
+                if ! [[ -f ${subdir}/${NAME} ]]; then
+                    bname=$(basename ${NAME})
+                    if [[ "${bname}" =~ ^(Packages|Sources|Contents-[a-zA-Z0-9-]+|Translation-[a-zA-Z_]+|Components-[a-zA-Z0-9-]+\.yml|icons-(128x128|64x64)\.tar)$ ]]; then
+
+                        # We don't keep unpacked files, don't check for their existance.
+                        # We might want to go and check their unpacked shasum, but right now
+                        # I don't care. I believe it should be enough if all the packed shasums
+                        # match.
+                        continue
+                    fi
+                    broken=$(( broken + 1 ))
+                    echo "File ${subdir}/${NAME} is missing"
+                    continue
+                fi
+
+                # We do have symlinks in the tree (see the contents files currently).
+                # So we use "readlink -f" to check the size of the target, as thats basically
+                # what gen-releases does
+                fsize=$(stat -c %s $(readlink -f "${subdir}/${NAME}"))
+                if [[ ${fsize} -ne ${SIZE} ]]; then
+                    broken=$(( broken + 1 ))
+                    echo "File ${subdir}/${NAME} has size ${fsize}, expected is ${SIZE}"
+                    continue
+                fi
+
+                fshasum=$(sha1sum $(readlink -f "${subdir}/${NAME}"))
+                fshasum=${fshasum%% *}
+                if [[ ${fshasum} != ${SHASUM} ]]; then
+                    broken=$(( broken + 1 ))
+                    echo "File ${subdir}/${NAME} has checksum ${fshasum}, expected is ${SHASUM}"
+                    continue
+                fi
+            done < <(sed '1,/SHA1:/d' "${release}" | sed '/SHA256:/,$d')
+        done
+
+        if [[ $broken -gt 0 ]]; then
+            log_error "Trouble with the public mirror for ${archive}, found ${broken} errors"
+            continue
+        else
+            log "Starting the mirrorpush for ${archive}"
+            case ${archive} in
+                ftp-master)
+                    fname="mirrorstart"
+                    pusharg=""
+                    pname="debian"
+                    ;;
+                debian-debug)
+                    pusharg="-a debug"
+                    ;;&
+                backports)
+                    pusharg="-a backports"
+                    ;;&
+                *)
+                    fname="mirrorstart.${archive}"
+                    pname=${archive}
+                    ;;
+            esac
+            mkdir -p ${webdir}/${pname}/project/trace/
+            ln -sf ${mirrordir}/${archive}/project/trace/master ${webdir}/${pname}/project/trace/master
+
+            date -u > ${webdir}/${fname}
+            echo "Using dak v1" >> ${webdir}/${fname}
+            echo "Running on host $(hostname -f)" >> ${webdir}/${fname}
+            sudo -H -u archvsync /home/archvsync/runmirrors ${pusharg} > ~dak/runmirrors-${archive}.log 2>&1 &
+        fi
+    done
 }
 
-function mirrorpush() {
-    log "Starting the mirrorpush"
-    date -u > /srv/ftp.debian.org/web/mirrorstart
-    echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
-    echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/mirrorstart
-    sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
+function mirrorpush-backports() {
+    log "Syncing backports mirror"
+    sudo -u backports /home/backports/bin/update-archive
 }
 
 function i18n2() {
@@ -447,11 +611,14 @@ function i18n2() {
     STAMP=$(date "+%Y%m%d%H%M")
     mkdir -p ${scriptdir}/i18n/${STAMP}
     cd ${scriptdir}/i18n/${STAMP}
-    dak control-suite -l stable > squeeze
-    dak control-suite -l testing > wheezy
-    dak control-suite -l unstable > sid
+    for suite in stable testing unstable; do
+        codename=$(dak admin s show ${suite}|grep '^Codename')
+        codename=${codename##* }
+        echo "Codename is ${codename}"
+        dak control-suite -l ${suite} >${codename}
+    done
     echo "${STAMP}" > timestamp
-    gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 55BE302B --detach-sign -o timestamp.gpg timestamp
+    gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 473041FA --detach-sign -o timestamp.gpg timestamp
 
     rm -f md5sum
     md5sum * > md5sum
     cd ${webdir}/
@@ -463,74 +630,36 @@ function i18n2() {
 
 function stats() {
     log "Updating stats data"
-    cd $configdir
-    $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
-    R --slave --vanilla < $base/misc/ftpstats.R
-    dak stats arch-space > $webdir/arch-space
-    dak stats pkg-nums > $webdir/pkg-nums
-}
-
-function aptftpcleanup() {
-    log "Clean up apt-ftparchive's databases"
-    cd $configdir
-    apt-ftparchive -q clean apt.conf
+    cd ${configdir}
+    ${scriptsdir}/update-ftpstats ${base}/log/* > ${base}/misc/ftpstats.data
+    R --slave --vanilla < ${base}/misc/ftpstats.R
+    dak stats arch-space > ${webdir}/arch-space
+    dak stats pkg-nums > ${webdir}/pkg-nums
 }
 
 function cleantransactions() {
     log "Cleanup transaction ids older than 3 months"
-    cd $base/backup/
-    find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -print0 | xargs -0 --no-run-if-empty rm
+    cd ${base}/backup/
+    find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -delete
 }
 
 function logstats() {
-    $masterdir/tools/logs.py "$1"
+    ${masterdir}/tools/logs.py "$1"
 }
 
 # save timestamp when we start
 function savetimestamp() {
-    NOW=`date "+%Y.%m.%d-%H:%M:%S"`
-    echo ${NOW} > "${dbdir}/dinstallstart"
+    echo ${NOW} > "${dbdir}/dinstallstart"
 }
 
 function maillogfile() {
-    cat "$LOGFILE" | mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" cron@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
-}
-
-function renamelogfile() {
-    if [ -f "${dbdir}/dinstallstart" ]; then
-        NOW=$(cat "${dbdir}/dinstallstart")
-#        maillogfile
-        mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
-        logstats "$logdir/dinstall_${NOW}.log"
-        bzip2 -9 "$logdir/dinstall_${NOW}.log"
-    else
-        error "Problem, I don't know when dinstall started, unable to do log statistics."
-        NOW=`date "+%Y.%m.%d-%H:%M:%S"`
-#        maillogfile
-        mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
-        bzip2 -9 "$logdir/dinstall_${NOW}.log"
-    fi
+    mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org < "${LOGFILE}"
 }
 
 function testingsourcelist() {
     dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
 }
 
-# do a last run of process-unchecked before dinstall is on.
-function process_unchecked() {
-    log "Processing the unchecked queue"
-    UNCHECKED_WITHOUT_LOCK="-p"
-    do_unchecked
-    sync_debbugs
-}
-
-# do a run of newstage only before dinstall is on.
-function newstage() {
-    log "Processing the newstage queue"
-    UNCHECKED_WITHOUT_LOCK="-p"
-    do_newstage
-}
-
 # Function to update a "statefile" telling people what we are doing
 # (more or less).
 #
@@ -548,10 +677,55 @@ EOF
 
 # extract changelogs and stuff
 function changelogs() {
-    log "Extracting changelogs"
-    dak make-changelog -e
-    mkdir -p ${exportpublic}/changelogs
-    cd ${exportpublic}/changelogs
-    rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
-    sudo -H -u archvsync /home/archvsync/runmirrors metaftpdo > ~dak/runmirrors-metadata.log 2>&1 &
+    if lockfile -r3 ${LOCK_CHANGELOG}; then
+        trap remove_changelog_lock EXIT TERM HUP INT QUIT
+        log "Extracting changelogs"
+        dak make-changelog -e -a ftp-master
+        [[ -f ${exportdir}/changelogs/filelist.yaml ]] && xz -f ${exportdir}/changelogs/filelist.yaml
+        mkdir -p ${exportpublic}/changelogs
+        cd ${exportpublic}/changelogs
+        rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
+        sudo -H -u staticsync /usr/local/bin/static-update-component metadata.ftp-master.debian.org >/dev/null 2>&1 &
+
+        dak make-changelog -e -a backports
+        [[ -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml ]] && xz -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml
+        mkdir -p /srv/backports-master.debian.org/rsync/export/changelogs
+        cd /srv/backports-master.debian.org/rsync/export/changelogs
+        rsync -aHW --delete --delete-after --ignore-errors /srv/backports-master.debian.org/export/changelogs/. .
+        remove_changelog_lock
+    fi
+}
+
+# Generate a list of extra mirror files, sha256sum em and sign that
+function signotherfiles() {
+    log "Signing extra mirror files"
+
+    local archiveroot
+
+    for archive in "${public_archives[@]}"; do
+        log "... archive: ${archive}"
+        archiveroot="$(get_archiveroot "${archive}")"
+        local TMPLO=$( gettempfile )
+
+        cd ${archiveroot}
+        rm -f extrafiles
+        sha256sum $(find * -type f | egrep -v '(pool|i18n|dep11|source)/|Contents-.*\.(gz|diff)|installer|binary-|(In)?Release(.gpg)?|\.changes' | sort) > ${TMPLO}
+        gpg --no-options --batch --no-tty --armour --personal-digest-preferences=SHA256 --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 473041FA --clearsign --output ${archiveroot}/extrafiles ${TMPLO}
+        rm -f ${TMPLO}
+    done
+}
+
+function startup() {
+    touch "${DINSTALLSTART}"
+    ts "startup"
+    lockfile -l 3600 "${LOCK_DAILY}"
+    trap onerror ERR
+    touch "${LOCK_BRITNEY}"
+    TMPFILES="${TMPFILES} ${LOCK_DAILY} ${LOCK_BRITNEY}"
+}
+
+function postcronscript() {
+    logstats ${LOGFILE}
+    state "all done"
+    touch "${DINSTALLEND}"
+}
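---
For readers skimming this diff: the locking idiom that the new startup(), lockaccepted(), changelogs() and remove_locks() functions all build on is procmail's lockfile utility combined with a bash trap, so the lock is guaranteed to disappear on any exit path. Below is a minimal standalone sketch of that pattern; it is illustrative only — the lock path, the log helper and the stage body are invented for the example, not dak configuration.

    #!/bin/bash
    # Sketch of the lockfile+trap pattern used in dinstall.functions.
    # Illustrative, not dak code: LOCK_DAILY here is an example path.
    set -e

    LOCK_DAILY=/tmp/example-dinstall.lock         # assumed path for the demo
    log() { echo "$(date -u '+%H:%M:%S') $*"; }   # stand-in for dak's log()

    # Take the lock, waiting/retrying as lockfile does, and force-expire it
    # after an hour, just like startup() does with "lockfile -l 3600".
    lockfile -l 3600 "${LOCK_DAILY}"

    # Ensure the lock is removed on any exit path, the way lockaccepted()
    # installs remove_all_locks as a trap handler.
    trap 'rm -f "${LOCK_DAILY}"' EXIT TERM HUP INT QUIT

    log "locked part starts"
    # ... run the stages that need the lock here (dominate, packages, ...)
    log "locked part finished"

    # Drop the lock early and unset the trap, mirroring remove_locks().
    rm -f "${LOCK_DAILY}"
    trap - EXIT TERM HUP INT QUIT

The same shape explains changelogs() above: "lockfile -r3" tries three times and gives up rather than blocking, so a concurrent changelog export is simply skipped instead of queued.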