git.decadent.org.uk Git - dak.git/blobdiff - config/debian/dinstall.functions
Architecture names can have more than one character
[dak.git] / config / debian / dinstall.functions
old mode 100644 (file)
new mode 100755 (executable)
index fcd2de3..9ca4433
@@ -1,14 +1,4 @@
 # -*- mode:sh -*-
-# Timestamp. Used for dinstall stat graphs
-function ts() {
-        echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
-}
-
-# Remove daily lock
-# FIXME: Dead code when switched to cronscript
-function remove_daily_lock() {
-    rm -f $LOCK_DAILY
-}
 
 # Remove changelog lock
 function remove_changelog_lock() {
@@ -20,30 +10,32 @@ function remove_all_locks() {
     rm -f $LOCK_DAILY $LOCK_ACCEPTED
 }
 
+# Get rid of all locks and unset the trap
 function remove_locks {
     remove_all_locks
     trap - EXIT TERM HUP INT QUIT
     ts "locked part finished"
 }
 
+# Lock accepted
 function lockaccepted {
     lockfile "$LOCK_ACCEPTED"
     trap remove_all_locks EXIT TERM HUP INT QUIT
 }
 
-# If we error out this one is called, *FOLLOWED* by cleanup above
+# If we error out this one is called, *FOLLOWED* by cleanup in common
 function onerror() {
     ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
 
     subject="ATTENTION ATTENTION!"
-    if [ "${error}" = "false" ]; then
+    if [[ ${error} = false  ]]; then
         subject="${subject} (continued)"
     else
         subject="${subject} (interrupted)"
     fi
     subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
 
-    if [ -r "${STAGEFILE}.log" ]; then
+    if [[ -r ${STAGEFILE}.log  ]]; then
         cat "${STAGEFILE}.log"
     else
         echo "file ${STAGEFILE}.log does not exist, sorry"
@@ -60,14 +52,48 @@ function qa1() {
     ssh -n -2 -i ~dak/.ssh/push_merkel_qa  -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1
 }
 
+function mirrorlists() {
+    local mldir="${scriptdir}/mirrorlist"
+    local masterlist=${mldir}/Mirrors.masterlist
+
+    cd ${mldir}
+
+    [[ -f ${HOME}/.cvspass ]] || touch ${HOME}/.cvspass
+
+    grep -q "anonscm.debian.org:/cvs/webwml" ~/.cvspass || \
+       echo ":pserver:anonymous@anonscm.debian.org:/cvs/webwml A" >> ${HOME}/.cvspass
+
+    cvs update
+
+    if [[ ! -f ${ftpdir}/README.mirrors.html ]] || [[ ${masterlist} -nt ${ftpdir}/README.mirrors.html ]]; then
+       rm -f ${ftpdir}/README.mirrors.{html,txt}
+       ${mldir}/mirror_list.pl -m ${masterlist} -t html > ${ftpdir}/README.mirrors.html
+       ${mldir}/mirror_list.pl -m ${masterlist} -t text > ${ftpdir}/README.mirrors.txt
+       log Updated archive version of mirrors file
+    fi
+}
+
+function mailingliststxt() {
+    cd ${ftpdir}/doc
+
+    log "Updating archive version of mailing-lists.txt"
+    wget ${wgetopts} https://www.debian.org/misc/mailing-lists.txt
+}
+
+function pseudopackages() {
+    cd ${scriptdir}/masterfiles
+
+    log "Updating archive version of pseudo-packages"
+    for file in maintainers description; do
+       wget ${wgetopts} https://bugs.debian.org/pseudopackages/pseudo-packages.${file}
+    done
+}
+
 # Updating various files
-function updates() {
-    log "Updating Bugs docu, Mirror list and mailing-lists.txt"
+function bugdoctxt() {
+    log "Updating Bugs docu"
     cd $configdir
     $scriptsdir/update-bugdoctxt
-    $scriptsdir/update-mirrorlists
-    $scriptsdir/update-mailingliststxt
-    $scriptsdir/update-pseudopackages.sh
 }
 
 # The first i18n one, syncing new descriptions
@@ -79,17 +105,17 @@ function i18n1() {
 
     # Now check if we still know about the packages for which they created the files
     # is the timestamp signed by us?
-    if $(gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
+    if gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp; then
         # now read it. As its signed by us we are sure the content is what we expect, no need
         # to do more here. And we only test -d a directory on it anyway.
         TSTAMP=$(cat timestamp)
         # do we have the dir still?
-        if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
+        if [[ -d ${scriptdir}/i18n/${TSTAMP}  ]]; then
             # Lets check!
             if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
                 # Yay, worked, lets copy around
                 for dir in ${extimportdists}; do
-                    if [ -d dists/${dir}/ ]; then
+                    if [[ -d dists/${dir}/  ]]; then
                         cd dists/${dir}/main/i18n
                         rsync -aq --delete --delete-after --exclude Translation-en.bz2 --exclude Translation-*.diff/ . ${ftpdir}/dists/${dir}/main/i18n/.
                     fi
@@ -122,11 +148,11 @@ function dep11() {
     if ${scriptsdir}/dep11-basic-validate.py . ${dep11dir}/; then
         # Yay, worked, lets copy around
         for dir in ${extimportdists}; do
-            if [ -d ${dir}/ ]; then
+            if [[ -d ${dir}/  ]]; then
                 for comp in main contrib non-free; do
                     mkdir -p ${ftpdir}/dists/${dir}/${comp}/dep11
                     cd ${dir}/${comp}
-                    rsync -aq --delete --delete-after --exclude *.tmp . ${ftpdir}/dists/${dir}/${comp}/dep11/.
+                    rsync -aq --delete --delete-after --exclude ./*.tmp . ${ftpdir}/dists/${dir}/${comp}/dep11/.
                     cd ${dep11dir}
                 done
             fi
@@ -158,10 +184,10 @@ function fingerprints() {
     log "Updating fingerprints"
     dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
 
-    OUTFILE=$(mktemp)
+    OUTFILE=$( gettempfile )
     dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
 
-    if [ -s "${OUTFILE}" ]; then
+    if [[ -s ${OUTFILE} ]]; then
         /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
 From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
 To: <debian-project@lists.debian.org>
@@ -193,13 +219,13 @@ function mpfm() {
 
     log "Generating package / file mapping"
     for archive in "${public_archives[@]}"; do
+        log "  archive: ${archive}"
         archiveroot="$(get_archiveroot "${archive}")"
         dak make-pkg-file-mapping "${archive}" | bzip2 -9 > "${archiveroot}/indices/package-file.map.bz2"
     done
 }
 
 function packages() {
-    log "Generating Packages and Sources files"
     for archive in "${public_archives[@]}"; do
         log "  Generating Packages/Sources for ${archive}"
         dak generate-packages-sources2 -a "${archive}"
@@ -216,6 +242,7 @@ function pdiff() {
 function release() {
     log "Generating Release files"
     for archive in "${public_archives[@]}"; do
+        log "  archive: ${archive}"
         dak generate-releases -a "${archive}"
     done
 }
@@ -255,7 +282,7 @@ function mkmaintainers() {
     for archive in "${public_archives[@]}"; do
         archiveroot="$(get_archiveroot "${archive}")"
        indices="${archiveroot}/indices"
-       if ! [ -d "${indices}" ]; then
+       if ! [[ -d ${indices} ]]; then
            mkdir "${indices}"
        fi
         cd "${indices}"
@@ -322,11 +349,11 @@ function mkfilesindices() {
     ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
     for a in $ARCHES; do
         (sed -n "s/|$a$//p" $ARCHLIST
-            sed -n 's/|all$//p' $ARCHLIST
+         sed -n 's/|all$//p' $ARCHLIST
 
-            cd $base/ftp
-            find ./dists -maxdepth 1 \! -type d
-            find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
+         cd $base/ftp
+         find ./dists -maxdepth 1 \! -type d
+         find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
         ) | sort -u | gzip -9 > arch-$a.list.gz
     done
 
@@ -373,43 +400,43 @@ function mkfilesindices() {
     }
 
     psql -F' ' -At -c "SELECT id, suite_name FROM suite" |
-    while read id suite; do
-        [ -e $base/ftp/dists/$suite ] || continue
-        (
-            (cd $base/ftp
-                distname=$(cd dists; readlink $suite || echo $suite)
-                find ./dists/$distname \! -type d
-                for distdir in ./dists/*; do
-                    [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
-                done
-            )
-            suite_list $id
-        ) | sort -u | gzip -9 > suite-${suite}.list.gz
-    done
+        while read id suite; do
+            [[ -e $base/ftp/dists/$suite ]] || continue
+            (
+                (cd $base/ftp
+                 distname=$(cd dists; readlink $suite || echo $suite)
+                 find ./dists/$distname \! -type d
+                 for distdir in ./dists/*; do
+                     [[ $(readlink $distdir) != $distname ]] || echo $distdir
+                 done
+                )
+                suite_list $id
+            ) | sort -u | gzip -9 > suite-${suite}.list.gz
+        done
 
     log "Finding everything on the ftp site to generate sundries"
     (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
 
     rm -f sundries.list
     zcat *.list.gz | cat - *.list | sort -u |
-    diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
+        diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
 
     log "Generating files list"
 
     for a in $ARCHES; do
         (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
-        cat - sundries.list dists.list project.list docs.list indices.list |
-        sort -u | poolfirst > ../arch-$a.files
+            cat - sundries.list dists.list project.list docs.list indices.list |
+            sort -u | poolfirst > ../arch-$a.files
     done
 
     (cd $base/ftp/
-           for dist in sid jessie stretch; do
-                   find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
-           done
+     for dist in sid jessie stretch; do
+        find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
+     done
     )
 
     (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-jessie.list.gz ; zcat translation-stretch.list.gz) |
-    sort -u | poolfirst > ../typical.files
+        sort -u | poolfirst > ../typical.files
 
     rm -f $ARCHLIST
     log "Done!"
@@ -445,15 +472,22 @@ function mirror() {
         log "Regenerating \"public\" mirror/${archive} hardlink fun"
         DATE_SERIAL=$(date +"%Y%m%d01")
         FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} || echo ${DATE_SERIAL} )
-        if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
-            SERIAL="$DATE_SERIAL"
+        if [[ ${DATE_SERIAL} -gt ${FILESOAPLUS1}  ]]; then
+            SERIAL="${DATE_SERIAL}"
         else
-            SERIAL="$FILESOAPLUS1"
+            SERIAL="${FILESOAPLUS1}"
         fi
+        RFC822DATE=$(LC_ALL=POSIX LANG=POSIX date -u -R)
         date -u > ${TRACEFILE}
-        echo "Using dak v1" >> ${TRACEFILE}
-        echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
-        echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
+        {
+            echo "Using dak v1"
+            echo "Running on host: $(hostname -f)"
+            echo "Archive serial: ${SERIAL}"
+            echo "Date: ${RFC822DATE}"
+        } >> ${TRACEFILE}
+        # Now make it accessible via one name, no matter on which host we run
+        cd ${archiveroot}/project/trace/
+        ln -sf ftp-master.debian.org master
 
         mkdir -p ${targetpath}
         cd ${targetpath}
@@ -463,19 +497,19 @@ function mirror() {
 
 function expire() {
     log "Expiring old database dumps..."
-    cd $base/backup
-    $scriptsdir/expire_dumps -d . -p -f "dump_*"
+    cd ${base}/backup
+    ${scriptsdir}/expire_dumps -d . -p -f "dump_*"
 }
 
 function transitionsclean() {
     log "Removing out of date transitions..."
-    cd $base
+    cd ${base}
     dak transitions -c -a
 }
 
 function dm() {
     log "Updating DM permissions page"
-    dak acl export-per-source dm >$exportdir/dm.txt
+    dak acl export-per-source dm >${exportdir}/dm.txt
 }
 
 function bts() {
@@ -499,9 +533,9 @@ function mirrorpush() {
             echo "Processing: ${release}"
             subdir=${release%/InRelease}
             while read SHASUM SIZE NAME; do
-                if ! [ -f "${subdir}/${NAME}" ]; then
+                if ! [[ -f ${subdir}/${NAME} ]]; then
                     bname=$(basename ${NAME})
-                    if [[ "${bname}" =~ ^(Packages|Sources|Contents-[a-zA-Z0-9-]+|Translation-[a-zA-Z_]+|Components-(amd64|i386)\.yml|icons-(128x128|64x64)\.tar)$ ]]; then
+                    if [[ "${bname}" =~ ^(Packages|Sources|Contents-[a-zA-Z0-9-]+|Translation-[a-zA-Z_]+|Components-[a-zA-Z0-9-]+\.yml|icons-(128x128|64x64)\.tar)$ ]]; then
 
                         # We don't keep unpacked files, don't check for their existance.
                         # We might want to go and check their unpacked shasum, but right now
@@ -518,7 +552,7 @@ function mirrorpush() {
                 # So we use "readlink -f" to check the size of the target, as thats basically
                 # what gen-releases does
                 fsize=$(stat -c %s $(readlink -f "${subdir}/${NAME}"))
-                if [ ${fsize} -ne ${SIZE} ]; then
+                if [[ ${fsize} -ne ${SIZE} ]]; then
                     broken=$(( broken + 1 ))
                     echo "File ${subdir}/${NAME} has size ${fsize}, expected is ${SIZE}"
                     continue
@@ -526,7 +560,7 @@ function mirrorpush() {
 
                 fshasum=$(sha1sum $(readlink -f "${subdir}/${NAME}"))
                 fshasum=${fshasum%% *}
-                if [ "${fshasum}" != "${SHASUM}" ]; then
+                if [[ ${fshasum} != ${SHASUM} ]]; then
                     broken=$(( broken + 1 ))
                     echo "File ${subdir}/${NAME} has checksum ${fshasum}, expected is ${SHASUM}"
                     continue
@@ -534,7 +568,7 @@ function mirrorpush() {
             done < <(sed '1,/SHA1:/d' "${release}" | sed '/SHA256:/,$d')
         done
 
-        if [ $broken -gt 0 ]; then
+        if [[ $broken -gt 0 ]]; then
             log_error "Trouble with the public mirror for ${archive}, found ${broken} errors"
             continue
         else
@@ -543,6 +577,7 @@ function mirrorpush() {
                 ftp-master)
                     fname="mirrorstart"
                     pusharg=""
+                    pname="debian"
                     ;;
                 debian-debug)
                     pusharg="-a debug"
@@ -552,11 +587,15 @@ function mirrorpush() {
                     ;;&
                 *)
                     fname="mirrorstart.${archive}"
+                    pname=${archive}
                     ;;
             esac
-            date -u > /srv/ftp.debian.org/web/${fname}
-            echo "Using dak v1" >> /srv/ftp.debian.org/web/${fname}
-            echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/${fname}
+            mkdir -p ${webdir}/${pname}/project/trace/
+            ln -sf ${mirrordir}/${archive}/project/trace/master ${webdir}/${pname}/project/trace/master
+
+            date -u > ${webdir}/${fname}
+            echo "Using dak v1" >> ${webdir}/${fname}
+            echo "Running on host $(hostname -f)" >> ${webdir}/${fname}
             sudo -H -u archvsync /home/archvsync/runmirrors ${pusharg} > ~dak/runmirrors-${archive}.log 2>&1 &
         fi
     done
@@ -591,21 +630,21 @@ function i18n2() {
 
 function stats() {
     log "Updating stats data"
-    cd $configdir
-    $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
-    R --slave --vanilla < $base/misc/ftpstats.R
-    dak stats arch-space > $webdir/arch-space
-    dak stats pkg-nums > $webdir/pkg-nums
+    cd ${configdir}
+    ${scriptsdir}/update-ftpstats ${base}/log/* > ${base}/misc/ftpstats.data
+    R --slave --vanilla < ${base}/misc/ftpstats.R
+    dak stats arch-space > ${webdir}/arch-space
+    dak stats pkg-nums > ${webdir}/pkg-nums
 }
 
 function cleantransactions() {
     log "Cleanup transaction ids older than 3 months"
-    cd $base/backup/
+    cd ${base}/backup/
     find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -delete
 }
 
 function logstats() {
-    $masterdir/tools/logs.py "$1"
+    ${masterdir}/tools/logs.py "$1"
 }
 
 # save timestamp when we start
@@ -614,7 +653,7 @@ function savetimestamp() {
 }
 
 function maillogfile() {
-    cat "$LOGFILE" | mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
+    mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org < "${LOGFILE}"
 }
 
 function testingsourcelist() {
@@ -638,22 +677,22 @@ EOF
 
 # extract changelogs and stuff
 function changelogs() {
-    if lockfile -r3 $LOCK_CHANGELOG; then
+    if lockfile -r3 ${LOCK_CHANGELOG}; then
+        trap remove_changelog_lock EXIT TERM HUP INT QUIT
         log "Extracting changelogs"
         dak make-changelog -e -a ftp-master
-        [ -f ${exportdir}/changelogs/filelist.yaml ] && xz -f ${exportdir}/changelogs/filelist.yaml
+        [[ -f ${exportdir}/changelogs/filelist.yaml ]] && xz -f ${exportdir}/changelogs/filelist.yaml
         mkdir -p ${exportpublic}/changelogs
         cd ${exportpublic}/changelogs
         rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
         sudo -H -u staticsync /usr/local/bin/static-update-component metadata.ftp-master.debian.org >/dev/null 2>&1 &
 
         dak make-changelog -e -a backports
-        [ -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml ] && xz -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml
+        [[ -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml  ]] && xz -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml
         mkdir -p /srv/backports-master.debian.org/rsync/export/changelogs
         cd /srv/backports-master.debian.org/rsync/export/changelogs
         rsync -aHW --delete --delete-after --ignore-errors /srv/backports-master.debian.org/export/changelogs/. .
         remove_changelog_lock
-        trap remove_changelog_lock EXIT TERM HUP INT QUIT
     fi
 }
 
@@ -666,8 +705,7 @@ function signotherfiles() {
     for archive in "${public_archives[@]}"; do
         log "... archive: ${archive}"
         archiveroot="$(get_archiveroot "${archive}")"
-        local TMPLO=$( mktemp -p ${TMPDIR} )
-        trap "rm -f ${TMPLO}" ERR EXIT TERM HUP INT QUIT
+        local TMPLO=$( gettempfile )
 
         cd ${archiveroot}
         rm -f extrafiles
@@ -680,9 +718,14 @@ function signotherfiles() {
 function startup() {
     touch "${DINSTALLSTART}"
     ts "startup"
-    DINSTALLBEGIN="$(date -u +"%a %b %d %T %Z %Y (%s)")"
     lockfile -l 3600 "${LOCK_DAILY}"
     trap onerror ERR
-    TEMPFILES="${TEMPFILES} ${LOCK_DAILY}"
     touch "${LOCK_BRITNEY}"
+    TMPFILES="${TMPFILES} ${LOCK_DAILY} ${LOCK_BRITNEY}"
+}
+
+function postcronscript() {
+    logstats ${LOGFILE}
+    state "all done"
+    touch "${DINSTALLEND}"
 }