#
# Set $PROGRAM to a string to have it added to the output.
function log () {
- local prefix=${PROGRAM:-}
- echo "$(date +"%b %d %H:%M:%S") $(hostname -s) ${prefix}[$$]: $@"
+ # Emit a syslog-style line: "<date> <host> <prefix>[<pid>]: <message>"
+ local prefix=${PROGRAM:-}
+ echo "$(date +"%b %d %H:%M:%S") $(hostname -s) ${prefix}[$$]: $@"
}
# log the message using log() but then also send a mail
# to the address configured in MAILTO (if non-empty)
function log_error () {
- log "$@"
- if [ -z "${MAILTO}" ]; then
- echo "$@" | mail -a "X-Debian: DAK" -e -s "[$PROGRAM@$(hostname -s)] ERROR [$$]" ${MAILTO} -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
- fi
+ log "$@"
+ # Mail only when MAILTO is non-empty (-n). The previous -z test fired
+ # exactly when MAILTO was empty, i.e. when mail had no recipient at all,
+ # contradicting the "(if non-empty)" contract above.
+ if [ -n "${MAILTO}" ]; then
+ echo "$@" | mail -a "X-Debian: DAK" -e -s "[$PROGRAM@$(hostname -s)] ERROR [$$]" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" ${MAILTO}
+ fi
}
# debug log, only output when DEBUG=1
fi
}
+# Function that only cleans tempfiles, but does not exit or otherwise
+# care about any exit status
+function cleantempfiles() {
+ resolvetmpfiles
+ # NOTE(review): $TMPFILES is intentionally unquoted so it word-splits
+ # into individual entries; entries therefore must not contain spaces.
+ for TEMPFILE in $TMPFILES; do
+ if [ -n "${TEMPFILE}" ] && [ -f "${TEMPFILE}" ]; then
+ rm -f "${TEMPFILE}"
+ elif [ -n "${TEMPFILE}" ] && [ -d "${TEMPFILE}" ]; then
+ # Safety valve: never recursively remove "/" or a literal "/*" entry.
+ if [ "${TEMPFILE}" != "/" ] && [ "${TEMPFILE}" != "/*" ]; then
+ rm -rf "${TEMPFILE}"
+ fi
+ fi
+ done
+ TMPFILES=""
+}
+
+function resolvetmpfiles() {
+ # If you don't understand this better not touch the script
+ # TEMPFILES holds *names* of variables; ${!TEMPFILE} dereferences each
+ # name and the resulting path (if any) is appended to TMPFILES.
+ for TEMPFILE in $TEMPFILES; do
+ TMPFILES="${TMPFILES} ${!TEMPFILE:-""}"
+ done
+ TEMPFILES=""
+}
+
+# Function cleanup
+# No arguments
+# Cleans up any known tempfile.
+# Just ensure your script sets the variable
+# TEMPFILES to the names of variables of tempfiles
+# Or TMPFILES to the paths of tempfiles
+function cleanup() {
+ # Capture the exit status of whatever triggered us and disarm the
+ # traps so a failure inside cleanup cannot re-enter it; the original
+ # status is propagated back via the return below.
+ ERRVAL=$?
+ trap - ERR EXIT TERM HUP INT QUIT
+
+ cleantempfiles
+
+ return $ERRVAL
+}
+# Make sure both list variables exist; callers may run under set -u.
+TEMPFILES=${TEMPFILES:-""}
+TMPFILES=${TMPFILES:-""}
+
+# Timestamp. Used for dinstall stat graphs
+function ts() {
+ # $1 - label naming the stage this timestamp belongs to
+ echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
+}
+
+########################################################################
+########################################################################
+
function wbtrigger() {
- SSHOPT="-o BatchMode=yes -o ConnectTimeout=30 -o SetupTimeout=240"
+ # -n keeps ssh from consuming our stdin. NOTE(review): SetupTimeout is
+ # not a stock OpenSSH option — presumably a Debian-patched one; confirm
+ # it is still accepted by the installed ssh.
+ SSHOPT="-n -o BatchMode=yes -o ConnectTimeout=30 -o SetupTimeout=240"
if lockfile -r 3 -l 3600 "${LOCK_BUILDD}"; then
- ssh -q -q ${SSHOPT} wbadm@buildd /org/wanna-build/trigger.often
+ ssh -q -q ${SSHOPT} wbadm@buildd /srv/wanna-build/trigger.often
fi
rm -f "${LOCK_BUILDD}"
}
# used by cron.dinstall *and* cron.unchecked.
function make_buildd_dir () {
+ # We generate straight into the static mirror location for incoming
+ log "Preparing buildd area"
dak manage-build-queues -a
dak generate-packages-sources2 -a build-queues
- dak generate-releases -a build-queues
+ # stdout discarded: release generation output is noise in the cron mail
+ dak generate-releases -a build-queues >/dev/null
- for suite in unstable experimental; do
- rm -rf "$incoming/dists/$suite/buildd"
- dak export-suite -s "buildd-$suite" -d "$incoming/dists/$suite/buildd"
- done
- rm -f ${incoming}/public/*
- dak export-suite -s "accepted" -d "$incoming/public"
-
- # export to old build queue directories
- # XXX: Remove once the buildds use the version generated above.
- for suite in $(ls -1 $incoming/dists/); do
- # Skip project trace directory
- if [ "${suite}x" = "projectx" ]; then continue; fi
- cd ${incoming}/dists/${suite}/buildd
-
- apt-ftparchive packages . $overridedir/override.sid.all3 >Packages
- gzip -9c --rsyncable <Packages >Packages.gz
- apt-ftparchive sources . $overridedir/override.sid.all3 >Sources
- gzip -9c --rsyncable <Sources >Sources.gz
-
- rm -f buildd/Release
- cd ..
- apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="Debian" -o APT::FTPArchive::Release::Label="Debian" -o APT::FTPArchive::Release::Description="buildd $suite incoming" -o APT::FTPArchive::Release::Architectures="$archs" release buildd >Release
- if [ "$suite" = "experimental" ]; then
- echo "NotAutomatic: yes" >>Release
- fi
+ # Stick a last modified date in the page footer
+ echo "<p>Last updated: `date -u`</p>" > ${incoming}/web/README.html
- gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 473041FA --detach-sign -o Release.gpg Release
-
- mv Release Release.gpg buildd/
- done
-
- for dist in $(ls -1 ${incoming}/dists/); do
- # Skip project trace directory
- if [ "${dist}x" = "projectx" ]; then continue; fi
- cd ${incoming}/dists/${dist}
- mkdir -p tree/${STAMP}
- cp -al ${incoming}/dists/${dist}/buildd/. tree/${STAMP}/
- ln -sfT tree/${STAMP} ${incoming}/dists/${dist}/current
- find ./tree -mindepth 1 -maxdepth 1 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
- done
+ # Tell the mirrors that we've updated
+ log "Pushing static for incoming.d.o"
+ chronic /usr/local/bin/static-update-component incoming.debian.org
}
# Process (oldstable)-proposed-updates "NEW" queue
function punew_do() {
 local queue="$1"
 local qdir="$2"
+ # $3 - mail address notified about processed changes
+ local to="${3}"
+
date -u -R >> REPORT
- dak process-policy "${queue}" | tee -a REPORT | mail -a "X-Debian: DAK" -e -s "NEW changes in ${queue}" debian-release@lists.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
+ dak process-policy "${queue}" | tee -a REPORT | mail -a "X-Debian: DAK" -e -s "NEW changes in ${queue}" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" "${to}"
echo >> REPORT
dak generate-packages-sources2 -s "${queue}"
- local exportdir="${queuedir}/${qdir}/export"
- rm -rf "${exportdir}"
+ STAMP=${STAMP:-$(date "+%Y%m%d%H%M")}
+
+ # Export into a per-run tree, flip the "export" symlink atomically,
+ # then prune all older trees. Expansions are quoted (SC2086): the
+ # paths derive from ${queuedir} and must not be word-split/globbed.
+ local exportdir="${qdir}/tree/${STAMP}"
+ local targetdir="${qdir}/export"
+ mkdir -p -- "${exportdir}"
dak export -q "${queue}" -d "${exportdir}" --all
+ ln -sfT "${exportdir}" "${targetdir}"
+ find "${qdir}/tree" -mindepth 1 -maxdepth 1 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
}
# These versions used in dinstall
function punew() {
- if [ "${PROGRAM}" = "dinstall" ]; then
- log "Doing automated p-u-new processing"
- fi
+ log "Doing automated p-u-new processing"
cd "${queuedir}/p-u-new"
- punew_do "$1" "p-u-new"
+ # punew_do now takes an absolute queue dir and a notification address.
+ punew_do "$1" "${queuedir}/p-u-new" "debian-release@lists.debian.org"
}
function opunew() {
+ # Same flow as punew, but for oldstable-proposed-updates.
- if [ "${PROGRAM}" = "dinstall" ]; then
- log "Doing automated o-p-u-new processing"
- fi
+ log "Doing automated o-p-u-new processing"
cd "${queuedir}/o-p-u-new"
- punew_do "$1" "o-p-u-new"
+ punew_do "$1" "${queuedir}/o-p-u-new" "debian-release@lists.debian.org"
+}
+
+function backports_policy() {
+ # Process the backports-policy queue via punew_do and notify the
+ # backports team about the results.
+ local queue="backports-policy"
+ local qdir="/srv/backports-master.debian.org/queue/policy"
+ local to="backports-team@debian.org"
+
+ log "Doing automated ${queue} processing"
+
+ cd "${qdir}"
+ punew_do "${queue}" "${qdir}" "${to}"
+}
# Do the unchecked processing, in case we have files.
function do_unchecked () {
cd $unchecked
+ # Pick up *.dak-commands upload-control files as well, not only *.changes.
- changes=$(find . -maxdepth 1 -mindepth 1 -type f -name \*.changes | sed -e "s,./,," | xargs)
+ changes=$(find . -maxdepth 1 -mindepth 1 -type f \( -name \*.changes -o -name \*.dak-commands \) | sed -e "s,./,," | xargs)
report=$queuedir/REPORT
timestamp=$(date "+%Y-%m-%d %H:%M")
- UNCHECKED_WITHOUT_LOCK=${UNCHECKED_WITHOUT_LOCK:-""}
- echo "$timestamp": ${changes:-"Nothing to do"} >> $report
- dak process-upload -a ${UNCHECKED_WITHOUT_LOCK} -d "$unchecked" >> $report
+ # Only fire the heavyweight machinery (debbugs sync, buildd push)
+ # when there is actually something to process.
+ if [ ! -z "$changes" ]; then
+ log "Processing files ${changes}"
+ echo "${timestamp}: ${changes}" >> $report
+ dak process-upload -a -d "$unchecked" >> $report
+ dak process-commands -d "$unchecked" >> $report
+
+ sync_debbugs
+ do_buildd
+ else
+ log "Nothing to do"
+ echo "Nothing to do" >> ${report}
+ fi
}
# process NEW policy queue
function do_new () {
- if [ "${PROGRAM}" = "dinstall" ]; then
- log "Doing NEW processing"
- fi
- (dak process-policy new; dak process-policy byhand) | mail -a "X-Debian: DAK" -e -s "NEW and BYHAND processing" ftpmaster@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
- dak clean-suites -a new
+ log "Doing NEW processing"
+ (dak process-policy new; dak process-policy byhand) | mail -a "X-Debian: DAK" -e -s "NEW and BYHAND processing" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" ftpmaster@ftp-master.debian.org
+
+ # Backports NEW is handled here too now, mailed to the backports team.
+ log "Processing Backports NEW"
+ dak process-policy backports-new | mail -a "X-Debian: DAK" -e -s "NEW processing for backports-new" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" backports-team@debian.org
+
+ log "Cleanup NEW/Backports NEW"
+ dak clean-suites -a new,backports-new
}
function sync_debbugs () {
 # sync with debbugs
+ log "Sync debbugs version tracking information"
echo "--" >> $report
timestamp=$(date "+%Y-%m-%d-%H:%M")
mkdir -p $queuedir/bts_version_track_archive/${timestamp}
rsync -aq $queuedir/bts_version_track/ $queuedir/bts_version_track_archive/${timestamp}
rmdir --ignore-fail-on-non-empty $queuedir/bts_version_track_archive/${timestamp} # remove if empty.
- rsync -aq -e "ssh -o Batchmode=yes -o ConnectTimeout=30 -o SetupTimeout=30" --remove-source-files $queuedir/bts_version_track/ bugs-sync:/org/bugs.debian.org/versions/queue/ftp-master/ 2>/dev/null && touch $lockdir/synced_bts_version || true
+ rsync -aq -e "ssh -o Batchmode=yes -o ConnectTimeout=30 -o SetupTimeout=30" --remove-source-files $queuedir/bts_version_track/ bugs-sync:/srv/bugs.debian.org/versions/queue/ftp-master/ 2>/dev/null && touch $lockdir/synced_bts_version || true
NOW=$(date +%s)
TSTAMP=$(stat -c %Y $lockdir/synced_bts_version)
DIFF=$(( NOW - TSTAMP ))
if [ $DIFF -ge 259200 ]; then
- log "Kids, you tried your best and you failed miserably. The lesson is, never try. (Homer Simpson)"
+ # 259200s = 3 days without a successful sync: escalate via log_error
+ # (which also mails MAILTO) instead of only logging.
+ log_error "Kids, you tried your best and you failed miserably. The lesson is, never try. (Homer Simpson)"
fi
}
function clean_debbugs () {
+ log "Cleanup debbugs"
# Delete files older than 60 days
find $queuedir/bts_version_track_archive/ -mtime +60 -type f -delete
# Delete empty directories
function reports() {
 # Send a report on NEW/BYHAND packages
log "Nagging ftpteam about NEW/BYHAND packages"
- dak queue-report | mail -a "X-Debian: DAK" -e -s "NEW and BYHAND on $(date +%D)" ftpmaster@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
+ dak queue-report | mail -a "X-Debian: DAK" -e -s "NEW and BYHAND on $(date +%D)" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" ftpmaster@ftp-master.debian.org
+ # The backports queues get their own report, mailed to their team.
+ dak queue-report -d backports-new,backports-policy | mail -a "X-Debian: DAK" -e -s "NEW and POLICY on $(date +%D)" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" backports-team@debian.org
# and one on crufty packages
log "Sending information about crufty packages"
dak cruft-report -R > $webdir/cruft-report-daily.txt
dak cruft-report -R -s experimental >> $webdir/cruft-report-daily.txt
- cat $webdir/cruft-report-daily.txt | mail -a "X-Debian: DAK" -e -s "Debian archive cruft report for $(date +%D)" ftpmaster@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
+ cat $webdir/cruft-report-daily.txt | mail -a "X-Debian: DAK" -e -s "Debian archive cruft report for $(date +%D)" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" ftpmaster@ftp-master.debian.org
}
function pg_timestamp() {
log "Saving postgres transaction id for ${tsname}"
+ # NOTE(review): relies on globals $tsname and $base being set by the caller.
psql -tAc 'select txid_current();' > $base/backup/txid_${tsname}_$(date +%Y.%m.%d-%H:%M:%S)
}
+
+function get_archiveroot() {
+ # Look up the filesystem root of the named archive in the database.
+ # Prints the path on stdout; returns 1 (with a message on stderr)
+ # when the archive is unknown or the query yields nothing.
+ local archivename="$1"
+ # NOTE(review): archivename is interpolated into SQL unescaped; only
+ # trusted, internal archive names may be passed here.
+ local query="SELECT path FROM archive WHERE name='${archivename}'"
+ local archiveroot="$(psql -tAc "${query}")"
+ if [ -z "${archiveroot}" ]; then
+ echo "get_archiveroot: couldn't get archiveroot for '${archivename}'" >&2
+ return 1
+ fi
+ echo "${archiveroot}"
+}
+
+# Prepare the trees for buildds, then push wanna-build
+function do_buildd() {
+ # Best-effort: if the daily lock cannot be taken after 3 retries we
+ # simply skip this run.
+ if lockfile -r3 ${LOCK_DAILY}; then
+ # Register the lockfile so cleantempfiles/cleanup removes it.
+ TMPFILES="${TMPFILES} ${LOCK_DAILY}"
+ make_buildd_dir
+ wbtrigger
+ fi
+}
+
+# Cleanup policy queues
+function cleanpolicy() {
+ # Removes obsoleted files from both policy queue suites.
+ dak clean-suites -a backports-policy,policy
+}
+
+# Scan new packages for contents
+function scancontents() {
+ # -l caps how many packages each scan pass will process.
+ dak contents -l 10000 scan-binary
+ dak contents -l 1000 scan-source
+}
+
+function ddaccess() {
+ # Tell our dd accessible mirror to sync itself up.
+ # Delegates to the sync-dd helper script; see that script for details.
+ log "Trigger dd accessible parts sync"
+ ${scriptsdir}/sync-dd dd-sync dd-sync1 dd-sync2 sync
+}
+
+
+
+########################################################################
+########################################################################
+########################################################################
+########################################################################
+
+# Function to save which stage we are in, so we can restart an interrupted
+# dinstall. Or even run actions in parallel, if we dare to, by simply
+# backgrounding the call to this function. But that should only really be
+# done for things we don't care much about.
+#
+# This should be called with the first argument being an array, with the
+# members
+# - FUNC - the function name to call
+# - ARGS - Possible arguments to hand to the function. Can be the empty string
+# - TIME - The timestamp name. Can be the empty string
+# - ERR - if this is the string false, then the call will be surrounded by
+# set +e ... set -e calls, so errors in the function do not exit
+# dinstall. Can be the empty string, meaning true.
+#
+# MAKE SURE TO KEEP THIS THE LAST FUNCTION, AFTER ALL THE VARIOUS ONES
+# ADDED FOR DINSTALL FEATURES!
+function stage() {
+ # "${!ARGS}" expands the caller's GO array; its KEY=VALUE members
+ # become local variables here (FUNC, ARGS, TIME, ERR — see the
+ # header comment above this function).
+ ARGS='GO[@]'
+ local "${!ARGS}"
+
+ local error=${ERR:-"true"}
+
+ ARGS=${ARGS:-""}
+
+ log "########## ${PROGRAM} BEGIN: ${FUNC} ${ARGS} ##########"
+ local STAGEFILE="${stagedir}/${FUNC}${ARGS:+_}${ARGS}"
+ STAGEFILE=${STAGEFILE// /_}
+ if [ -f "${STAGEFILE}" ]; then
+ local stamptime=$(/usr/bin/stat -c %Z "${STAGEFILE}")
+ local unixtime=$(date +%s)
+ local difference=$(( $unixtime - $stamptime ))
+ # A stagefile older than 4 hours (14400s) probably means a stale
+ # leftover from a broken run, so escalate instead of just logging.
+ if [ ${difference} -ge 14400 ]; then
+ log_error "Did already run ${FUNC}, stagefile exists, but that was ${difference} seconds ago. Please check."
+ else
+ log "Did already run ${FUNC}, not calling again..."
+ fi
+ return
+ fi
+
+ debug "Now calling function ${FUNC}. Arguments: ${ARGS}. Timestamp: ${TIME}"
+
+ # Make sure we are always at the same place. If a function wants
+ # to be elsewhere, it has to cd first!
+ cd ${configdir}
+
+ # Now redirect the output into $STAGEFILE.log. In case it errors
+ # out somewhere our errorhandler trap can then mail the contents
+ # of $STAGEFILE.log only, instead of a whole ${PROGRAM} logfile.
+ # Short error mails ftw!
+ exec >> "${STAGEFILE}.log" 2>&1
+
+ if [ -f "${LOCK_STOP}" ]; then
+ log "${LOCK_STOP} exists, exiting immediately"
+ exit 42
+ fi
+
+ # Do we care about trouble in the function we call?
+ if [ "${error}" = "false" ]; then
+ set +e
+ fi
+ ${FUNC} ${ARGS}
+
+ # No matter what happened in the function, we make sure we have
+ # set -e default state back
+ set -e
+
+ # Make sure we are always at the same place.
+ cd ${configdir}
+
+ # We always use the same umask. If a function wants to do
+ # different, fine, but we reset.
+ umask 022
+
+ # Mark this stage as done so an interrupted run can resume past it.
+ touch "${STAGEFILE}"
+
+ if [ -n "${TIME}" ]; then
+ ts "${TIME}"
+ fi
+
+ # And the output goes back to the normal logfile
+ exec >> "${LOGFILE}" 2>&1
+
+ # Now we should make sure that we have a usable ${PROGRAM}.log, so
+ # append the $STAGEFILE.log to it.
+ cat "${STAGEFILE}.log" >> "${LOGFILE}"
+ rm -f "${STAGEFILE}.log"
+
+ log "########## ${PROGRAM} END: ${FUNC} ##########"
+
+ if [ -f "${LOCK_STOP}" ]; then
+ log "${LOCK_STOP} exists, exiting immediately"
+ exit 42
+ fi
+}