#
# Set $PROGRAM to a string to have it added to the output.
function log () {
- local prefix=${PROGRAM:-}
- echo "$(date +"%b %d %H:%M:%S") $(hostname -s) ${prefix}[$$]: $@"
+ local prefix=${PROGRAM:-}
+ echo "$(date +"%b %d %H:%M:%S") $(hostname -s) ${prefix}[$$]: $@"
}
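# Example (hypothetical values): with PROGRAM=dinstall,
#   log "archive run started"
# prints something like
#   Jun 01 12:00:00 somehost dinstall[4711]: archive run started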
# log the message using log() but then also send a mail
# to the address configured in MAILTO (if non-empty)
function log_error () {
- log "$@"
- if [ -z "${MAILTO}" ]; then
- echo "$@" | mail -a "X-Debian: DAK" -e -s "[$PROGRAM@$(hostname -s)] ERROR [$$]" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" ${MAILTO}
- fi
+ log "$@"
+ if [ -n "${MAILTO}" ]; then
+ echo "$@" | mail -a "X-Debian: DAK" -e -s "[$PROGRAM@$(hostname -s)] ERROR [$$]" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" ${MAILTO}
+ fi
}
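# Example (hypothetical message): a task wrapper could call
#   log_error "dinstall stage 'packages' failed"
# which writes the line to the log and, with MAILTO set, also mails it.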
# debug log, only output when DEBUG=1
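# The debug helper itself is not part of this excerpt; a minimal sketch,
# assuming it simply guards log() behind the DEBUG flag:
#   function debug () {
#       if [ "${DEBUG:-0}" -eq 1 ]; then
#           log "$*"
#       fi
#   }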
# exit on errors
set -e
+# A pipeline's return status is the value of the last (rightmost)
+# command to exit with a non-zero status, or zero if all commands exit
+# successfully.
set -o pipefail
# make sure to only use defined variables
set -u
# and have any trap on ERR inherited by shell functions and subshells
set -E
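# Illustration (not part of the script): without pipefail,
#   grep pattern missing-file | sort
# exits 0 because only sort's status counts; with pipefail the pipeline
# returns grep's non-zero status, so set -e aborts the script.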
# And use one locale, no matter what the caller has set
-export LANG=C
-export LC_ALL=C
+export LANG=C.UTF-8
+export LC_ALL=C.UTF-8
-ARG=${1:-"meh"}
+# One arg please
+declare -lr ARG=${1:-"meh"}
# set DEBUG if you want to see a little more logs (needs to be used more)
DEBUG=${DEBUG:-0}
-# While this check can be done in the following case, some assumptions
-# down there are easier if we sorted out calls without an arg before.
+# This doesn't catch calling us with an unknown argument, but it
+# catches missing args and saves a good bunch of processing time
+# (reading the scriptvars later is slow)
if [[ ${ARG} == meh ]]; then
cat - <<EOF
This is the FTPMaster cronscript. It needs an argument or it won't do
anything.
EOF
exit 0
fi
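# Example invocation (hypothetical path): calling the cronscript as
#   ./cronscript DINSTALL
# lowercases the argument via declare -l, so PROGRAM and TASKLIST resolve
# to "dinstall"; calling it with no argument prints the text above and
# exits 0.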
-# Make sure we start out with a sane umask setting
-umask 022
-
# import the general variable set.
export SCRIPTVARS=/srv/ftp-master.debian.org/dak/config/debian/vars
. $SCRIPTVARS
. "${configdir}/common"
# program name is the (lower cased) first argument.
-PROGRAM="${ARG,,}"
+PROGRAM="${ARG}"
# Timestamp when we started
NOW=$(date "+%Y.%m.%d-%H:%M:%S")
# Which list of tasks should we run?
-TASKLIST="${configdir}/${PROGRAM}.tasks"
+declare -r TASKLIST="${configdir}/${PROGRAM}.tasks"
# A logfile for every cron script
LOGFILE="${logdir}/${PROGRAM}_${NOW}.log"
# Get rid of tempfiles at the end
trap cleanup EXIT TERM HUP INT QUIT
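# cleanup() is defined elsewhere; a minimal sketch, assuming its only job
# is to remove the tempfiles collected in TMPFILES:
#   function cleanup() {
#       rm -f ${TMPFILES}
#   }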
-case ${ARG,,} in
+case ${ARG} in
unchecked)
# Do not run during dinstall
if [[ -e ${LOCK_DAILY} ]]; then
# and ensure it's no longer used
exec > "$logdir/after${PROGRAM}.log" 2>&1
-case ${ARG,,} in
+case ${ARG} in
unchecked)
;;
dinstall)
# Remove our stage files, so the next dinstall run will do it all again.
rm -f ${stagedir}/*
bzip2 -9 ${LOGFILE}
+
+# Logfile should be gone, remove the symlink
+[[ -L ${logdir}/${PROGRAM} ]] && [[ ! -f ${logdir}/${PROGRAM} ]] && rm -f ${logdir}/${PROGRAM} || log "Logfile still exists or symlink gone already? Something fishy going on"
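+# Roughly equivalent if/else form (sketch; unlike it, the one-liner above
+# also logs if the rm itself fails):
+#   if [[ -L ${logdir}/${PROGRAM} && ! -f ${logdir}/${PROGRAM} ]]; then
+#       rm -f ${logdir}/${PROGRAM}
+#   else
+#       log "Logfile still exists or symlink gone already? Something fishy going on"
+#   fi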
+
# FIXME: Mail the log when it's non-empty
[[ -s "${logdir}/after${PROGRAM}.log" ]] || rm "${logdir}/after${PROGRAM}.log"
# get the latest list of wnpp bugs and their source packages
function wnppbugs() {
- TMPFILE=$( mktemp -p ${TMPDIR} )
- TMPFILES="${TMPFILES} ${TMPFILE}"
- wget -q -O${TMPFILE} --ca-directory=/etc/ssl/ca-debian https://qa.debian.org/data/bts/wnpp_rm
- chmod go+r ${TMPFILE}
- mv ${TMPFILE} /srv/ftp-master.debian.org/scripts/masterfiles/wnpp_rm
+ log "Fetching latest list of wnpp bugs"
+ TMPLIST=$( mktemp -p ${TMPDIR} )
+ TMPFILES="${TMPFILES} ${TMPLIST}"
+ wget -q -O${TMPLIST} --ca-directory=/etc/ssl/ca-debian https://qa.debian.org/data/bts/wnpp_rm
+ chmod go+r ${TMPLIST}
+ mv ${TMPLIST} ${scriptdir}/masterfiles/wnpp_rm
}
# Push files over to security
function pushfilesdb() {
+ log "Pushing files table to security"
# The ssh key on the security host should have the following forced command set:
# command="/usr/bin/xzcat | /usr/bin/psql -1 -c 'DELETE FROM external_files; COPY external_files (id, filename, size, md5sum, last_used, sha1sum, sha256sum, created, modified) FROM STDIN' obscurity"
psql -c 'COPY files (id, filename, size, md5sum, last_used, sha1sum, sha256sum, created, modified) TO STDOUT' projectb | \
# Update wanna-build dump
function wbdump() {
log "Update wanna-build database dump"
- $base/dak/scripts/nfu/get-w-b-db
+ ${masterdir}/scripts/nfu/get-w-b-db
}
+
# Generate list of override disparities
function overridedisp() {
+ log "Generating list of override disparities"
dak override-disparity | gzip -9 > ${webdir}/override-disparity.gz
}
# Generate stats about the new queue
function newstats() {
+ log "Generating stats about the new queue"
dak stats new ${webdir}/NEW-stats.yaml 2> /dev/null
}
# Post with curl as a workaround for #801506
# See https://wiki.debian.org/ServicesSSL#curl
dir=/etc/ssl/ca-debian
- test -d $dir && capath="--capath $dir"
- curl -s $capath https://contributors.debian.org/contributors/post \
+ test -d ${dir} && capath="--capath ${dir}"
+ curl -s ${capath} https://contributors.debian.org/contributors/post \
-F source=ftp.debian.org \
-F auth_token="$(cat ${base}/s3kr1t/contributor.auth)" \
-F data=@${TMPCNTB} > ${TMPCNTB}.result
+ log "Results:"
cat ${TMPCNTB}.result
+ log "----"
rm -f ${TMPCNTB}.result
}
-
-function linkmorgue() {
- ${scriptsdir}/link_morgue.sh
-}
ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
for a in $ARCHES; do
(sed -n "s/|$a$//p" $ARCHLIST
- sed -n 's/|all$//p' $ARCHLIST
+ sed -n 's/|all$//p' $ARCHLIST
- cd $base/ftp
- find ./dists -maxdepth 1 \! -type d
- find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
+ cd $base/ftp
+ find ./dists -maxdepth 1 \! -type d
+ find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
) | sort -u | gzip -9 > arch-$a.list.gz
done
}
psql -F' ' -At -c "SELECT id, suite_name FROM suite" |
- while read id suite; do
- [ -e $base/ftp/dists/$suite ] || continue
- (
- (cd $base/ftp
- distname=$(cd dists; readlink $suite || echo $suite)
- find ./dists/$distname \! -type d
- for distdir in ./dists/*; do
- [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
- done
- )
- suite_list $id
- ) | sort -u | gzip -9 > suite-${suite}.list.gz
- done
+ while read id suite; do
+ [ -e $base/ftp/dists/$suite ] || continue
+ (
+ (cd $base/ftp
+ distname=$(cd dists; readlink $suite || echo $suite)
+ find ./dists/$distname \! -type d
+ for distdir in ./dists/*; do
+ [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
+ done
+ )
+ suite_list $id
+ ) | sort -u | gzip -9 > suite-${suite}.list.gz
+ done
log "Finding everything on the ftp site to generate sundries"
(cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
rm -f sundries.list
zcat *.list.gz | cat - *.list | sort -u |
- diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
+ diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
log "Generating files list"
for a in $ARCHES; do
(echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
- cat - sundries.list dists.list project.list docs.list indices.list |
- sort -u | poolfirst > ../arch-$a.files
+ cat - sundries.list dists.list project.list docs.list indices.list |
+ sort -u | poolfirst > ../arch-$a.files
done
(cd $base/ftp/
- for dist in sid jessie stretch; do
- find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
- done
+ for dist in sid jessie stretch; do
+ find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
+ done
)
(cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-jessie.list.gz ; zcat translation-stretch.list.gz) |
- sort -u | poolfirst > ../typical.files
+ sort -u | poolfirst > ../typical.files
rm -f $ARCHLIST
log "Done!"
function newoverview() {
# do not run show-new and other stuff in parallel
if lockfile -r16 $LOCK_UNCHECKED 2> /dev/null; then
+ log "Creating new overview"
TMPFILES="${TMPFILES} ${LOCK_UNCHECKED}"
do_new
dak show-new > /dev/null || true
+ cleantempfiles
fi
- cleantempfiles
}
function queuereport() {
+ log "Creating queue reports, new/backports-new, 822 format"
dak queue-report -n > ${webdir}/new.html
dak queue-report -n -d backports-new,backports-policy > ${webdir}/backports-new.html
dak queue-report -8 -d new,byhand,stable-new,oldstable-new,backports-new -r $webdir/stat
+ log "Deferred queue overview"
sudo -u dak-unpriv dak show-deferred -r ${webdir}/stat > ${webdir}/deferred.html
- dak graph -n new,byhand,stable-new,oldstable-new,deferred -r ${webdir}/stat -i ${webdir}/stat -x $scriptsdir/rrd-release-freeze-dates
+ log "Graphs about the queues"
+ dak graph -n new,byhand,stable-new,oldstable-new,deferred,backports-new -r ${webdir}/stat -i ${webdir}/stat -x $scriptsdir/rrd-release-freeze-dates
}
function removalstxt() {
}
function rss() {
+ log "NEW RSS feed"
$base/dak/tools/queue_rss.py -q $queuedir/new -o ${webdir}/rss/ -d $base/misc -l $base/log/
+ log "Removals RSS feed"
$base/dak/tools/removals.pl $configdir/removalsrss.rc > ${webdir}/rss/removals.rss
}
# -*- mode:sh -*-
-# locations used by many scripts
+umask 022
+unset CDPATH
+# Make gzip output rsync-friendly by default
+GZIP='--rsyncable' ; export GZIP
+
+# locations used by many scripts
base=/srv/ftp-master.debian.org
-public=/srv/ftp.debian.org
-bindir=$base/bin
-ftpdir=$base/ftp
-webdir=$public/web
-indices=$ftpdir/indices
-archs=$(dak admin a list | tr '\n' ' ')
+bindir=${base}/bin
+masterdir=${base}/dak/
+queuedir=${base}/queue/
+scriptdir=${base}/scripts
+
+configdir=${masterdir}/config/debian/
+dbdir=${base}/database/
+exportdir=${base}/export/
+extoverridedir=${scriptdir}/external-overrides
+ftpdir=${base}/ftp
+incoming=${base}/public/incoming.debian.org/
+indices=${ftpdir}/indices
+lockdir=${base}/lock/
+logdir=${base}/log/cron/
+mirrordir=${base}/mirror/
+
+accepted=${queuedir}/accepted/
+newstage=${queuedir}/newstage/
+overridedir=${scriptdir}/override
+scriptsdir=${masterdir}/scripts/debian/
+stagedir=${lockdir}/stages
+unchecked=${queuedir}/unchecked/
-scriptdir=$base/scripts
-masterdir=$base/dak/
-configdir=$base/dak/config/debian/
-scriptsdir=$base/dak/scripts/debian/
-dbdir=$base/database/
-lockdir=$base/lock/
-stagedir=$lockdir/stages
-overridedir=$scriptdir/override
-extoverridedir=$scriptdir/external-overrides
-logdir=$base/log/cron/
-
-queuedir=$base/queue/
-unchecked=$queuedir/unchecked/
-accepted=$queuedir/accepted/
-mirrordir=$base/mirror/
-incoming=$base/public/incoming.debian.org/
-newstage=$queuedir/newstage/
-exportdir=$base/export/
-exportpublic=$public/rsync/export/
+public=/srv/ftp.debian.org
+webdir=${public}/web
+exportpublic=${public}/rsync/export/
ftpgroup=debadmin
-
public_archives=(ftp-master backports debian-debug)
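# These are typically iterated over per archive (sketch; the helper name
# and archiveroot resolution are assumptions):
#   for archive in "${public_archives[@]}"; do
#       archiveroot="$(get_archiveroot "${archive}")"
#       # ... operate on ${archiveroot} ...
#   done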
TMPDIR=${base}/tmp
+PATH=${masterdir}:${PATH}
-PATH=$masterdir:$PATH
-umask 022
-unset CDPATH
-GZIP='--rsyncable' ; export GZIP
+# Various lockfiles used by multiple cron scripts
+LOCK_DAILY="${lockdir}/daily.lock"
+LOCK_UNCHECKED="${lockdir}/unchecked.lock"
+LOCK_STOP="${lockdir}/archive.stop"
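+# Typical usage in the cron scripts (sketch): a script grabs the unchecked
+# lock via something like
+#   lockfile -r16 ${LOCK_UNCHECKED}
+# and removes it when done, while cron.unchecked exits immediately as long
+# as LOCK_DAILY exists.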
-# lock cron.unchecked (it immediately exits when this exists)
-LOCK_DAILY="$lockdir/daily.lock"
-LOCK_UNCHECKED="$lockdir/unchecked.lock"
-LOCK_STOP="$lockdir/archive.stop"
+archs=$(dak admin a list | tr '\n' ' ')
# Set the database variables
eval $(dak admin config db-shell)
symlinks -d -r ${archiveroot}
done
}
+
+function linkmorgue() {
+ ${scriptsdir}/link_morgue.sh
+}