# log something (basically echo it together with a timestamp)
# Set $PROGRAM to a string to have it added to the output.
function log () {
    local prefix=${PROGRAM:-}
    # Use $* (not $@) so all arguments are joined into a single message
    # string; mixing "$@" into a larger string is error-prone (SC2145).
    echo "$(date +"%b %d %H:%M:%S") $(hostname -s) ${prefix}[$$]: $*"
}
# log the message using log() but then also send a mail
# to the address configured in MAILTO (if non-empty)
function log_error () {
    log "$@"
    # Only mail when a recipient is actually configured. The original test
    # used -z (empty), which contradicted the comment above and would have
    # invoked mail with an empty recipient list.
    if [ -n "${MAILTO}" ]; then
        # ${MAILTO} deliberately unquoted: it may hold several
        # space-separated recipients.
        echo "$@" | mail -a "X-Debian: DAK" -e -s "[$PROGRAM@$(hostname -s)] ERROR [$$]" ${MAILTO} -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
    fi
}
# debug log, only output when DEBUG=1
function debug () {
    # Default to 0 so the test does not break when DEBUG is unset
    # (the original bare $DEBUG errored under `set -u` / empty value).
    if [ "${DEBUG:-0}" -eq 1 ]; then
        log "$@"
    fi
}
# Ask wanna-build to trigger a build round, guarded by LOCK_BUILDD so
# concurrent cron runs do not pile up.
function wbtrigger() {
    # Array instead of a word-split string, and local so we do not leak a
    # global SSHOPT into the calling shell.
    local -a sshopt=(-o BatchMode=yes -o ConnectTimeout=30 -o SetupTimeout=240)
    if lockfile -r 3 -l 3600 "${LOCK_BUILDD}"; then
        ssh -q -q "${sshopt[@]}" wbadm@buildd /org/wanna-build/trigger.often
    fi
    # NOTE(review): lock is removed even when lockfile failed — preserved
    # from the original; confirm that is intended.
    rm -f "${LOCK_BUILDD}"
}
# used by cron.dinstall *and* cron.unchecked.
# Regenerate the build-queue archives and the incoming.debian.org exports.
# Reads globals: incoming, scriptsdir.
function make_buildd_dir () {
    # We generate straight into the static mirror location for incoming
    dak manage-build-queues -a
    dak generate-packages-sources2 -a build-queues
    dak generate-releases -a build-queues >/dev/null

    # And set up all of the top level symlinks people seem to like
    find "${incoming}/web" -maxdepth 1 -type l -delete
    dak export-suite -r -s "accepted" -d "${incoming}/web"

    # For now, we still create the local incoming.d.o setup
    "${scriptsdir}/update-buildd-archive" "${incoming}/web/debian-buildd" "${incoming}/debian-buildd"
    # Quote the variable but leave the glob outside the quotes on purpose.
    rm -f "${incoming}"/public/*
    dak export-suite -s "accepted" -d "${incoming}/public"

    # Tell the mirrors that we've updated
    /usr/local/bin/static-update-component incoming.debian.org >/dev/null
}
# Process (oldstable)-proposed-updates "NEW" queue
# $1 - policy queue name, $2 - queue directory, $3 - mail recipient.
# NOTE(review): `tee -a REPORT` uses a relative path — callers are expected
# to have cd'ed into the queue directory first; confirm against callers.
function punew_do() {
    local queue="$1"
    local qdir="$2"
    local to="$3"

    dak process-policy "${queue}" | tee -a REPORT | mail -a "X-Debian: DAK" -e -s "NEW changes in ${queue}" "${to}" -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org

    dak generate-packages-sources2 -s "${queue}"

    STAMP=${STAMP:-$(date "+%Y%m%d%H%M")}

    local exportdir="${qdir}/tree/${STAMP}"
    local targetdir="${qdir}/export"
    dak export -q "${queue}" -d "${exportdir}" --all
    # Quote both link arguments (paths may contain spaces).
    ln -sfT "${exportdir}" "${targetdir}"
    # Drop all old export trees except the one we just made.
    find "${qdir}/tree" -mindepth 1 -maxdepth 1 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
}
# These versions used in dinstall
# Run p-u-new policy processing for the queue named in $1.
function punew() {
    if [ "${PROGRAM}" = "dinstall" ]; then
        log "Doing automated p-u-new processing"
    fi
    # Abort rather than run punew_do in the wrong directory.
    cd "${queuedir}/p-u-new" || return
    punew_do "$1" "${queuedir}/p-u-new" "debian-release@lists.debian.org"
}
# o-p-u-new counterpart of punew(): process the oldstable-proposed-updates
# policy queue named in $1.
function opunew() {
    if [ "${PROGRAM}" = "dinstall" ]; then
        log "Doing automated o-p-u-new processing"
    fi
    # Abort rather than run punew_do in the wrong directory.
    cd "${queuedir}/o-p-u-new" || return
    punew_do "$1" "${queuedir}/o-p-u-new" "debian-release@lists.debian.org"
}
# Process the backports-policy queue on backports-master.
function backports_policy() {
    local queue="backports-policy"
    local qdir="/srv/backports-master.debian.org/queue/policy"
    local to="backports-team@debian.org"

    if [ "${PROGRAM}" = "dinstall" ]; then
        log "Doing automated ${queue} processing"
    fi
    # punew_do writes REPORT relative to the cwd, so move there first.
    cd "${qdir}" || return
    punew_do "${queue}" "${qdir}" "${to}"
}
# Do the unchecked processing, in case we have files.
# Reads globals: unchecked, queuedir; honours UNCHECKED_WITHOUT_LOCK.
function do_unchecked () {
    cd "${unchecked}" || return

    changes=$(find . -maxdepth 1 -mindepth 1 -type f -name \*.changes | sed -e "s,./,," | xargs)
    report="${queuedir}/REPORT"
    timestamp=$(date "+%Y-%m-%d %H:%M")
    UNCHECKED_WITHOUT_LOCK=${UNCHECKED_WITHOUT_LOCK:-""}

    # ${changes} deliberately unquoted: it is a space-joined word list.
    echo "${timestamp}": ${changes:-"Nothing to do"} >> "${report}"
    # ${UNCHECKED_WITHOUT_LOCK} deliberately unquoted: empty or extra flags.
    dak process-upload -a ${UNCHECKED_WITHOUT_LOCK} -d "${unchecked}" >> "${report}"
    dak process-commands -d "${unchecked}" >> "${report}"
}
# process NEW policy queue
# Mails the NEW/BYHAND results to ftpmaster, the backports-new results to
# the backports team, then cleans both suites.
function do_new () {
    if [ "${PROGRAM}" = "dinstall" ]; then
        log "Doing NEW processing"
    fi
    (dak process-policy new; dak process-policy byhand) | mail -a "X-Debian: DAK" -e -s "NEW and BYHAND processing" ftpmaster@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org

    dak process-policy backports-new | mail -a "X-Debian: DAK" -e -s "NEW processing for backports-new" backports-team@debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org

    dak clean-suites -a new,backports-new
}
# Push the bts version-tracking data to bugs.debian.org, keeping a local
# timestamped archive copy, and warn if a sync has not succeeded in 3 days.
function sync_debbugs () {
    timestamp=$(date "+%Y-%m-%d-%H:%M")
    mkdir -p "${queuedir}/bts_version_track_archive/${timestamp}"
    rsync -aq "${queuedir}/bts_version_track/" "${queuedir}/bts_version_track_archive/${timestamp}"
    rmdir --ignore-fail-on-non-empty "${queuedir}/bts_version_track_archive/${timestamp}" # remove if empty.
    # Best-effort: errors are ignored on purpose, the staleness check below
    # is what alerts us to persistent failure.
    rsync -aq -e "ssh -o Batchmode=yes -o ConnectTimeout=30 -o SetupTimeout=30" --remove-source-files "${queuedir}/bts_version_track/" bugs-sync:/org/bugs.debian.org/versions/queue/ftp-master/ 2>/dev/null && touch "${lockdir}/synced_bts_version" || true

    NOW=$(date +%s)
    TSTAMP=$(stat -c %Y "${lockdir}/synced_bts_version")
    DIFF=$(( NOW - TSTAMP ))
    # 259200s = 3 days without a successful sync.
    if [ ${DIFF} -ge 259200 ]; then
        log "Kids, you tried your best and you failed miserably. The lesson is, never try. (Homer Simpson)"
    fi
}
# Expire old entries from the bts version-tracking archive.
function clean_debbugs () {
    # Delete files older than 60 days
    find "${queuedir}/bts_version_track_archive/" -mtime +60 -type f -delete
    # Delete empty directories
    find "${queuedir}/bts_version_track_archive/" -empty -type d -delete
}
# Mail the daily queue and cruft reports to the responsible teams.
function reports() {
    # Send a report on NEW/BYHAND packages
    log "Nagging ftpteam about NEW/BYHAND packages"
    dak queue-report | mail -a "X-Debian: DAK" -e -s "NEW and BYHAND on $(date +%D)" ftpmaster@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
    dak queue-report -d backports-new,backports-policy | mail -a "X-Debian: DAK" -e -s "NEW and POLICY on $(date +%D)" backports-team@debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
    # and one on crufty packages
    log "Sending information about crufty packages"
    dak cruft-report -R > "${webdir}/cruft-report-daily.txt"
    dak cruft-report -R -s experimental >> "${webdir}/cruft-report-daily.txt"
    # Redirect instead of the original useless-use-of-cat pipeline.
    mail -a "X-Debian: DAK" -e -s "Debian archive cruft report for $(date +%D)" ftpmaster@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org < "${webdir}/cruft-report-daily.txt"
}
# Save the current postgres transaction id under $base/backup, tagged with
# $1 (defaults to "unknown"), so a later rollback point can be identified.
function pg_timestamp() {
    # local: the original leaked tsname into the caller's scope.
    local tsname=${1:-"unknown"}
    log "Saving postgres transaction id for ${tsname}"
    psql -tAc 'select txid_current();' > "${base}/backup/txid_${tsname}_$(date +%Y.%m.%d-%H:%M:%S)"
}
# Print the filesystem root of archive $1 as recorded in the database;
# returns 1 (with a message on stderr) when the archive is unknown.
function get_archiveroot() {
    local archivename="$1"
    # NOTE(review): archivename is interpolated into SQL verbatim — callers
    # must only pass trusted, internally-generated names.
    local query="SELECT path FROM archive WHERE name='${archivename}'"
    # Declare and assign separately so psql's exit status is not masked
    # by `local` (SC2155).
    local archiveroot
    archiveroot="$(psql -tAc "${query}")"
    if [ -z "${archiveroot}" ]; then
        echo "get_archiveroot: couldn't get archiveroot for '${archivename}'" >&2
        return 1
    fi
    echo "${archiveroot}"
}