# NOTE(review): every line in this excerpt carries an embedded original-file
# line number ("1 ", "3 ", ...); the jumps show the excerpt is non-contiguous.
1 # Timestamp. Used for dinstall stat graphs
# Prints the stage name ($1) plus wall-clock time so stat graphs can be
# derived from the dinstall log.
3 echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
12 # If we error out this one is called, *FOLLOWED* by cleanup above
# Error hook: builds a subject line describing where dinstall died and mails
# the failing stage's log to the cron alias.
# NOTE(review): the enclosing function header and the else/fi lines fall in
# omitted line ranges, so the branch structure here is partial.
14 ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
16 subject="ATTENTION ATTENTION!"
17 if [ "${error}" = "false" ]; then
18 subject="${subject} (continued)"
# (an 'else' on the omitted line 19 presumably precedes this branch)
20 subject="${subject} (interrupted)"
22 subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
# Mails the per-stage log; ${STAGEFILE} is set by the stage driver elsewhere.
24 cat "${STAGEFILE}.log" | mail -s "${subject}" -a "X-Debian: DAK" cron@ftp-master.debian.org
27 ########################################################################
28 # the actual dinstall functions follow #
29 ########################################################################
31 # pushing merkels QA user, part one
33 log "Telling merkels QA user that we start dinstall"
34 ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@merkel.debian.org sleep 1
37 # Create the postgres dump files
# pgdump_pre: snapshot the projectb database before the daily cron work.
# NOTE(review): the closing '}' (original line 41) is outside this excerpt.
38 function pgdump_pre() {
39 log "Creating pre-daily-cron-job backup of projectb database..."
# Unquoted $base kept as-is; presumably $base never contains whitespace.
40 pg_dump projectb > $base/backup/dump_pre_$(date +%Y.%m.%d-%H:%M:%S)
# pgdump_post: snapshot projectb (plus cluster globals) after the run and
# refresh the 'current'/'currentall' convenience symlinks.
# NOTE(review): original line 45 and the closing '}' are outside this excerpt.
43 function pgdump_post() {
44 log "Creating post-daily-cron-job backup of projectb database..."
46 POSTDUMP=$(date +%Y.%m.%d-%H:%M:%S)
47 pg_dump projectb > $base/backup/dump_$POSTDUMP
# --globals-only captures roles/tablespaces that plain pg_dump omits.
48 pg_dumpall --globals-only > $base/backup/dumpall_$POSTDUMP
# Symlinks are created in the *current working directory* — presumably
# $base/backup via an omitted cd; confirm before relying on it.
49 ln -sf $base/backup/dump_$POSTDUMP current
50 ln -sf $base/backup/dumpall_$POSTDUMP currentall
53 # Load the dak-dev projectb
# Rebuilds the development copy of projectb on the port-5433 cluster from
# the 'current'/'currentall' symlinks written by pgdump_post.
# NOTE(review): enclosing function header is on an omitted line.
56 echo "drop database projectb" | psql -p 5433 template1
# Restore cluster globals (roles etc.) first.
57 cat currentall | psql -p 5433 template1
58 createdb -p 5433 -T template0 projectb
# Strip the \connect line so the restore targets the db we just created.
59 fgrep -v '\connect' current | psql -p 5433 projectb
62 # Updating various files
# Runs the website/metadata refresh helpers out of $scriptsdir.
# NOTE(review): the enclosing function header (if any) is on an omitted line.
64 log "Updating Bugs docu, Mirror list and mailing-lists.txt"
66 $scriptsdir/update-bugdoctxt
67 $scriptsdir/update-mirrorlists
68 $scriptsdir/update-mailingliststxt
69 $scriptsdir/update-pseudopackages.sh
72 # Process (oldstable)-proposed-updates "NEW" queue
# Automated NEW-queue handling; '|| true' makes the step best-effort so a
# processing failure does not abort dinstall.
# NOTE(review): the cd into the queue directory and the surrounding function
# headers are on omitted lines, so COMMENTS/REPORT are relative paths there.
76 dak process-new -a -C COMMENTS >> REPORT || true
80 log "Doing automated p-u-new processing"
84 log "Doing automated o-p-u-new processing"
88 # The first i18n one, syncing new descriptions
# Pulls fresh DDTP translation material, verifies the timestamp signature,
# sanity-checks the payload, then publishes it into the live dists/ tree.
# NOTE(review): several else/fi/done lines fall in omitted line ranges, so
# the branch structure shown here is partial.
90 log "Synchronizing i18n package descriptions"
91 # First sync their newest data
92 cd ${scriptdir}/i18nsync
# Best-effort rsync from the DDTP host; failure must not abort dinstall.
93 rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
95 # Now check if we still know about the packages for which they created the files
96 # is the timestamp signed by us?
# NOTE(review): $(...) runs gpgv and then tries to execute its (empty)
# stdout; plain 'if gpgv ...' looks like the intent — confirm upstream.
97 if $(gpgv --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
98 # now read it. As its signed by us we are sure the content is what we expect, no need
99 # to do more here. And we only test -d a directory on it anyway.
100 TSTAMP=$(cat timestamp)
101 # do we have the dir still?
102 if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
# Run the consistency checker before publishing anything.
104 if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
105 # Yay, worked, lets copy around
106 for dir in squeeze sid; do
107 if [ -d dists/${dir}/ ]; then
108 cd dists/${dir}/main/i18n
109 rsync -aq --delete --delete-after . ${ftpdir}/dists/${dir}/main/i18n/.
111 cd ${scriptdir}/i18nsync
# Failure branches: log loudly and mail the l10n list (subjects are jokes,
# kept byte-identical — they are runtime strings).
114 echo "ARRRR, bad guys, wrong files, ARRR"
115 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." debian-l10n-devel@lists.alioth.debian.org
118 echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
119 echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." debian-l10n-devel@lists.alioth.debian.org
122 echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
123 echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -s "You can't keep blaming yourself. Just blame yourself once, and move on." debian-l10n-devel@lists.alioth.debian.org
# Override cruft check; the actual 'dak check-overrides' call presumably
# sits on an omitted line right after this log message — confirm.
128 log "Checking for cruft in overrides"
133 log "Generating suite file lists for apt-ftparchive"
134 dak make-suite-file-list
# filelist: newer variant that asks dak itself for the file lists.
# NOTE(review): the closing '}' is on an omitted line.
137 function filelist() {
138 log "Generating file lists for apt-ftparchive"
139 dak generate-filelist
# fingerprints: keyring import is currently disabled — only logs a notice.
# The commented lines below are the dormant implementation plus the
# announcement-mail template it used; kept verbatim.
142 function fingerprints() {
143 log "Not updating fingerprints - scripts needs checking"
145 # log "Updating fingerprints"
146 # dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
149 # dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
151 # if [ -s "${OUTFILE}" ]; then
152 # /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
153 #From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
154 #To: <debian-project@lists.debian.org>
155 #Subject: Debian Maintainers Keyring changes
156 #Content-Type: text/plain; charset=utf-8
159 #The following changes to the debian-maintainers keyring have just been activated:
163 #Debian distribution maintenance software,
164 #on behalf of the Keyring maintainers
# overrides: regenerate text override dumps and the combined
# override.sid.all3 file consumed by other tooling.
# NOTE(review): lines 173-176 (presumably a cd plus 'dak make-overrides')
# are omitted from this excerpt.
171 function overrides() {
172 log "Writing overrides into text files"
177 rm -f override.sid.all3
178 for i in main contrib non-free main.debian-installer; do cat override.sid.$i >> override.sid.all3; done
# Package->file map for mirror tooling; bzip2 -9 for maximum compression.
182 log "Generating package / file mapping"
183 dak make-pkg-file-mapping | bzip2 -9 > $base/ftp/indices/package-file.map.bz2
# packages: run apt-ftparchive over apt.conf to emit Packages/Sources.
186 function packages() {
187 log "Generating Packages and Sources files"
# Exported GZIP options make all gzip invocations rsync-friendly.
189 GZIP='--rsyncable' ; export GZIP
190 apt-ftparchive generate apt.conf
# pdiff and Release generation belong to their own (partially omitted)
# functions; only their bodies' core lines are visible here.
194 log "Generating pdiff files"
195 dak generate-index-diffs
199 log "Generating Release files"
200 dak generate-releases
# dakcleanup: expire packages/files no longer referenced by any suite.
203 function dakcleanup() {
204 log "Cleanup old packages/files"
# -m 10000: maximum number of files to clean in this run.
205 dak clean-suites -m 10000
# buildd_dir: regenerate the buildd incoming dir to avoid long 403 windows
# for the builders. Body beyond the STAMP line is omitted from this excerpt.
209 function buildd_dir() {
210 # Rebuilt the buildd dir to avoid long times of 403
211 log "Regenerating the buildd incoming dir"
212 STAMP=$(date "+%Y%m%d%H%M")
# FTP-tree housekeeping: drop stray core files, audit permissions, then
# rebuild the recursive ls-lR listing plus an rsync-friendly gzip and a
# patch against the previous run.
# NOTE(review): the enclosing function header, the FILENAME assignment and
# some else/fi lines are on omitted lines.
221 log "Removing any core files ..."
222 find -type f -name core -print0 | xargs -0r rm -v
224 log "Checking permissions on files in the FTP tree ..."
# Report-only: nothing is chmod'ed automatically.
225 find -type f \( \! -perm -444 -o -perm +002 \) -ls
226 find -type d \( \! -perm -555 -o -perm +002 \) -ls
228 log "Checking symlinks ..."
231 log "Creating recursive directory listing ... "
232 rm -f .${FILENAME}.new
# TZ=UTC keeps listing timestamps stable across hosts/timezones.
233 TZ=UTC ls -lR > .${FILENAME}.new
235 if [ -r ${FILENAME}.gz ] ; then
236 mv -f ${FILENAME}.gz ${FILENAME}.old.gz
237 mv -f .${FILENAME}.new ${FILENAME}
238 rm -f ${FILENAME}.patch.gz
# Unified diff old->new, published as a patch for mirror scripts.
239 zcat ${FILENAME}.old.gz | diff -u - ${FILENAME} | gzip --rsyncable -9cfn - >${FILENAME}.patch.gz
240 rm -f ${FILENAME}.old.gz
# (else branch: first run, nothing to diff against)
242 mv -f .${FILENAME}.new ${FILENAME}
245 gzip --rsyncable -9cfN ${FILENAME} >${FILENAME}.gz
# mkmaintainers: rebuild the Maintainers index and install it only when it
# actually changed (decided by cmp's exit status).
# NOTE(review): the cd into the target dir and the 'rc=$?' capture after
# cmp are on omitted lines.
249 function mkmaintainers() {
250 log -n 'Creating Maintainers index ... '
# Strip "~..." suffixes and align package names into a fixed-width column.
253 dak make-maintainers ${scriptdir}/masterfiles/pseudo-packages.maintainers | \
254 sed -e "s/~[^ ]*\([ ]\)/\1/" | awk '{printf "%-20s ", $1; for (i=2; i<=NF; i++) printf "%s ", $i; printf "\n";}' > .new-maintainers
257 cmp .new-maintainers Maintainers >/dev/null
# $rc presumably holds cmp's status (captured on an omitted line): 1 means
# the files differ.
260 if [ $rc = 1 ] || [ ! -f Maintainers ] ; then
261 log -n "installing Maintainers ... "
262 mv -f .new-maintainers Maintainers
263 gzip --rsyncable -9v <Maintainers >.new-maintainers.gz
264 mv -f .new-maintainers.gz Maintainers.gz
265 elif [ $rc = 0 ] ; then
266 log '(same as before)'
267 rm -f .new-maintainers
# copyoverrides: publish gzip'ed copies of the override files named in
# $copyoverrides, replacing each only when its content changed.
# NOTE(review): the cd, the $nf assignment, the rc capture and the
# unchanged/else arms are on omitted lines.
274 function copyoverrides() {
275 log 'Copying override files into public view ...'
# $copyoverrides intentionally unquoted: word-splits into the file list.
277 for f in $copyoverrides ; do
279 chmod g+w override.$f
# Capture gzip -v statistics for the log message below.
283 pc="`gzip 2>&1 -9nv <$overridedir/override.$f >.newover-$f.gz`"
286 cmp -s .newover-$f.gz $nf
# rc presumably set from cmp on an omitted line; 1 => content changed.
291 elif [ $rc = 1 -o ! -f $nf ]; then
292 log " installing new $nf $pc"
293 mv -f .newover-$f.gz $nf
# mkfilesindices: build the per-arch / per-suite file lists under
# ftp/indices/files/components by querying projectb and walking the tree.
# NOTE(review): this excerpt omits many lines (umask, helper function
# headers such as poolfirst/suite_list, loop 'done's and subshell parens),
# so the structure below is only partial.
302 function mkfilesindices() {
304 cd $base/ftp/indices/files/components
308 log "Querying projectb..."
# Full file list with owning architecture (arch empty for non-binary
# files); absolute paths rewritten relative to the ftp root.
309 echo 'SELECT l.path, f.filename, a.arch_string FROM location l JOIN files f ON (f.location = l.id) LEFT OUTER JOIN (binaries b JOIN architecture a ON (b.architecture = a.id)) ON (f.id = b.file)' | psql projectb -At | sed 's/|//;s,^/srv/ftp.debian.org/ftp,.,' | sort >$ARCHLIST
# Emit each path plus every parent directory of it exactly once.
312 perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
# Order ./pool/ entries before everything else (presumably the body of the
# poolfirst helper).
315 perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
# NOTE(review): this log message's closing quote is on an omitted line.
318 log "Generating sources list
# Rows with a trailing '|' (no architecture) are source files.
320 sed -n 's/|$//p' $ARCHLIST
322 find ./dists -maxdepth 1 \! -type d
323 find ./dists \! -type d | grep "/source/"
324 ) | sort -u | gzip --rsyncable -9 > source.list.gz
# NOTE(review): closing quote of this log message is on an omitted line too.
326 log "Generating arch lists
# Arch set: everything seen in projectb plus amd64, minus 'all'.
328 ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
330 (sed -n "s/|$a$//p" $ARCHLIST
331 sed -n 's/|all$//p' $ARCHLIST
334 find ./dists -maxdepth 1 \! -type d
335 find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
336 ) | sort -u | gzip --rsyncable -9 > arch-$a.list.gz
339 log "Generating suite lists"
# suite_list helper body: files referenced by a suite id — sources first,
# then binaries.
342 printf 'SELECT DISTINCT l.path, f.filename FROM (SELECT sa.source AS source FROM src_associations sa WHERE sa.suite = %d UNION SELECT b.source AS source FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) WHERE ba.suite = %d) s JOIN dsc_files df ON (s.source = df.source) JOIN files f ON (df.file = f.id) JOIN location l ON (f.location = l.id)\n' $1 $1 | psql -F' ' -A -t projectb
344 printf 'SELECT l.path, f.filename FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) JOIN files f ON (b.file = f.id) JOIN location l ON (f.location = l.id) WHERE ba.suite = %d\n' $1 | psql -F' ' -A -t projectb
347 printf 'SELECT id, suite_name FROM suite\n' | psql -F' ' -At projectb |
348 while read id suite; do
349 [ -e $base/ftp/dists/$suite ] || continue
# Resolve suite symlinks (e.g. stable -> codename) to list the real dir.
352 distname=$(cd dists; readlink $suite || echo $suite)
353 find ./dists/$distname \! -type d
# Also include the symlinks that point at this dist.
354 for distdir in ./dists/*; do
355 [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
358 suite_list $id | tr -d ' ' | sed 's,^/srv/ftp.debian.org/ftp,.,'
359 ) | sort -u | gzip --rsyncable -9 > suite-${suite}.list.gz
362 log "Finding everything on the ftp site to generate sundries"
364 (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
# Sundries = files present on disk but not claimed by any arch/suite list.
367 zcat *.list.gz | cat - *.list | sort -u |
368 diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
370 log "Generating files list"
373 (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
374 cat - sundries.list dists.list project.list docs.list indices.list |
375 sort -u | poolfirst > ../arch-$a.files
379 for dist in sid squeeze; do
380 find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip --rsyncable -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
# "typical" mirror profile: i386+amd64, oldstable/p-u, and translations.
384 (cat ../arch-i386.files ../arch-amd64.files; zcat suite-oldstable.list.gz suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-squeeze.list.gz) |
385 sort -u | poolfirst > ../typical.files
# mkchecksums: regenerate the dsync file list and the md5sums index, then
# hardlink duplicate files to save disk space.
# NOTE(review): removal of the old lists and the closing '}' are on
# omitted lines.
391 function mkchecksums() {
392 dsynclist=$dbdir/dsync.list
393 md5list=$indices/md5sums
395 log -n "Creating md5 / dsync index file ... "
398 ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
399 ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n --rsyncable > ${md5list}.gz
# Best-effort: duplicate-linking failure must not abort dinstall.
400 ${bindir}/dsync-flist -q link-dups $dsynclist || true
# Misc steps: helper scripts, the hardlinked "public" mirror copy, the
# wanna-build daily trigger and old-dump expiry.
# NOTE(review): the function headers and cd's around these fragments are
# on omitted lines.
404 log "Running various scripts from $scriptsdir"
413 log "Regenerating \"public\" mirror/ hardlink fun"
# Hardlink-based snapshot of the ftp tree into the current directory.
415 rsync -aH --link-dest ${ftpdir} --delete --delete-after --ignore-errors ${ftpdir}/. .
419 log "Trigger daily wanna-build run"
# NOTE(review): 'SetupTimeOut' is not a stock OpenSSH client option —
# presumably a locally patched ssh; confirm.
420 ssh -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 wbadm@buildd /org/wanna-build/trigger.daily || echo "W-B trigger.daily failed" | mail -s "W-B Daily trigger failed" ftpmaster@ftp-master.debian.org
424 log "Expiring old database dumps..."
# expire_dumps thins the dump_* backups on its own retention schedule.
426 $scriptsdir/expire_dumps -d . -p -f "dump_*"
# transitionsclean: drop transitions that no longer apply.
429 function transitionsclean() {
430 log "Removing out of date transitions..."
432 dak transitions -c -a
# Daily report mails for the ftpteam (NEW/BYHAND queue and cruft).
# NOTE(review): the enclosing function header is on an omitted line;
# 'mail -e' sends only when the body is non-empty.
436 # Send a report on NEW/BYHAND packages
437 log "Nagging ftpteam about NEW/BYHAND packages"
438 dak queue-report | mail -e -s "NEW and BYHAND on $(date +%D)" ftpmaster@ftp-master.debian.org
439 # and one on crufty packages
440 log "Sending information about crufty packages"
441 dak cruft-report > $webdir/cruft-report-daily.txt
442 dak cruft-report -s experimental >> $webdir/cruft-report-daily.txt
443 cat $webdir/cruft-report-daily.txt | mail -e -s "Debian archive cruft report for $(date +%D)" ftpmaster@ftp-master.debian.org
# DM page refresh, BTS categorization, and remote sync triggers. Each ssh
# returns immediately ('sleep 1' payload); the real sync runs detached on
# the peer, keyed off the dedicated push identity.
# NOTE(review): function headers are on omitted lines; 'SetupTimeOut' is
# not a stock OpenSSH option — presumably a locally patched ssh.
447 log "Updating DM html page"
448 $scriptsdir/dm-monitor >$webdir/dm-uploaders.html
452 log "Categorizing uncategorized bugs filed against ftp.debian.org"
457 # Push dak@merkel so it syncs the projectb there. Returns immediately, the sync runs detached
458 log "Trigger merkel/flotows projectb sync"
459 ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_projectb dak@merkel.debian.org sleep 1
460 # Also trigger flotow, the ftpmaster test box
461 ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_flotow_projectb dak@flotow.debconf.org sleep 1
465 # Push dak@merkel to tell it to sync the dd accessible parts. Returns immediately, the sync runs detached
466 log "Trigger merkels dd accessible parts sync"
467 ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_ddaccess dak@merkel.debian.org sleep 1
# mirrorpush: write the mirrorstart marker file, then start the mirror
# push in the background as the archvsync user.
# NOTE(review): the closing '}' is on an omitted line.
470 function mirrorpush() {
471 log "Starting the mirrorpush"
472 date -u > /srv/ftp.debian.org/web/mirrorstart
473 echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
474 echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/mirrorstart
# Detached (&); its output goes to ~dak/runmirrors.log.
475 sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
# Export per-suite package lists for the i18n project, sign the timestamp
# file, update the 'i18n' symlink and prune exports older than two days.
# NOTE(review): the enclosing function header and the cd preceding the
# symlink update are on omitted lines.
479 log "Exporting package data foo for i18n project"
480 STAMP=$(date "+%Y%m%d%H%M")
481 mkdir -p ${scriptdir}/i18n/${STAMP}
482 cd ${scriptdir}/i18n/${STAMP}
# Suite -> codename exports (lenny/squeeze/sid at the time of writing).
483 dak control-suite -l stable > lenny
484 dak control-suite -l testing > squeeze
485 dak control-suite -l unstable > sid
486 echo "${STAMP}" > timestamp
# Detached signature over the timestamp; verified later by gpgv in the
# i18n sync step.
487 gpg --secret-keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 55BE302B --detach-sign -o timestamp.gpg timestamp
# -T: treat an existing symlink as a file, never descend into it.
491 ln -sfT ${scriptdir}/i18n/${STAMP} i18n
# Remove exports older than 2 days, but never the one just created.
494 find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
# Refresh ftp traffic statistics and the arch-space / pkg-nums pages.
# NOTE(review): the enclosing function header is on an omitted line.
498 log "Updating stats data"
500 $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
# R renders graphs from the freshly written data file.
501 R --slave --vanilla < $base/misc/ftpstats.R
502 dak stats arch-space > $webdir/arch-space
503 dak stats pkg-nums > $webdir/pkg-nums
# aptftpcleanup: prune stale entries from apt-ftparchive's databases.
506 function aptftpcleanup() {
507 log "Clean up apt-ftparchive's databases"
509 apt-ftparchive -q clean apt.conf
# compress: delete pre-run dumps older than 2 days, bzip2-compress any
# dump older than 12 hours (-mmin +720), then hardlink duplicate backups.
# NOTE(review): the cd into the backup dir and both loops' 'done' lines
# are on omitted lines.
512 function compress() {
513 log "Compress old psql backups"
515 find -maxdepth 1 -mindepth 1 -type f -name 'dump_pre_*' -mtime +2 -print0 | xargs -0 --no-run-if-empty rm
# Skip files already compressed (.bz2/.gz).
517 find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
518 while read dumpname; do
519 echo "Compressing $dumpname"
520 bzip2 -9fv "$dumpname"
522 find -maxdepth 1 -mindepth 1 -type f -name "dumpall_*" \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
523 while read dumpname; do
524 echo "Compressing $dumpname"
525 bzip2 -9fv "$dumpname"
# finddup -l: hardlink identical files under the backup dir.
527 finddup -l -d $base/backup
# logstats: feed one finished logfile to the log-statistics tool.
530 function logstats() {
531 $masterdir/tools/logs.py "$1"
534 # save timestamp when we start
# savetimestamp: record dinstall's start time; read back by renamelogfile.
535 function savetimestamp() {
536 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
537 echo ${NOW} > "${dbdir}/dinstallstart"
# maillogfile: mail the full dinstall log to the cron alias.
540 function maillogfile() {
541 cat "$LOGFILE" | mail -s "Log for dinstall run of ${NOW}" cron@ftp-master.debian.org
# renamelogfile: stamp, archive and compress the logfile using the saved
# start time; falls back to "now" (without stats) if dinstallstart is gone.
# NOTE(review): the else/fi lines are on omitted lines.
544 function renamelogfile() {
545 if [ -f "${dbdir}/dinstallstart" ]; then
546 NOW=$(cat "${dbdir}/dinstallstart")
548 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
549 logstats "$logdir/dinstall_${NOW}.log"
550 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# Fallback path: no start timestamp recorded, so no log statistics.
552 error "Problem, I don't know when dinstall started, unable to do log statistics."
553 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
555 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
556 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# testingsourcelist: dump the source packages in testing for the web tree.
# NOTE(review): the closing '}' is on an omitted line.
560 function testingsourcelist() {
561 dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
564 # do a last run of process-unchecked before dinstall is on.
565 function process_unchecked() {
566 log "Processing the unchecked queue"
567 UNCHECKED_WITHOUT_LOCK="-p"