# NOTE(review): fragmentary view — these lines belong to small helper functions
# (timestamp printer, lock cleanup, error handler) whose braces are not visible here.
2 # Timestamp. Used for dinstall stat graphs
# Prints a stage timestamp; $1 names the stage being timed.
4 echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
# Drop the "accepted" lockfile. NOTE(review): ${LOCK_ACCEPTED} is unquoted — safe
# only if the path never contains whitespace or glob characters; confirm where set.
10 rm -f ${LOCK_ACCEPTED}
13 # If we error out this one is called, *FOLLOWED* by cleanup above
15 ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
17 subject="ATTENTION ATTENTION!"
# "(continued)" when the failing stage flagged itself non-fatal, "(interrupted)"
# otherwise (the else branch is not visible in this view).
18 if [ "${error}" = "false" ]; then
19 subject="${subject} (continued)"
21 subject="${subject} (interrupted)"
23 subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
# Mail the failing stage's log. NOTE(review): UUOC — `mail ... < "${STAGEFILE}.log"`
# would do; `-a` here adds a header line (heirloom mailx semantics — verify MUA).
25 cat "${STAGEFILE}.log" | mail -s "${subject}" -a "X-Debian: DAK" cron@ftp-master.debian.org
28 ########################################################################
29 # the actual dinstall functions follow #
30 ########################################################################
32 # pushing merkels QA user, part one
34 log "Telling merkels QA user that we start dinstall"
# The remote "sleep 1" only triggers the forced command bound to this ssh key.
# NOTE(review): SetupTimeOut is a Debian-patched OpenSSH option; ConnectTimeout
# is passed as well so stock ssh still gets a timeout.
35 ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@merkel.debian.org sleep 1
38 # Create the postgres dump files
# Plain-SQL projectb dump taken before the daily run (closing brace not visible here).
39 function pgdump_pre() {
40 log "Creating pre-daily-cron-job backup of projectb database..."
41 pg_dump projectb > $base/backup/dump_pre_$(date +%Y.%m.%d-%H:%M:%S)
# Post-run dump plus a globals-only dumpall; both share one timestamp so the
# "current"/"currentall" symlinks always point at a matching pair.
44 function pgdump_post() {
45 log "Creating post-daily-cron-job backup of projectb database..."
47 POSTDUMP=$(date +%Y.%m.%d-%H:%M:%S)
48 pg_dump projectb > $base/backup/dump_$POSTDUMP
49 pg_dumpall --globals-only > $base/backup/dumpall_$POSTDUMP
# NOTE(review): the symlinks land in the current working directory — presumably a
# preceding (not visible) cd put us into $base/backup; confirm.
50 ln -sf $base/backup/dump_$POSTDUMP current
51 ln -sf $base/backup/dumpall_$POSTDUMP currentall
54 # Load the dak-dev projectb
# Rebuild the development copy of projectb on the postgres cluster at port 5433
# from the "current"/"currentall" symlinks created by pgdump_post.
57 echo "drop database projectb" | psql -p 5433 template1
# Restore roles/tablespaces first (UUOC: `psql ... < currentall` would do).
58 cat currentall | psql -p 5433 template1
59 createdb -p 5433 -T template0 projectb
# Strip the \connect line so the restore stays in the freshly created database.
60 fgrep -v '\connect' current | psql -p 5433 projectb
63 # Updating various files
65 log "Updating Bugs docu, Mirror list and mailing-lists.txt"
67 $scriptsdir/update-bugdoctxt
68 $scriptsdir/update-mirrorlists
69 $scriptsdir/update-mailingliststxt
70 $scriptsdir/update-pseudopackages.sh
73 # Process (oldstable)-proposed-updates "NEW" queue
# $1 is the policy queue name; mail -e sends nothing when the body is empty.
76 dak process-policy $1 | tee -a REPORT | mail -e -s "NEW changes in $1" debian-release@lists.debian.org
80 log "Doing automated p-u-new processing"
81 cd "${queuedir}/p-u-new"
85 log "Doing automated o-p-u-new processing"
86 cd "${queuedir}/o-p-u-new"
90 # The first i18n one, syncing new descriptions
92 log "Synchronizing i18n package descriptions"
93 # First sync their newest data
94 cd ${scriptdir}/i18nsync
# `|| true` is deliberate: a failed DDTP sync must not abort the whole dinstall run.
95 rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
97 # Now check if we still know about the packages for which they created the files
98 # is the timestamp signed by us?
# Verify the synced i18n timestamp file is signed by our key. The old form
# wrapped the command in $(...), which only worked because gpgv writes its
# diagnostics to stderr; run gpgv directly so its exit status is what the `if`
# tests and any future stdout output cannot be re-executed as a command.
99 if gpgv --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp; then
100 # now read it. As its signed by us we are sure the content is what we expect, no need
101 # to do more here. And we only test -d a directory on it anyway.
102 TSTAMP=$(cat timestamp)
103 # do we have the dir still?
104 if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
# Sanity-check the synced tree against our signed export before publishing.
106 if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
107 # Yay, worked, lets copy around
# Publish the per-suite Translation files into the live ftp tree.
108 for dir in squeeze sid; do
109 if [ -d dists/${dir}/ ]; then
110 cd dists/${dir}/main/i18n
111 rsync -aq --delete --delete-after . ${ftpdir}/dists/${dir}/main/i18n/.
113 cd ${scriptdir}/i18nsync
# Check script rejected the data: complain to the l10n list, publish nothing.
116 echo "ARRRR, bad guys, wrong files, ARRR"
117 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." debian-l10n-devel@lists.alioth.debian.org
# The timestamp directory vanished on our side: skip the update.
120 echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
121 echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." debian-l10n-devel@lists.alioth.debian.org
# Signature verification failed: refuse the data outright.
124 echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
125 echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -s "You can't keep blaming yourself. Just blame yourself once, and move on." debian-l10n-devel@lists.alioth.debian.org
130 log "Checking for cruft in overrides"
# Drop suite associations for source/binaries superseded by newer versions.
134 function dominate() {
135 log "Removing obsolete source and binary associations"
# Produce the per-suite file lists that apt-ftparchive consumes.
139 function filelist() {
140 log "Generating file lists for apt-ftparchive"
141 dak generate-filelist
# Import keyrings into projectb. The Debian keyring import is currently disabled
# (only logged); the DM keyring import below is live.
144 function fingerprints() {
145 log "Not updating fingerprints - scripts needs checking"
147 log "Updating fingerprints"
148 dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
# --generate-users "%s": create a DB user per new key; change log goes to ${OUTFILE}.
151 dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
# Announce keyring changes only when the import actually produced output.
153 if [ -s "${OUTFILE}" ]; then
# -odq: queue instead of delivering immediately; -t: recipients from the headers.
# (Heredoc body follows; its EOF terminator is outside this view.)
154 /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
155 From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
156 To: <debian-project@lists.debian.org>
157 Subject: Debian Maintainers Keyring changes
158 Content-Type: text/plain; charset=utf-8
161 The following changes to the debian-maintainers keyring have just been activated:
165 Debian distribution maintenance software,
166 on behalf of the Keyring maintainers
# Write the override tables out as text files for the mirror.
173 function overrides() {
174 log "Writing overrides into text files"
# Concatenate the sid components into one combined override file.
179 rm -f override.sid.all3
180 for i in main contrib non-free main.debian-installer; do cat override.sid.$i >> override.sid.all3; done
184 log "Generating package / file mapping"
185 dak make-pkg-file-mapping | bzip2 -9 > $base/ftp/indices/package-file.map.bz2
# Generate the Packages/Sources indices via apt-ftparchive.
188 function packages() {
189 log "Generating Packages and Sources files"
# --rsyncable keeps gzip output rsync-friendly. NOTE(review): passing options via
# the GZIP environment variable is deprecated by modern gzip releases.
191 GZIP='--rsyncable' ; export GZIP
192 apt-ftparchive generate apt.conf
196 log "Generating pdiff files"
197 dak generate-index-diffs
201 log "Generating Release files"
202 dak generate-releases
# Expire packages/files no longer referenced by any suite.
205 function dakcleanup() {
206 log "Cleanup old packages/files"
207 dak clean-suites -m 10000
211 function buildd_dir() {
212 # Rebuilt the buildd dir to avoid long times of 403
213 log "Regenerating the buildd incoming dir"
214 STAMP=$(date "+%Y%m%d%H%M")
# Remove stray core dumps anywhere under the current tree (cwd set elsewhere).
223 log "Removing any core files ..."
224 find -type f -name core -print0 | xargs -0r rm -v
# List files that are not world-readable or are world-writable, and directories
# that are not world-accessible or are world-writable, for the admin log.
# `-perm /002` ("any of these bits set") replaces the old `-perm +002` spelling,
# which GNU findutils deprecated and newer releases reject outright.
226 log "Checking permissions on files in the FTP tree ..."
227 find -type f \( \! -perm -444 -o -perm /002 \) -ls
228 find -type d \( \! -perm -555 -o -perm /002 \) -ls
230 log "Checking symlinks ..."
# Recursive ls listing for mirror users; build under a dotted temp name first.
233 log "Creating recursive directory listing ... "
234 rm -f .${FILENAME}.new
235 TZ=UTC ls -lR > .${FILENAME}.new
# If a previous listing exists, install the new one plus a compressed diff
# against the old; otherwise (branch not visible) just install the new listing.
237 if [ -r ${FILENAME}.gz ] ; then
238 mv -f ${FILENAME}.gz ${FILENAME}.old.gz
239 mv -f .${FILENAME}.new ${FILENAME}
240 rm -f ${FILENAME}.patch.gz
241 zcat ${FILENAME}.old.gz | diff -u - ${FILENAME} | gzip --rsyncable -9cfn - >${FILENAME}.patch.gz
242 rm -f ${FILENAME}.old.gz
244 mv -f .${FILENAME}.new ${FILENAME}
# -N keeps name/timestamp in the gzip header (the patch above used -n to omit them).
247 gzip --rsyncable -9cfN ${FILENAME} >${FILENAME}.gz
# Build the Maintainers index (archive + pseudo-packages) and install it only
# when it actually changed.
251 function mkmaintainers() {
252 log 'Creating Maintainers index ... '
# One pipeline (backslash-continued): strip "~version" tokens, then
# column-align the package / maintainer-address pairs.
255 dak make-maintainers ${scriptdir}/masterfiles/pseudo-packages.maintainers | \
256 sed -e "s/~[^ ]*\([ ]\)/\1/" | \
257 awk '{printf "%-20s ", $1; for (i=2; i<=NF; i++) printf "%s ", $i; printf "\n";}' > .new-maintainers
# Install on change, or when Maintainers is missing entirely.
259 if ! cmp -s .new-maintainers Maintainers || [ ! -f Maintainers ]; then
260 log "installing Maintainers ... "
261 mv -f .new-maintainers Maintainers
262 gzip --rsyncable -9v <Maintainers >.new-maintainers.gz
263 mv -f .new-maintainers.gz Maintainers.gz
265 rm -f .new-maintainers
# Publish the override files named in $copyoverrides, gzipped, updating only on
# change (loop/if terminators are not visible in this view).
269 function copyoverrides() {
270 log 'Copying override files into public view ...'
272 for ofile in $copyoverrides ; do
274 chmod g+w override.$ofile
278 newofile=override.$ofile.gz
279 rm -f .newover-$ofile.gz
# Backticks capture gzip's -v statistics for the log line below
# ($(...) would be the modern spelling).
280 pc="`gzip 2>&1 -9nv <$overridedir/override.$ofile >.newover-$ofile.gz`"
281 if ! cmp -s .newover-$ofile.gz $newofile || [ ! -f $newofile ]; then
282 log " installing new $newofile $pc"
283 mv -f .newover-$ofile.gz $newofile
286 rm -f .newover-$ofile.gz
# Build the per-arch / per-suite / per-component file lists used for mirror
# splitting. NOTE(review): heavily elided view — subshell openers and loop
# terminators are partly missing; code left untouched.
291 function mkfilesindices() {
293 cd $base/ftp/indices/files/components
# Full "path|arch" dump of the archive from projectb, pool paths made relative.
297 log "Querying projectb..."
298 echo 'SELECT l.path, f.filename, a.arch_string FROM location l JOIN files f ON (f.location = l.id) LEFT OUTER JOIN (binaries b JOIN architecture a ON (b.architecture = a.id)) ON (f.id = b.file)' | psql projectb -At | sed 's/|//;s,^/srv/ftp.debian.org/ftp,.,' | sort >$ARCHLIST
# Helper body: emit each path plus every parent directory exactly once.
301 perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
# Helper body: output pool/ paths first, everything else afterwards.
304 perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
307 log "Generating sources list"
# Source files: dump rows with no arch (trailing "|") plus dists/ source indexes.
309 sed -n 's/|$//p' $ARCHLIST
311 find ./dists -maxdepth 1 \! -type d
312 find ./dists \! -type d | grep "/source/"
313 ) | sort -u | gzip --rsyncable -9 > source.list.gz
315 log "Generating arch lists"
# Every real architecture seen in the dump; amd64 forced in, "all" excluded.
317 ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
319 (sed -n "s/|$a$//p" $ARCHLIST
320 sed -n 's/|all$//p' $ARCHLIST
323 find ./dists -maxdepth 1 \! -type d
324 find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
325 ) | sort -u | gzip --rsyncable -9 > arch-$a.list.gz
328 log "Generating suite lists"
# suite_list helper: all source and binary files belonging to suite id $1.
331 printf 'SELECT DISTINCT l.path, f.filename FROM (SELECT sa.source AS source FROM src_associations sa WHERE sa.suite = %d UNION SELECT b.source AS source FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) WHERE ba.suite = %d) s JOIN dsc_files df ON (s.source = df.source) JOIN files f ON (df.file = f.id) JOIN location l ON (f.location = l.id)\n' $1 $1 | psql -F' ' -A -t projectb
333 printf 'SELECT l.path, f.filename FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) JOIN files f ON (b.file = f.id) JOIN location l ON (f.location = l.id) WHERE ba.suite = %d\n' $1 | psql -F' ' -A -t projectb
336 printf 'SELECT id, suite_name FROM suite\n' | psql -F' ' -At projectb |
337 while read id suite; do
338 [ -e $base/ftp/dists/$suite ] || continue
# Resolve suite symlinks (e.g. stable -> a codename) so both names are covered.
341 distname=$(cd dists; readlink $suite || echo $suite)
342 find ./dists/$distname \! -type d
343 for distdir in ./dists/*; do
344 [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
347 suite_list $id | tr -d ' ' | sed 's,^/srv/ftp.debian.org/ftp,.,'
348 ) | sort -u | gzip --rsyncable -9 > suite-${suite}.list.gz
351 log "Finding everything on the ftp site to generate sundries"
352 (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
# Sundries: everything on disk that no generated list accounted for.
355 zcat *.list.gz | cat - *.list | sort -u |
356 diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
358 log "Generating files list"
361 (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
362 cat - sundries.list dists.list project.list docs.list indices.list |
363 sort -u | poolfirst > ../arch-$a.files
# Per-suite translation file lists.
367 for dist in sid squeeze; do
368 find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip --rsyncable -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
# "typical" mirror profile: i386+amd64 plus oldstable/p-u and translations.
372 (cat ../arch-i386.files ../arch-amd64.files; zcat suite-oldstable.list.gz suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-squeeze.list.gz) |
373 sort -u | poolfirst > ../typical.files
# md5 / dsync index generation for mirror tooling (fragmentary view).
379 function mkchecksums() {
380 dsynclist=$dbdir/dsync.list
381 md5list=$indices/md5sums
383 log -n "Creating md5 / dsync index file ... "
386 ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
387 ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n --rsyncable > ${md5list}.gz
# Link duplicate files together; failure is tolerated on purpose (|| true).
388 ${bindir}/dsync-flist -q link-dups $dsynclist || true
# Refresh the hardlinked "public" mirror copy and bump the trace serial.
392 log "Regenerating \"public\" mirror/ hardlink fun"
# New serial is max(today's date-based serial, previous trace serial + 1).
393 DATE_SERIAL=$(date +"%Y%m%d01")
394 FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} )
395 if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
396 SERIAL="$DATE_SERIAL"
398 SERIAL="$FILESOAPLUS1"
400 date -u > ${TRACEFILE}
401 echo "Using dak v1" >> ${TRACEFILE}
402 echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
403 echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
# Hardlink (-H, --link-dest) the ftp tree into the mirror dir (the cwd).
405 rsync -aH --link-dest ${ftpdir} --delete --delete-after --ignore-errors ${ftpdir}/. .
# Prune old uncompressed database dumps (retention policy lives in expire_dumps).
409 log "Expiring old database dumps..."
411 $scriptsdir/expire_dumps -d . -p -f "dump_*"
# Clean finished/obsolete entries out of the transitions file (-c clean, -a all).
414 function transitionsclean() {
415 log "Removing out of date transitions..."
417 dak transitions -c -a
421 # Send a report on NEW/BYHAND packages
# mail -e skips sending when the report body is empty.
422 log "Nagging ftpteam about NEW/BYHAND packages"
423 dak queue-report | mail -e -s "NEW and BYHAND on $(date +%D)" ftpmaster@ftp-master.debian.org
424 # and one on crufty packages
425 log "Sending information about crufty packages"
426 dak cruft-report > $webdir/cruft-report-daily.txt
427 dak cruft-report -s experimental >> $webdir/cruft-report-daily.txt
# UUOC: `mail ... < $webdir/cruft-report-daily.txt` would avoid the cat.
428 cat $webdir/cruft-report-daily.txt | mail -e -s "Debian archive cruft report for $(date +%D)" ftpmaster@ftp-master.debian.org
432 log "Updating DM html page"
433 $scriptsdir/dm-monitor >$webdir/dm-uploaders.html
437 log "Categorizing uncategorized bugs filed against ftp.debian.org"
442 # Push dak@merkel so it syncs the projectb there. Returns immediately, the sync runs detached
443 log "Trigger merkel/flotows projectb sync"
# The ssh key's forced command does the real work; "sleep 1" is just the trigger.
444 ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_projectb dak@merkel.debian.org sleep 1
445 # Also trigger flotow, the ftpmaster test box
446 ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_flotow_projectb dak@flotow.debconf.org sleep 1
450 # Push dak@merkel to tell it to sync the dd accessible parts. Returns immediately, the sync runs detached
451 log "Trigger merkels dd accessible parts sync"
452 ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_ddaccess dak@merkel.debian.org sleep 1
# Write the mirrorstart marker, then kick off the mirror push in the background.
455 function mirrorpush() {
456 log "Starting the mirrorpush"
457 date -u > /srv/ftp.debian.org/web/mirrorstart
458 echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
459 echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/mirrorstart
# Backgrounded (&): dinstall does not wait for the mirror run to finish.
460 sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
# Export per-suite package lists for the i18n/DDTP project, signed with our key.
464 log "Exporting package data foo for i18n project"
465 STAMP=$(date "+%Y%m%d%H%M")
466 mkdir -p ${scriptdir}/i18n/${STAMP}
467 cd ${scriptdir}/i18n/${STAMP}
468 dak control-suite -l stable > lenny
469 dak control-suite -l testing > squeeze
470 dak control-suite -l unstable > sid
# The signed timestamp lets the sync-back path (the gpgv check earlier in this
# file) verify the export really originated here.
471 echo "${STAMP}" > timestamp
472 gpg --secret-keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 55BE302B --detach-sign -o timestamp.gpg timestamp
# Repoint the i18n symlink (-T: treat the destination as a file, never a dir).
476 ln -sfT ${scriptdir}/i18n/${STAMP} i18n
# Expire exports older than two days, always keeping the one just created.
479 find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
# Refresh upload statistics data and the graphs derived from it.
483 log "Updating stats data"
485 $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
486 R --slave --vanilla < $base/misc/ftpstats.R
487 dak stats arch-space > $webdir/arch-space
488 dak stats pkg-nums > $webdir/pkg-nums
# Let apt-ftparchive drop unreferenced entries from its cache databases.
491 function aptftpcleanup() {
492 log "Clean up apt-ftparchive's databases"
494 apt-ftparchive -q clean apt.conf
# Compress/expire old postgres backups (loop "done"s are not visible in this view).
497 function compress() {
498 log "Compress old psql backups"
# Pre-run dumps older than two days are simply deleted.
500 find -maxdepth 1 -mindepth 1 -type f -name 'dump_pre_*' -mtime +2 -print0 | xargs -0 --no-run-if-empty rm
# Remaining uncompressed dumps older than 12 hours (720 min) get bzip2'd.
502 find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
503 while read dumpname; do
504 echo "Compressing $dumpname"
505 bzip2 -9fv "$dumpname"
507 find -maxdepth 1 -mindepth 1 -type f -name "dumpall_*" \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
508 while read dumpname; do
509 echo "Compressing $dumpname"
510 bzip2 -9fv "$dumpname"
# Hardlink identical backup files together to save space.
512 finddup -l -d $base/backup
# Feed a finished logfile ($1) to the stats grapher.
515 function logstats() {
516 $masterdir/tools/logs.py "$1"
519 # save timestamp when we start
520 function savetimestamp() {
# Backticks predate this file's usual $(...) style; left as-is.
521 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
522 echo ${NOW} > "${dbdir}/dinstallstart"
# Mail the full dinstall log (UUOC: a redirect would avoid the cat).
525 function maillogfile() {
526 cat "$LOGFILE" | mail -s "Log for dinstall run of ${NOW}" cron@ftp-master.debian.org
529 function renamelogfile() {
# Prefer the start timestamp recorded by savetimestamp; otherwise (branch partly
# elided) log an error and fall back to "now". Either way archive + compress.
530 if [ -f "${dbdir}/dinstallstart" ]; then
531 NOW=$(cat "${dbdir}/dinstallstart")
533 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
534 logstats "$logdir/dinstall_${NOW}.log"
535 bzip2 -9 "$logdir/dinstall_${NOW}.log"
537 error "Problem, I don't know when dinstall started, unable to do log statistics."
538 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
540 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
541 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# Dump the testing source package list for the web tree.
545 function testingsourcelist() {
546 dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
549 # do a last run of process-unchecked before dinstall is on.
550 function process_unchecked() {
551 log "Processing the unchecked queue"
# -p: run without taking the queue lock (dinstall already holds it).
552 UNCHECKED_WITHOUT_LOCK="-p"
557 # do a run of newstage only before dinstall is on.
558 function newstage() {
559 log "Processing the newstage queue"
560 UNCHECKED_WITHOUT_LOCK="-p"
564 # Function to update a "statefile" telling people what we are doing
567 # This should be called with the argument(s)
568 # - Status name we want to show.
# %s in the format string records the epoch alongside the human-readable time.
571 RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
# Heredoc body continues past this view; no edits beyond this point.
572 cat >"${DINSTALLSTATE}" <<EOF
573 Dinstall start: ${DINSTALLBEGIN}
575 Action start: ${RIGHTNOW}