2 # Timestamp. Used for dinstall stat graphs
4 echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
10 rm -f ${LOCK_ACCEPTED}
# If we error out this one is called, *FOLLOWED* by cleanup above
# Builds an alert mail for a failed dinstall stage and sends the stage log.
# Reads globals: error ("false" once dinstall passed the point of no return),
# STAGEFILE (name of the stage that failed).
ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
subject="ATTENTION ATTENTION!"
if [ "${error}" = "false" ]; then
# error=false: dinstall will carry on afterwards, mark the mail accordingly
subject="${subject} (continued)"
# presumably the else-branch: dinstall stopped at this stage - TODO confirm
subject="${subject} (interrupted)"
subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
# Ship the failed stage's log; the X-Debian header lets recipients filter.
cat "${STAGEFILE}.log" | mail -s "${subject}" -a "X-Debian: DAK" cron@backports.debian.org
28 ########################################################################
29 # the actual dinstall functions follow #
30 ########################################################################
32 # pushing merkels QA user, part one
34 log "Telling merkels QA user that we start dinstall"
35 ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@merkel.debian.org sleep 1
# Create the postgres dump files
function pgdump_pre() {
# Snapshot $PGDATABASE before dinstall modifies it; the timestamp in the
# filename lets the expiry/compress jobs age these out later.
log "Creating pre-daily-cron-job backup of $PGDATABASE database..."
pg_dump > $base/backup/dump_pre_$(date +%Y.%m.%d-%H:%M:%S)
function pgdump_post() {
# Post-run snapshot of $PGDATABASE; "current" always points at the newest dump.
log "Creating post-daily-cron-job backup of $PGDATABASE database..."
POSTDUMP=$(date +%Y.%m.%d-%H:%M:%S)
pg_dump > $base/backup/dump_$POSTDUMP
#pg_dumpall --globals-only > $base/backup/dumpall_$POSTDUMP
# NOTE(review): the "current" symlink is created in the current working
# directory while pointing at an absolute path - confirm the CWD is intended.
ln -sf $base/backup/dump_$POSTDUMP current
#ln -sf $base/backup/dumpall_$POSTDUMP currentall
54 # Load the dak-dev projectb
56 # Make sure to unset any possible psql variables so we don't drop the wrong
57 # f****** database by accident
67 echo "drop database projectb" | psql -p 5434 template1
68 #cat currentall | psql -p 5433 template1
69 createdb -p 5434 -T template1 projectb
70 fgrep -v '\connect' current | psql -p 5434 projectb
73 # Updating various files
75 log "Updating Bugs docu, Mirror list and mailing-lists.txt"
77 $scriptsdir/update-bugdoctxt
78 $scriptsdir/update-mirrorlists
79 $scriptsdir/update-mailingliststxt
80 $scriptsdir/update-pseudopackages.sh
83 # Process (oldstable)-proposed-updates "NEW" queue
86 dak process-policy $1 | tee -a REPORT | mail -e -s "NEW changes in $1" debian-release@lists.debian.org
90 log "Doing automated p-u-new processing"
91 cd "${queuedir}/p-u-new"
95 log "Doing automated o-p-u-new processing"
96 cd "${queuedir}/o-p-u-new"
# The first i18n one, syncing new descriptions
log "Synchronizing i18n package descriptions"
# First sync their newest data
cd ${scriptdir}/i18nsync
# "|| true" keeps a failed/unreachable sync from aborting dinstall.
rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
# Now check if we still know about the packages for which they created the files
# is the timestamp signed by us?
if $(gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
# now read it. As its signed by us we are sure the content is what we expect, no need
# to do more here. And we only test -d a directory on it anyway.
TSTAMP=$(cat timestamp)
# do we have the dir still?
if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
# Sanity-check the synced files against the export for that timestamp.
if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
# Yay, worked, lets copy around
for dir in squeeze sid; do
if [ -d dists/${dir}/ ]; then
cd dists/${dir}/main/i18n
# Publish the verified Translation-* files into the live archive tree.
rsync -aq --delete --delete-after . ${ftpdir}/dists/${dir}/main/i18n/.
cd ${scriptdir}/i18nsync
# Failure branches: do not publish, complain to the l10n list instead.
echo "ARRRR, bad guys, wrong files, ARRR"
echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." debian-l10n-devel@lists.alioth.debian.org
echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." debian-l10n-devel@lists.alioth.debian.org
echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -s "You can't keep blaming yourself. Just blame yourself once, and move on." debian-l10n-devel@lists.alioth.debian.org
log "Checking for cruft in overrides"
function dominate() {
# Let dak mark out-dominated source/binary versions so cleanup can reap them.
log "Removing obsolete source and binary associations"
function filelist() {
# Write the per-suite file lists that apt-ftparchive consumes.
log "Generating file lists for apt-ftparchive"
dak generate-filelist
function fingerprints() {
log "Not updating fingerprints - scripts needs checking"
log "Updating fingerprints"
# Pull the official Debian keyring into dak's database.
dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
# Import the DM keyring; "%s" is the template for generated user names.
# Any reported changes land in OUTFILE (set on a line not shown here).
dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
# Only announce when the import actually reported changes (-s: non-empty).
# Everything from here to the heredoc terminator is the literal mail body.
if [ -s "${OUTFILE}" ]; then
/usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
To: <debian-project@lists.debian.org>
Subject: Debian Maintainers Keyring changes
Content-Type: text/plain; charset=utf-8
The following changes to the debian-maintainers keyring have just been activated:
Debian distribution maintenance software,
on behalf of the Keyring maintainers
function overrides() {
log "Writing overrides into text files"
# Rebuild the combined override.sid.all3 from the per-component
# lenny-backports override files.
rm -f override.sid.all3
for i in main contrib non-free main.debian-installer; do cat override.lenny-backports.$i >> override.sid.all3; done
194 log "Generating package / file mapping"
195 dak make-pkg-file-mapping | bzip2 -9 > $base/ftp/indices/package-file.map.bz2
function packages() {
log "Generating Packages and Sources files"
# Exported GZIP options make gzip output rsync-friendly for mirrors.
GZIP='--rsyncable' ; export GZIP
apt-ftparchive generate apt.conf
#dak generate-packages-sources
# Incremental index diffs so clients can fetch small updates.
log "Generating pdiff files"
dak generate-index-diffs
log "Generating Release files"
dak generate-releases
function dakcleanup() {
# Expire packages/files no longer referenced by any suite; -m bounds the
# maximum number of removals per run.
log "Cleanup old packages/files"
dak clean-suites -m 10000
function buildd_dir() {
# Rebuilt the buildd dir to avoid long times of 403
log "Regenerating the buildd incoming dir"
# Minute-granularity stamp, presumably used to version the rebuilt dir -
# the remainder of this function is not shown here.
STAMP=$(date "+%Y%m%d%H%M")
log "Removing any core files ..."
find -type f -name core -print0 | xargs -0r rm -v
# Report (not fix) world-writable or non-readable entries in the FTP tree.
log "Checking permissions on files in the FTP tree ..."
find -type f \( \! -perm -444 -o -perm +002 \) -ls
find -type d \( \! -perm -555 -o -perm +002 \) -ls
log "Checking symlinks ..."
# Build the recursive listing and a patch against the previous run.
# NOTE(review): FILENAME is set on a line not shown - presumably "ls-lR".
log "Creating recursive directory listing ... "
rm -f .${FILENAME}.new
TZ=UTC ls -lR > .${FILENAME}.new
if [ -r ${FILENAME}.gz ] ; then
mv -f ${FILENAME}.gz ${FILENAME}.old.gz
mv -f .${FILENAME}.new ${FILENAME}
rm -f ${FILENAME}.patch.gz
# gzip -n omits name/timestamp from the header so the patch is reproducible.
zcat ${FILENAME}.old.gz | diff -u - ${FILENAME} | gzip --rsyncable -9cfn - >${FILENAME}.patch.gz
rm -f ${FILENAME}.old.gz
mv -f .${FILENAME}.new ${FILENAME}
gzip --rsyncable -9cfN ${FILENAME} >${FILENAME}.gz
function mkmaintainers() {
log 'Creating Maintainers index ... '
# Strip "~login" suffixes and column-align package names for the index.
dak make-maintainers ${scriptdir}/masterfiles/pseudo-packages.maintainers | \
sed -e "s/~[^ ]*\([ ]\)/\1/" | \
awk '{printf "%-20s ", $1; for (i=2; i<=NF; i++) printf "%s ", $i; printf "\n";}' > .new-maintainers
# Install only when the content changed (or the file is missing entirely),
# so mirror mtimes stay stable across no-op runs.
if ! cmp -s .new-maintainers Maintainers || [ ! -f Maintainers ]; then
log "installing Maintainers ... "
mv -f .new-maintainers Maintainers
gzip --rsyncable -9v <Maintainers >.new-maintainers.gz
mv -f .new-maintainers.gz Maintainers.gz
rm -f .new-maintainers
function mkuploaders() {
log 'Creating Uploaders index ... '
# Same pipeline as mkmaintainers but with -u: list uploaders, not maintainers.
dak make-maintainers -u ${scriptdir}/masterfiles/pseudo-packages.maintainers | \
sed -e "s/~[^ ]*\([ ]\)/\1/" | \
awk '{printf "%-20s ", $1; for (i=2; i<=NF; i++) printf "%s ", $i; printf "\n";}' > .new-uploaders
# Only replace the published file when it actually changed.
if ! cmp -s .new-uploaders Uploaders || [ ! -f Uploaders ]; then
log "installing Uploaders ... "
mv -f .new-uploaders Uploaders
gzip --rsyncable -9v <Uploaders >.new-uploaders.gz
mv -f .new-uploaders.gz Uploaders.gz
function copyoverrides() {
log 'Copying override files into public view ...'
# $copyoverrides lists the suite.component override names to publish
# (set on a line not shown here - TODO confirm).
for ofile in $copyoverrides ; do
chmod g+w override.$ofile
newofile=override.$ofile.gz
rm -f .newover-$ofile.gz
# Capture gzip's -v compression statistics for the log line below.
pc="`gzip 2>&1 -9nv <$overridedir/override.$ofile >.newover-$ofile.gz`"
# Replace the published copy only when it differs or does not exist yet.
if ! cmp -s .newover-$ofile.gz $newofile || [ ! -f $newofile ]; then
log " installing new $newofile $pc"
mv -f .newover-$ofile.gz $newofile
rm -f .newover-$ofile.gz
function mkfilesindices() {
cd $base/ftp/indices/files/components
# Dump every known file (path|name|arch) from the DB into ARCHLIST, with the
# archive root rewritten to "." so all generated lists are relative paths.
log "Querying $PGDATABASE..."
echo 'SELECT l.path, f.filename, a.arch_string FROM location l JOIN files f ON (f.location = l.id) LEFT OUTER JOIN (binaries b JOIN architecture a ON (b.architecture = a.id)) ON (f.id = b.file)' | psql -At | sed 's/|//;s,^/srv/ftp-master.debian.org/ftp,.,' | sort >$ARCHLIST
# Helper stage (enclosing definition not shown): emit each path plus every
# parent directory of it, each directory only once.
perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
# Helper stage: reorder so ./pool/ entries come first, the rest after.
perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
log "Generating sources list"
# Source files carry an empty arch column, i.e. a trailing "|".
sed -n 's/|$//p' $ARCHLIST
find ./dists -maxdepth 1 \! -type d
find ./dists \! -type d | grep "/source/"
) | sort -u | gzip --rsyncable -9 > source.list.gz
log "Generating arch lists"
# All arches seen in the DB plus amd64, minus the pseudo-arch "all".
ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
(sed -n "s/|$a$//p" $ARCHLIST
sed -n 's/|all$//p' $ARCHLIST
find ./dists -maxdepth 1 \! -type d
find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
) | sort -u | gzip --rsyncable -9 > arch-$a.list.gz
log "Generating suite lists"
# Per-suite file list: source files referenced by the suite ($1 = suite id)...
printf 'SELECT DISTINCT l.path, f.filename FROM (SELECT sa.source AS source FROM src_associations sa WHERE sa.suite = %d UNION SELECT b.source AS source FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) WHERE ba.suite = %d) s JOIN dsc_files df ON (s.source = df.source) JOIN files f ON (df.file = f.id) JOIN location l ON (f.location = l.id)\n' $1 $1 | psql -F' ' -A -t
# ...plus all binary files associated with the suite.
printf 'SELECT l.path, f.filename FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) JOIN files f ON (b.file = f.id) JOIN location l ON (f.location = l.id) WHERE ba.suite = %d\n' $1 | psql -F' ' -A -t
printf 'SELECT id, suite_name FROM suite\n' | psql -F' ' -At |
while read id suite; do
[ -e $base/ftp/dists/$suite ] || continue
# Resolve suite symlinks (e.g. stable -> a codename) so files list once.
distname=$(cd dists; readlink $suite || echo $suite)
find ./dists/$distname \! -type d
# Also include the symlink entries that point at this dist.
for distdir in ./dists/*; do
[ "$(readlink $distdir)" != "$distname" ] || echo $distdir
suite_list $id | tr -d ' ' | sed 's,^/srv/ftp-master.debian.org/ftp,.,'
) | sort -u | gzip --rsyncable -9 > suite-${suite}.list.gz
log "Finding everything on the ftp site to generate sundries"
(cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
# Sundries: files present on disk that appear in none of the lists above.
zcat *.list.gz | cat - *.list | sort -u |
diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
log "Generating files list"
(echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
cat - sundries.list dists.list project.list docs.list indices.list |
sort -u | poolfirst > ../arch-$a.files
for dist in sid squeeze; do
find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip --rsyncable -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
# "typical" mirror profile: i386+amd64 plus p-u and translation lists.
(cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-squeeze.list.gz) |
sort -u | poolfirst > ../typical.files
function mkchecksums() {
dsynclist=$dbdir/dsync.list
md5list=$indices/md5sums
log -n "Creating md5 / dsync index file ... "
# dsync-flist walks the archive once, then md5sums are derived from the list.
${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n --rsyncable > ${md5list}.gz
# Deduplicating via hardlinks is best-effort; never abort dinstall on failure.
${bindir}/dsync-flist -q link-dups $dsynclist || true
log "Regenerating \"public\" mirror/ hardlink fun"
# Mirror serial: today's date-based value, unless the previous trace file
# already carries an equal-or-larger serial (then use that plus one).
DATE_SERIAL=$(date +"%Y%m%d01")
FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} )
if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
SERIAL="$DATE_SERIAL"
SERIAL="$FILESOAPLUS1"
# Rewrite the mirror trace file that downstream mirrors inspect.
date -u > ${TRACEFILE}
echo "Using dak v1" >> ${TRACEFILE}
echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
# -H + --link-dest hardlinks the ftp tree into the staging dir (fast, no
# extra space); --ignore-errors keeps partial failures from aborting.
rsync -aH --link-dest ${ftpdir} --delete --delete-after --ignore-errors ${ftpdir}/. .
438 log "Expiring old database dumps..."
440 $scriptsdir/expire_dumps -d . -p -f "dump_*"
function transitionsclean() {
# Remove transitions whose conditions have been met (-c clean, -a all).
log "Removing out of date transitions..."
dak transitions -c -a
450 log "Updating DM html page"
451 $scriptsdir/dm-monitor >$webdir/dm-uploaders.html
455 log "Categorizing uncategorized bugs filed against ftp.debian.org"
460 # Push dak@merkel so it syncs the projectb there. Returns immediately, the sync runs detached
461 log "Trigger merkel/flotows $PGDATABASE sync"
462 ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_projectb dak@merkel.debian.org sleep 1
463 # Also trigger flotow, the ftpmaster test box
464 ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_flotow_projectb dak@flotow.debconf.org sleep 1
468 # Push dak@merkel to tell it to sync the dd accessible parts. Returns immediately, the sync runs detached
469 log "Trigger merkels dd accessible parts sync"
470 ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_ddaccess dak@merkel.debian.org sleep 1
function mirrorpush() {
log "Starting the mirrorpush"
# Stamp the mirrorstart marker so mirrors can tell when/where a push began.
date -u > /srv/backports-web.debian.org/underlay/mirrorstart
echo "Using dak v1" >> /srv/backports-web.debian.org/underlay/mirrorstart
echo "Running on host $(hostname -f)" >> /srv/backports-web.debian.org/underlay/mirrorstart
# Run the actual push detached as the archvsync user; output goes to a log.
sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
482 log "Exporting package data foo for i18n project"
483 STAMP=$(date "+%Y%m%d%H%M")
484 mkdir -p ${scriptdir}/i18n/${STAMP}
485 cd ${scriptdir}/i18n/${STAMP}
486 dak control-suite -l stable > lenny
487 dak control-suite -l testing > squeeze
488 dak control-suite -l unstable > sid
489 echo "${STAMP}" > timestamp
490 gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 55BE302B --detach-sign -o timestamp.gpg timestamp
494 ln -sfT ${scriptdir}/i18n/${STAMP} i18n
497 find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
501 log "Updating stats data"
503 #$scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
504 #R --slave --vanilla < $base/misc/ftpstats.R
505 dak stats arch-space > $webdir/arch-space
506 dak stats pkg-nums > $webdir/pkg-nums
function aptftpcleanup() {
# Drop stale cache-db entries so apt-ftparchive's databases stay small.
log "Clean up apt-ftparchive's databases"
apt-ftparchive -q clean apt.conf
function compress() {
log "Compress old psql backups"
# Pre-dinstall dumps are only short-term safety nets: delete after two days.
find -maxdepth 1 -mindepth 1 -type f -name 'dump_pre_*' -mtime +2 -print0 | xargs -0 --no-run-if-empty rm
# Compress any still-uncompressed dump older than 12 hours (-mmin +720).
find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
while read dumpname; do
echo "Compressing $dumpname"
bzip2 -9fv "$dumpname"
find -maxdepth 1 -mindepth 1 -type f -name "dumpall_*" \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
while read dumpname; do
echo "Compressing $dumpname"
bzip2 -9fv "$dumpname"
# Hardlink identical backup files to reclaim space.
finddup -l -d $base/backup
function logstats() {
# Feed a finished dinstall logfile ($1) to the statistics generator.
$masterdir/tools/logs.py "$1"
# save timestamp when we start
function savetimestamp() {
NOW=`date "+%Y.%m.%d-%H:%M:%S"`
# Persisted start time; renamelogfile reads this back after the run.
echo ${NOW} > "${dbdir}/dinstallstart"
function maillogfile() {
# Mail the full dinstall log; ${NOW} holds the run's start timestamp.
cat "$LOGFILE" | mail -s "Log for dinstall run of ${NOW}" cron@ftp-master.debian.org
function renamelogfile() {
# Archive (rename + bzip2) the logfile under the run's recorded start time.
if [ -f "${dbdir}/dinstallstart" ]; then
NOW=$(cat "${dbdir}/dinstallstart")
mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
# logstats "$logdir/dinstall_${NOW}.log"
bzip2 -9 "$logdir/dinstall_${NOW}.log"
# Fallback: no start stamp recorded, so use "now" and skip the statistics.
error "Problem, I don't know when dinstall started, unable to do log statistics."
NOW=`date "+%Y.%m.%d-%H:%M:%S"`
mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
bzip2 -9 "$logdir/dinstall_${NOW}.log"
function testingsourcelist() {
# Publish the list of source packages currently in testing (heidi format).
dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
# do a last run of process-unchecked before dinstall is on.
function process_unchecked() {
log "Processing the unchecked queue"
# "-p": presumably skips the per-queue lock since dinstall already holds the
# master lock - TODO confirm against dak's option handling.
UNCHECKED_WITHOUT_LOCK="-p"
# do a run of newstage only before dinstall is on.
function newstage() {
log "Processing the newstage queue"
UNCHECKED_WITHOUT_LOCK="-p"
582 # Function to update a "statefile" telling people what we are doing
585 # This should be called with the argument(s)
586 # - Status name we want to show.
589 RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
590 cat >"${DINSTALLSTATE}" <<EOF
591 Dinstall start: ${DINSTALLBEGIN}
593 Action start: ${RIGHTNOW}
# extract changelogs and stuff
function changelogs() {
log "Extracting changelogs"
dak make-changelog -e
mkdir -p ${exportpublic}/changelogs
cd ${exportpublic}/changelogs
# Mirror the exported changelogs into the public export tree.
rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
# Kick a metadata-only mirror run, detached, with its own logfile.
sudo -H -u archvsync /home/archvsync/runmirrors metabdo > ~dak/runmirrors-metadata.log 2>&1 &