# NOTE(review): this chunk is part of dak's "cron.dinstall" archive maintenance
# script; many original lines are missing between the numbered lines, so the
# surrounding function definitions are incomplete in this view.
# Emit a timestamp line (tag passed as $1) used for dinstall stat graphs.
2 # Timestamp. Used for dinstall stat graphs
4 echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
# Drop the "accepted" lockfile — presumably part of a cleanup handler whose
# definition is on lines not visible here.
10 rm -f ${LOCK_ACCEPTED}
13 # If we error out this one is called, *FOLLOWED* by cleanup above
# Error handler: mail the failing stage's log with a subject indicating
# whether the run is a continuation or was interrupted.
# NOTE(review): the else/fi lines around the "(interrupted)" branch are on
# lines not visible in this chunk.
15 ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
17 subject="ATTENTION ATTENTION!"
18 if [ "${error}" = "false" ]; then
19 subject="${subject} (continued)"
21 subject="${subject} (interrupted)"
23 subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
# FIX: feed the log via stdin redirection instead of a useless `cat | mail`.
25 mail -s "${subject}" -a "X-Debian: DAK" cron@backports.debian.org < "${STAGEFILE}.log"
28 ########################################################################
29 # the actual dinstall functions follow #
30 ########################################################################
# Ping the QA user on merkel so it knows dinstall is starting; BatchMode plus
# the connect timeouts keep a dead host from hanging the whole run.
32 # pushing merkels QA user, part one
34 log "Telling merkels QA user that we start dinstall"
35 ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@merkel.debian.org sleep 1
38 # Create the postgres dump files
# Pre-run backup of the $PGDATABASE database (PGDATABASE is expected to be
# exported by the calling environment).
39 function pgdump_pre() {
40 log "Creating pre-daily-cron-job backup of $PGDATABASE database..."
41 pg_dump > $base/backup/dump_pre_$(date +%Y.%m.%d-%H:%M:%S)
# Post-run backup; a "current" symlink is pointed at the newest dump.
44 function pgdump_post() {
45 log "Creating post-daily-cron-job backup of $PGDATABASE database..."
47 POSTDUMP=$(date +%Y.%m.%d-%H:%M:%S)
48 pg_dump > $base/backup/dump_$POSTDUMP
49 #pg_dumpall --globals-only > $base/backup/dumpall_$POSTDUMP
# NOTE(review): the symlink is created relative to the current working
# directory, not inside $base/backup — confirm the caller cd's there first.
50 ln -sf $base/backup/dump_$POSTDUMP current
51 #ln -sf $base/backup/dumpall_$POSTDUMP currentall
54 # Load the dak-dev projectb
56 # Make sure to unset any possible psql variables so we don't drop the wrong
57 # f****** database by accident
# DANGER: drops and recreates the projectb database on the port-5434 cluster,
# then restores it from the "current" dump, skipping its \connect lines.
67 echo "drop database projectb" | psql -p 5434 template1
68 #cat currentall | psql -p 5433 template1
69 createdb -p 5434 -T template1 projectb
70 fgrep -v '\connect' current | psql -p 5434 projectb
73 # Updating various files
# Refresh assorted static text files via helper scripts in $scriptsdir.
75 log "Updating Bugs docu, Mirror list and mailing-lists.txt"
77 $scriptsdir/update-bugdoctxt
78 $scriptsdir/update-mirrorlists
79 $scriptsdir/update-mailingliststxt
80 $scriptsdir/update-pseudopackages.sh
83 # Process (oldstable)-proposed-updates "NEW" queue
# Run policy-queue processing for the suite given in $1 and mail a report.
# NOTE(review): `mail -e` presumably suppresses empty mails — verify against
# the local mail(1) implementation.
86 dak process-policy $1 | tee -a REPORT | mail -e -s "NEW changes in $1" team@backports.debian.org
90 log "Doing automated p-u-new processing"
91 cd "${queuedir}/p-u-new"
95 log "Doing automated o-p-u-new processing"
96 cd "${queuedir}/o-p-u-new"
100 # The first i18n one, syncing new descriptions
# Sync translated package descriptions from the DDTP host, verify our signed
# timestamp file, sanity-check the data, then publish it into the archive.
# NOTE(review): the else/fi lines of the nested conditionals are on lines not
# visible in this chunk.
102 log "Synchronizing i18n package descriptions"
103 # First sync their newest data
104 cd ${scriptdir}/i18nsync
105 rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
107 # Now check if we still know about the packages for which they created the files
108 # is the timestamp signed by us?
# FIX: run gpgv directly instead of `if $(gpgv ...)`; the old form executed
# gpgv's stdout as a command (ShellCheck SC2091). The branch still tests
# gpgv's exit status exactly as before.
109 if gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp; then
110 # now read it. As its signed by us we are sure the content is what we expect, no need
111 # to do more here. And we only test -d a directory on it anyway.
112 TSTAMP=$(cat timestamp)
113 # do we have the dir still?
114 if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
# Run the consistency check before copying anything into the public tree.
116 if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
117 # Yay, worked, lets copy around
118 for dir in squeeze sid; do
119 if [ -d dists/${dir}/ ]; then
120 cd dists/${dir}/main/i18n
121 rsync -aq --delete --delete-after . ${ftpdir}/dists/${dir}/main/i18n/.
123 cd ${scriptdir}/i18nsync
# Failure branches below: complain locally and notify the l10n list.
126 echo "ARRRR, bad guys, wrong files, ARRR"
127 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." debian-l10n-devel@lists.alioth.debian.org
130 echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
131 echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." debian-l10n-devel@lists.alioth.debian.org
134 echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
135 echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -s "You can't keep blaming yourself. Just blame yourself once, and move on." debian-l10n-devel@lists.alioth.debian.org
140 log "Checking for cruft in overrides"
# Drop obsolete source/binary associations from the database.
144 function dominate() {
145 log "Removing obsolete source and binary associations"
# Generate the per-suite file lists consumed by apt-ftparchive.
149 function filelist() {
150 log "Generating file lists for apt-ftparchive"
151 dak generate-filelist
# Import keyrings. The debian-keyring import appears disabled (per the first
# log message) while the DM keyring import is active; any resulting user
# changes are announced by mail.
154 function fingerprints() {
155 log "Not updating fingerprints - scripts needs checking"
157 log "Updating fingerprints"
158 dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
161 dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
# Only send the announcement if the import produced output.
163 if [ -s "${OUTFILE}" ]; then
# Everything after the heredoc start below is literal mail body (its EOF
# terminator is on a line not visible here), so no comments may follow.
164 /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
165 From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
166 To: <debian-project@lists.debian.org>
167 Subject: Debian Maintainers Keyring changes
168 Content-Type: text/plain; charset=utf-8
171 The following changes to the debian-maintainers keyring have just been activated:
175 Debian distribution maintenance software,
176 on behalf of the Keyring maintainers
# Dump override data to text files and concatenate the per-component
# lenny-backports overrides into a combined override.sid.all3.
183 function overrides() {
184 log "Writing overrides into text files"
189 rm -f override.sid.all3
190 for i in main contrib non-free main.debian-installer; do cat override.lenny-backports.$i >> override.sid.all3; done
# Map every published package to its pool file, bzip2-compressed.
194 log "Generating package / file mapping"
195 dak make-pkg-file-mapping | bzip2 -9 > $base/ftp/indices/package-file.map.bz2
# Build Packages/Sources indices via apt-ftparchive (the native dak
# generator is commented out).
198 function packages() {
199 log "Generating Packages and Sources files"
201 apt-ftparchive generate apt.conf
202 #dak generate-packages-sources
# Incremental index diffs (pdiffs) for mirrors.
206 log "Generating pdiff files"
207 dak generate-index-diffs
# Release files for all suites.
211 log "Generating Release files"
212 dak generate-releases
# Expire old packages/files, capped at 10000 removals per run.
215 function dakcleanup() {
216 log "Cleanup old packages/files"
217 dak clean-suites -m 10000
# Rebuild the buildd incoming directory from scratch using a timestamped
# staging name, so buildds never see a half-built tree (avoiding long 403s).
221 function buildd_dir() {
222 # Rebuilt the buildd dir to avoid long times of 403
223 log "Regenerating the buildd incoming dir"
224 STAMP=$(date "+%Y%m%d%H%M")
# Housekeeping on the public FTP tree: remove stray core files, report
# permission problems and bad symlinks, then refresh the recursive ls-lR
# listing plus a compressed diff against the previous listing.
233 log "Removing any core files ..."
234 find -type f -name core -print0 | xargs -0r rm -v
236 log "Checking permissions on files in the FTP tree ..."
# NOTE(review): "-perm +002" is the obsolete GNU find spelling (modern find
# uses "-perm /002"); left as-is since the deployed find version is unknown.
237 find -type f \( \! -perm -444 -o -perm +002 \) -ls
238 find -type d \( \! -perm -555 -o -perm +002 \) -ls
240 log "Checking symlinks ..."
243 log "Creating recursive directory listing ... "
244 rm -f .${FILENAME}.new
245 TZ=UTC ls -lR > .${FILENAME}.new
# If a previous listing exists, install the new one and build a patch.
247 if [ -r ${FILENAME}.gz ] ; then
248 mv -f ${FILENAME}.gz ${FILENAME}.old.gz
249 mv -f .${FILENAME}.new ${FILENAME}
250 rm -f ${FILENAME}.patch.gz
251 zcat ${FILENAME}.old.gz | diff -u - ${FILENAME} | gzip -9cfn - >${FILENAME}.patch.gz
252 rm -f ${FILENAME}.old.gz
# else-branch (first run): just install the new listing.
254 mv -f .${FILENAME}.new ${FILENAME}
257 gzip -9cfN ${FILENAME} >${FILENAME}.gz
# Regenerate the Maintainers index (including pseudo-packages), strip
# "~version" suffixes, column-align the output, and install it only when it
# actually changed.
261 function mkmaintainers() {
262 log 'Creating Maintainers index ... '
265 dak make-maintainers ${scriptdir}/masterfiles/pseudo-packages.maintainers | \
266 sed -e "s/~[^ ]*\([ ]\)/\1/" | \
267 awk '{printf "%-20s ", $1; for (i=2; i<=NF; i++) printf "%s ", $i; printf "\n";}' > .new-maintainers
# Install only on change (or if the file is missing entirely).
269 if ! cmp -s .new-maintainers Maintainers || [ ! -f Maintainers ]; then
270 log "installing Maintainers ... "
271 mv -f .new-maintainers Maintainers
272 gzip -9v <Maintainers >.new-maintainers.gz
273 mv -f .new-maintainers.gz Maintainers.gz
# else-branch: unchanged — discard the temp file.
275 rm -f .new-maintainers
# Same as mkmaintainers but for the Uploaders index (make-maintainers -u).
279 function mkuploaders() {
280 log 'Creating Uploaders index ... '
283 dak make-maintainers -u ${scriptdir}/masterfiles/pseudo-packages.maintainers | \
284 sed -e "s/~[^ ]*\([ ]\)/\1/" | \
285 awk '{printf "%-20s ", $1; for (i=2; i<=NF; i++) printf "%s ", $i; printf "\n";}' > .new-uploaders
# Install only on change (or if the file is missing entirely).
287 if ! cmp -s .new-uploaders Uploaders || [ ! -f Uploaders ]; then
288 log "installing Uploaders ... "
289 mv -f .new-uploaders Uploaders
290 gzip -9v <Uploaders >.new-uploaders.gz
291 mv -f .new-uploaders.gz Uploaders.gz
# Publish gzip'd copies of the override files, installing each only when its
# content changed. $copyoverrides is a space-separated list set elsewhere.
# NOTE(review): the loop's done/fi lines are on lines not visible here.
297 function copyoverrides() {
298 log 'Copying override files into public view ...'
300 for ofile in $copyoverrides ; do
302 chmod g+w override.$ofile
306 newofile=override.$ofile.gz
307 rm -f .newover-$ofile.gz
# FIX: modern $( ) command substitution instead of backticks; captures gzip's
# -v statistics (redirected to stdout) for the install log line.
308 pc="$(gzip 2>&1 -9nv <$overridedir/override.$ofile >.newover-$ofile.gz)"
309 if ! cmp -s .newover-$ofile.gz $newofile || [ ! -f $newofile ]; then
310 log " installing new $newofile $pc"
311 mv -f .newover-$ofile.gz $newofile
314 rm -f .newover-$ofile.gz
# Build the per-arch / per-suite / per-source file lists under
# indices/files/components by joining a full filesystem walk against the
# projectb database. Heavily fragmented in this view: several enclosing
# function/subshell/loop delimiters are on lines not visible here.
319 function mkfilesindices() {
321 cd $base/ftp/indices/files/components
325 log "Querying $PGDATABASE..."
326 echo 'SELECT l.path, f.filename, a.arch_string FROM location l JOIN files f ON (f.location = l.id) LEFT OUTER JOIN (binaries b JOIN architecture a ON (b.architecture = a.id)) ON (f.id = b.file)' | psql -At | sed 's/|//;s,^/srv/ftp-master.debian.org/ftp,.,' | sort >$ARCHLIST
# Emit each path plus all of its parent directories (deduplicated).
329 perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
# Reorder so pool/ paths come first, everything else after.
332 perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
335 log "Generating sources list"
# Lines with an empty arch column are source files.
337 sed -n 's/|$//p' $ARCHLIST
339 find ./dists -maxdepth 1 \! -type d
340 find ./dists \! -type d | grep "/source/"
341 ) | sort -u | gzip -9 > source.list.gz
343 log "Generating arch lists"
# All architectures seen in the DB (amd64 forced in), excluding "all".
345 ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
347 (sed -n "s/|$a$//p" $ARCHLIST
348 sed -n 's/|all$//p' $ARCHLIST
351 find ./dists -maxdepth 1 \! -type d
352 find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
353 ) | sort -u | gzip -9 > arch-$a.list.gz
356 log "Generating suite lists"
# suite_list helper: all source and binary files belonging to suite id $1.
359 printf 'SELECT DISTINCT l.path, f.filename FROM (SELECT sa.source AS source FROM src_associations sa WHERE sa.suite = %d UNION SELECT b.source AS source FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) WHERE ba.suite = %d) s JOIN dsc_files df ON (s.source = df.source) JOIN files f ON (df.file = f.id) JOIN location l ON (f.location = l.id)\n' $1 $1 | psql -F' ' -A -t
361 printf 'SELECT l.path, f.filename FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) JOIN files f ON (b.file = f.id) JOIN location l ON (f.location = l.id) WHERE ba.suite = %d\n' $1 | psql -F' ' -A -t
# One list per suite known to the database that exists on disk.
364 printf 'SELECT id, suite_name FROM suite\n' | psql -F' ' -At |
365 while read id suite; do
366 [ -e $base/ftp/dists/$suite ] || continue
# Resolve codename/suite symlinks so both spellings are covered.
369 distname=$(cd dists; readlink $suite || echo $suite)
370 find ./dists/$distname \! -type d
371 for distdir in ./dists/*; do
372 [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
375 suite_list $id | tr -d ' ' | sed 's,^/srv/ftp-master.debian.org/ftp,.,'
376 ) | sort -u | gzip -9 > suite-${suite}.list.gz
379 log "Finding everything on the ftp site to generate sundries"
380 (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
# "sundries" = files on disk that no generated list accounts for.
383 zcat *.list.gz | cat - *.list | sort -u |
384 diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
386 log "Generating files list"
389 (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
390 cat - sundries.list dists.list project.list docs.list indices.list |
391 sort -u | poolfirst > ../arch-$a.files
# Translation lists per distribution.
395 for dist in sid squeeze; do
396 find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
# "typical" mirror profile: i386+amd64 plus p-u and translations.
400 (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-squeeze.list.gz) |
401 sort -u | poolfirst > ../typical.files
# Build the md5 / dsync index files used by dsync mirrors, then hardlink
# duplicate files in the archive.
407 function mkchecksums() {
408 dsynclist=$dbdir/dsync.list
409 md5list=$indices/md5sums
411 log -n "Creating md5 / dsync index file ... "
414 ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
415 ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
# Best-effort: duplicate-linking failure must not abort the run.
416 ${bindir}/dsync-flist -q link-dups $dsynclist || true
# Refresh the hardlinked "public" mirror copy and bump the archive serial in
# the trace file: date-based serial normally, previous+1 when run again the
# same day.
420 log "Regenerating \"public\" mirror/ hardlink fun"
421 DATE_SERIAL=$(date +"%Y%m%d01")
422 FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} )
423 if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
424 SERIAL="$DATE_SERIAL"
# else-branch: same-day rerun — use the incremented stored serial.
426 SERIAL="$FILESOAPLUS1"
428 date -u > ${TRACEFILE}
429 echo "Using dak v1" >> ${TRACEFILE}
430 echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
431 echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
# Hardlink-copy the ftp tree into the current directory (the mirror root).
433 rsync -aH --link-dest ${ftpdir} --delete --delete-after --ignore-errors ${ftpdir}/. .
# Expire old database dump files according to the expire_dumps policy.
437 log "Expiring old database dumps..."
439 $scriptsdir/expire_dumps -d . -p -f "dump_*"
# Remove out-of-date entries from the transitions file.
442 function transitionsclean() {
443 log "Removing out of date transitions..."
445 dak transitions -c -a
# Regenerate the Debian Maintainers uploaders status page.
449 log "Updating DM html page"
450 $scriptsdir/dm-monitor >$webdir/dm-uploaders.html
# BTS housekeeping plus remote-sync triggers. The ssh calls just run "sleep 1"
# remotely: logging in is the trigger, the actual sync runs detached there.
454 log "Categorizing uncategorized bugs filed against ftp.debian.org"
459 # Push dak@merkel so it syncs the projectb there. Returns immediately, the sync runs detached
460 log "Trigger merkel/flotows $PGDATABASE sync"
461 ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_projectb dak@merkel.debian.org sleep 1
462 # Also trigger flotow, the ftpmaster test box
463 ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_flotow_projectb dak@flotow.debconf.org sleep 1
467 # Push dak@merkel to tell it to sync the dd accessible parts. Returns immediately, the sync runs detached
468 log "Trigger merkels dd accessible parts sync"
469 ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_ddaccess dak@merkel.debian.org sleep 1
# Write the mirrorstart trace file, then kick off the mirror push in the
# background as the archvsync user.
472 function mirrorpush() {
473 log "Starting the mirrorpush"
474 date -u > /srv/backports-web.debian.org/underlay/mirrorstart
475 echo "Using dak v1" >> /srv/backports-web.debian.org/underlay/mirrorstart
476 echo "Running on host $(hostname -f)" >> /srv/backports-web.debian.org/underlay/mirrorstart
477 sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
# Export suite content lists for the i18n (DDTP) project into a timestamped
# directory, sign the timestamp so the other side can verify it came from us,
# and prune exports older than two days.
481 log "Exporting package data foo for i18n project"
482 STAMP=$(date "+%Y%m%d%H%M")
483 mkdir -p ${scriptdir}/i18n/${STAMP}
484 cd ${scriptdir}/i18n/${STAMP}
485 dak control-suite -l stable > lenny
486 dak control-suite -l testing > squeeze
487 dak control-suite -l unstable > sid
488 echo "${STAMP}" > timestamp
489 gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 55BE302B --detach-sign -o timestamp.gpg timestamp
# Atomically repoint the "i18n" symlink at the new export.
493 ln -sfT ${scriptdir}/i18n/${STAMP} i18n
# Remove exports older than 2 days, never the one just created.
496 find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
# Refresh archive statistics pages (the ftpstats/R pipeline is disabled).
500 log "Updating stats data"
502 #$scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
503 #R --slave --vanilla < $base/misc/ftpstats.R
504 dak stats arch-space > $webdir/arch-space
505 dak stats pkg-nums > $webdir/pkg-nums
# Let apt-ftparchive drop stale entries from its cache databases.
508 function aptftpcleanup() {
509 log "Clean up apt-ftparchive's databases"
511 apt-ftparchive -q clean apt.conf
# Backup-directory maintenance: delete old pre-run dumps, bzip2 older
# uncompressed dumps, and deduplicate.
514 function compress() {
515 log "Compress old psql backups"
# Pre-run dumps older than 2 days are deleted outright.
517 find -maxdepth 1 -mindepth 1 -type f -name 'dump_pre_*' -mtime +2 -print0 | xargs -0 --no-run-if-empty rm
# Uncompressed dumps older than 720 minutes (12h) get compressed.
519 find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
520 while read dumpname; do
521 echo "Compressing $dumpname"
522 bzip2 -9fv "$dumpname"
524 find -maxdepth 1 -mindepth 1 -type f -name "dumpall_*" \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
525 while read dumpname; do
526 echo "Compressing $dumpname"
527 bzip2 -9fv "$dumpname"
# NOTE(review): presumably hardlinks duplicate files to save space — confirm
# against finddup's documentation.
529 finddup -l -d $base/backup
# Feed the given logfile ($1) to the log statistics tool.
532 function logstats() {
533 $masterdir/tools/logs.py "$1"
536 # save timestamp when we start
# Record the dinstall start time in ${dbdir}/dinstallstart so that
# renamelogfile() can later name the logfile after it.
537 function savetimestamp() {
# FIX: $( ) instead of backticks; quote the expansion when echoing.
538 NOW=$(date "+%Y.%m.%d-%H:%M:%S")
539 echo "${NOW}" > "${dbdir}/dinstallstart"
# Mail the complete dinstall logfile to the cron address.
542 function maillogfile() {
# FIX: feed the log via stdin redirection instead of a useless `cat | mail`.
543 mail -s "Log for dinstall run of ${NOW}" cron@ftp-master.debian.org < "$LOGFILE"
# Rename and compress the dinstall logfile using the start timestamp recorded
# by savetimestamp(); if that record is missing, complain and fall back to
# the current time.
# NOTE(review): the else/fi lines of this conditional are on lines not
# visible in this chunk.
546 function renamelogfile() {
547 if [ -f "${dbdir}/dinstallstart" ]; then
548 NOW=$(cat "${dbdir}/dinstallstart")
550 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
551 # logstats "$logdir/dinstall_${NOW}.log"
552 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# Fallback branch: start time unknown, so log statistics are impossible.
554 error "Problem, I don't know when dinstall started, unable to do log statistics."
# FIX: modern $( ) command substitution instead of backticks.
555 NOW=$(date "+%Y.%m.%d-%H:%M:%S")
557 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
558 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# Dump the list of source packages in testing (heidi format) for the web dir.
562 function testingsourcelist() {
# FIX: `egrep` is deprecated — use the equivalent `grep -E`.
563 dak ls -s testing -f heidi -r . | grep -E 'source$' > ${webdir}/testing.list
566 # do a last run of process-unchecked before dinstall is on.
567 function process_unchecked() {
568 log "Processing the unchecked queue"
# NOTE(review): judging by the name, "-p" lets the queue run proceed without
# the dinstall lock — confirm against the (not visible) lines that consume
# UNCHECKED_WITHOUT_LOCK.
569 UNCHECKED_WITHOUT_LOCK="-p"
574 # do a run of newstage only before dinstall is on.
575 function newstage() {
576 log "Processing the newstage queue"
577 UNCHECKED_WITHOUT_LOCK="-p"
581 # Function to update a "statefile" telling people what we are doing
584 # This should be called with the argument(s)
585 # - Status name we want to show.
588 RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
# Everything after the heredoc start below is literal statefile content (its
# EOF terminator is on a line not visible here), so no comments may follow.
589 cat >"${DINSTALLSTATE}" <<EOF
590 Dinstall start: ${DINSTALLBEGIN}
592 Action start: ${RIGHTNOW}
596 # extract changelogs and stuff
# Export changelogs, mirror them into the public export area, then kick off
# the metadata mirror run in the background as the archvsync user.
597 function changelogs() {
598 log "Extracting changelogs"
599 dak make-changelog -e
600 mkdir -p ${exportpublic}/changelogs
601 cd ${exportpublic}/changelogs
602 rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
603 sudo -H -u archvsync /home/archvsync/runmirrors metabdo > ~dak/runmirrors-metadata.log 2>&1 &