# Fragment: timestamp logger and lock cleanup. The enclosing function
# definitions are not visible in this chunk of the file.
2 # Timestamp. Used for dinstall stat graphs
4 echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
# Drop the "accepted" queue lock so a later run can take it again.
# NOTE(review): ${LOCK_ACCEPTED} is unquoted — presumably it never
# contains whitespace; confirm where it is assigned.
10 rm -f ${LOCK_ACCEPTED}
13 # If we error out this one is called, *FOLLOWED* by cleanup above
# Error handler fragment: mails the failing stage's log file to the
# cron alias, with a subject line that says whether dinstall carried on
# ("continued") or stopped ("interrupted").
15 ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
17 subject="ATTENTION ATTENTION!"
# ${error} == "false" means the failure was non-fatal and the run went on.
# NOTE(review): the else/fi lines of this conditional are not visible here.
18 if [ "${error}" = "false" ]; then
19 subject="${subject} (continued)"
21 subject="${subject} (interrupted)"
23 subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
# Mail the per-stage log; STAGEFILE is presumably set by the stage runner
# elsewhere in the file — confirm.
25 cat "${STAGEFILE}.log" | mail -s "${subject}" -a "X-Debian: DAK" cron@ftp-master.debian.org
28 ########################################################################
29 # the actual dinstall functions follow #
30 ########################################################################
32 # pushing merkels QA user, part one
34 log "Telling merkels QA user that we start dinstall"
# "sleep 1" is only a trigger: the forced command in the remote
# authorized_keys entry does the real work. NOTE(review): "SetupTimeOut"
# is not a standard OpenSSH option (ConnectTimeout is) — it is probably
# being ignored or rejected; confirm against the ssh version in use.
35 ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@merkel.debian.org sleep 1
38 # Create the postgres dump files
# Dump projectb before the daily run; the filename embeds a timestamp so
# successive runs never overwrite each other. (Closing brace of this
# function is outside the visible chunk.)
39 function pgdump_pre() {
40 log "Creating pre-daily-cron-job backup of projectb database..."
41 pg_dump projectb > $base/backup/dump_pre_$(date +%Y.%m.%d-%H:%M:%S)
# Dump projectb (plus cluster-wide globals) after the daily run and
# refresh the "current"/"currentall" convenience symlinks.
44 function pgdump_post() {
45 log "Creating post-daily-cron-job backup of projectb database..."
47 POSTDUMP=$(date +%Y.%m.%d-%H:%M:%S)
48 pg_dump projectb > $base/backup/dump_$POSTDUMP
# --globals-only captures roles/tablespaces that pg_dump itself omits.
49 pg_dumpall --globals-only > $base/backup/dumpall_$POSTDUMP
# NOTE(review): these symlinks are created relative to the current working
# directory, not inside $base/backup — verify the caller cd's there first.
50 ln -sf $base/backup/dump_$POSTDUMP current
51 ln -sf $base/backup/dumpall_$POSTDUMP currentall
54 # Load the dak-dev projectb
# Fragment: rebuild the development copy of projectb on the secondary
# postgres cluster (port 5433) from the "current"/"currentall" dumps.
57 echo "drop database projectb" | psql -p 5433 template1
58 cat currentall | psql -p 5433 template1
59 createdb -p 5433 -T template0 projectb
# Strip \connect so the restore stays on the database we just created.
60 fgrep -v '\connect' current | psql -p 5433 projectb
63 # Updating various files
# Fragment: refresh assorted static documentation shipped on the mirror
# (bug docs, mirror list, mailing lists, pseudo-packages).
65 log "Updating Bugs docu, Mirror list and mailing-lists.txt"
67 $scriptsdir/update-bugdoctxt
68 $scriptsdir/update-mirrorlists
69 $scriptsdir/update-mailingliststxt
70 $scriptsdir/update-pseudopackages.sh
73 # Process (oldstable)-proposed-updates "NEW" queue
# Fragment: run policy-queue processing for the suite given in $1 and mail
# the release team. "mail -e" suppresses the mail when input is empty.
76 dak process-policy $1 | tee -a REPORT | mail -e -s "NEW changes in $1" debian-release@lists.debian.org
80 log "Doing automated p-u-new processing"
81 cd "${queuedir}/p-u-new"
85 log "Doing automated o-p-u-new processing"
86 cd "${queuedir}/o-p-u-new"
90 # The first i18n one, syncing new descriptions
# Fragment: pull the ddtp translation data, verify the signed timestamp,
# sanity-check the files, and publish them into the archive. Several
# else/fi/done lines of this nesting are outside the visible chunk.
92 log "Synchronizing i18n package descriptions"
93 # First sync their newest data
94 cd ${scriptdir}/i18nsync
# "|| true": a failed sync must not abort the whole dinstall run.
95 rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
97 # Now check if we still know about the packages for which they created the files
98 # is the timestamp signed by us?
# NOTE(review): "if $(gpgv ...)" relies on gpgv printing nothing to
# stdout, so the empty expansion makes the shell use the substitution's
# exit status. Fragile — "if gpgv ... >/dev/null 2>&1" would be clearer.
99 if $(gpgv --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
100 # now read it. As its signed by us we are sure the content is what we expect, no need
101 # to do more here. And we only test -d a directory on it anyway.
102 TSTAMP=$(cat timestamp)
103 # do we have the dir still?
104 if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
# Run the consistency checker before anything is copied into the archive.
106 if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
107 # Yay, worked, lets copy around
108 for dir in squeeze sid; do
109 if [ -d dists/${dir}/ ]; then
110 cd dists/${dir}/main/i18n
111 rsync -aq --delete --delete-after . ${ftpdir}/dists/${dir}/main/i18n/.
113 cd ${scriptdir}/i18nsync
# Failure branches below only log and mail the l10n list; they never abort.
116 echo "ARRRR, bad guys, wrong files, ARRR"
117 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." debian-l10n-devel@lists.alioth.debian.org
120 echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
121 echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." debian-l10n-devel@lists.alioth.debian.org
124 echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
125 echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -s "You can't keep blaming yourself. Just blame yourself once, and move on." debian-l10n-devel@lists.alioth.debian.org
# Fragment: override cruft check (enclosing function not visible).
130 log "Checking for cruft in overrides"
# Drop source/binary associations that are superseded in each suite.
# (Body line(s) and closing brace are outside the visible chunk.)
134 function dominate() {
135 log "Removing obsolete source and binary associations"
# Produce the per-suite file lists consumed by apt-ftparchive.
139 function filelist() {
140 log "Generating file lists for apt-ftparchive"
141 dak generate-filelist
# Import keyrings into projectb and announce debian-maintainers keyring
# changes. Fragment: several lines (including the conditional structure
# around the two log messages) are outside the visible chunk.
144 function fingerprints() {
145 log "Not updating fingerprints - scripts needs checking"
147 log "Updating fingerprints"
148 dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
# Capture the change report; only mail it when non-empty (-s test below).
# NOTE(review): OUTFILE is created/assigned on a line not visible here.
151 dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
153 if [ -s "${OUTFILE}" ]; then
# -odq queues the mail instead of delivering immediately; -t reads the
# recipients from the headers of the here-document.
154 /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
155 From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
156 To: <debian-project@lists.debian.org>
157 Subject: Debian Maintainers Keyring changes
158 Content-Type: text/plain; charset=utf-8
161 The following changes to the debian-maintainers keyring have just been activated:
165 Debian distribution maintenance software,
166 on behalf of the Keyring maintainers
# Write the override data out as text files and build the combined
# override.sid.all3. Fragment: interior lines and closing brace missing.
173 function overrides() {
174 log "Writing overrides into text files"
179 rm -f override.sid.all3
180 for i in main contrib non-free main.debian-installer; do cat override.sid.$i >> override.sid.all3; done
# Fragment: package -> pool file mapping for mirror tooling.
184 log "Generating package / file mapping"
185 dak make-pkg-file-mapping | bzip2 -9 > $base/ftp/indices/package-file.map.bz2
# Generate the Packages/Sources indices via apt-ftparchive, then (in the
# following fragments) the pdiffs and Release files.
188 function packages() {
189 log "Generating Packages and Sources files"
# GZIP='--rsyncable' makes gzip (invoked by apt-ftparchive) emit output
# that rsyncs efficiently between mirror runs.
191 GZIP='--rsyncable' ; export GZIP
192 apt-ftparchive generate apt.conf
196 log "Generating pdiff files"
197 dak generate-index-diffs
201 log "Generating Release files"
202 dak generate-releases
# Expire old packages/files from the suites (keep limit passed via -m).
205 function dakcleanup() {
206 log "Cleanup old packages/files"
207 dak clean-suites -m 10000
# Rebuild the buildd dir to avoid long times of 403
# (rest of the function body is outside the visible chunk).
211 function buildd_dir() {
212 # Rebuild the buildd dir to avoid long times of 403
213 log "Regenerating the buildd incoming dir"
214 STAMP=$(date "+%Y%m%d%H%M")
# Fragment: housekeeping on the FTP tree plus regeneration of the
# recursive ls-lR listing and its rsync-friendly patch file.
223 log "Removing any core files ..."
224 find -type f -name core -print0 | xargs -0r rm -v
# Report (not fix) files/dirs that are not world-readable or are
# group/world-writable.
226 log "Checking permissions on files in the FTP tree ..."
227 find -type f \( \! -perm -444 -o -perm +002 \) -ls
228 find -type d \( \! -perm -555 -o -perm +002 \) -ls
230 log "Checking symlinks ..."
# Build the new listing under a dot-name, then atomically move it in.
# NOTE(review): FILENAME is assigned on a line not visible here.
233 log "Creating recursive directory listing ... "
234 rm -f .${FILENAME}.new
235 TZ=UTC ls -lR > .${FILENAME}.new
237 if [ -r ${FILENAME}.gz ] ; then
238 mv -f ${FILENAME}.gz ${FILENAME}.old.gz
239 mv -f .${FILENAME}.new ${FILENAME}
240 rm -f ${FILENAME}.patch.gz
# Publish a diff against the previous listing so mirrors can patch.
241 zcat ${FILENAME}.old.gz | diff -u - ${FILENAME} | gzip --rsyncable -9cfn - >${FILENAME}.patch.gz
242 rm -f ${FILENAME}.old.gz
244 mv -f .${FILENAME}.new ${FILENAME}
247 gzip --rsyncable -9cfN ${FILENAME} >${FILENAME}.gz
# Regenerate the Maintainers index; install it (and a gzip copy) only
# when its content actually changed.
251 function mkmaintainers() {
252 log 'Creating Maintainers index ... '
# sed strips "~version" suffixes from package names; awk pads the first
# column to a fixed width.
255 dak make-maintainers ${scriptdir}/masterfiles/pseudo-packages.maintainers | \
256 sed -e "s/~[^ ]*\([ ]\)/\1/" | \
257 awk '{printf "%-20s ", $1; for (i=2; i<=NF; i++) printf "%s ", $i; printf "\n";}' > .new-maintainers
# cmp -s: only replace when different (or when no file exists yet).
259 if ! cmp -s .new-maintainers Maintainers || [ ! -f Maintainers ]; then
260 log "installing Maintainers ... "
261 mv -f .new-maintainers Maintainers
262 gzip --rsyncable -9v <Maintainers >.new-maintainers.gz
263 mv -f .new-maintainers.gz Maintainers.gz
265 rm -f .new-maintainers
# Regenerate the Uploaders index; mirrors mkmaintainers() above but runs
# dak make-maintainers with -u.
269 function mkuploaders() {
270 log 'Creating Uploaders index ... '
273 dak make-maintainers -u ${scriptdir}/masterfiles/pseudo-packages.maintainers | \
274 sed -e "s/~[^ ]*\([ ]\)/\1/" | \
275 awk '{printf "%-20s ", $1; for (i=2; i<=NF; i++) printf "%s ", $i; printf "\n";}' > .new-uploaders
# Install only on change, same pattern as the Maintainers file.
277 if ! cmp -s .new-uploaders Uploaders || [ ! -f Uploaders ]; then
278 log "installing Uploaders ... "
279 mv -f .new-uploaders Uploaders
280 gzip --rsyncable -9v <Uploaders >.new-uploaders.gz
281 mv -f .new-uploaders.gz Uploaders.gz
# Publish the override files (gzipped) into the public tree, replacing
# each only when its content changed. Fragment: loop/if closers missing.
287 function copyoverrides() {
288 log 'Copying override files into public view ...'
# NOTE(review): $copyoverrides (the list of suffixes) is defined on a
# line not visible in this chunk.
290 for ofile in $copyoverrides ; do
292 chmod g+w override.$ofile
296 newofile=override.$ofile.gz
297 rm -f .newover-$ofile.gz
# Capture gzip -v's stats (written to stderr) for the log message below.
298 pc="`gzip 2>&1 -9nv <$overridedir/override.$ofile >.newover-$ofile.gz`"
299 if ! cmp -s .newover-$ofile.gz $newofile || [ ! -f $newofile ]; then
300 log " installing new $newofile $pc"
301 mv -f .newover-$ofile.gz $newofile
304 rm -f .newover-$ofile.gz
# Build the indices/files/components lists: per-arch, per-suite and
# source file lists used by partial mirrors. Heavily fragmented here —
# many subshell/loop delimiters are outside the visible chunk.
309 function mkfilesindices() {
311 cd $base/ftp/indices/files/components
# Map every pool file to its (path, filename, architecture); paths are
# rewritten to be relative to the ftp root.
315 log "Querying projectb..."
316 echo 'SELECT l.path, f.filename, a.arch_string FROM location l JOIN files f ON (f.location = l.id) LEFT OUTER JOIN (binaries b JOIN architecture a ON (b.architecture = a.id)) ON (f.id = b.file)' | psql projectb -At | sed 's/|//;s,^/srv/ftp.debian.org/ftp,.,' | sort >$ARCHLIST
# Helper filter: after each file, emit every ancestor directory once.
319 perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
# Helper filter: pool/ paths first, everything else buffered to the end.
322 perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
# Sources: ARCHLIST rows with an empty arch column, plus dists/ source bits.
325 log "Generating sources list"
327 sed -n 's/|$//p' $ARCHLIST
329 find ./dists -maxdepth 1 \! -type d
330 find ./dists \! -type d | grep "/source/"
331 ) | sort -u | gzip --rsyncable -9 > source.list.gz
# Per-architecture lists: arch-specific rows, arch:all rows, and the
# matching dists/ payload (installer, Contents, binary-<arch>, ...).
333 log "Generating arch lists"
335 ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
337 (sed -n "s/|$a$//p" $ARCHLIST
338 sed -n 's/|all$//p' $ARCHLIST
341 find ./dists -maxdepth 1 \! -type d
342 find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
343 ) | sort -u | gzip --rsyncable -9 > arch-$a.list.gz
# Per-suite lists: all source and binary pool files referenced by the
# suite id ($1 inside suite_list), queried straight from projectb.
346 log "Generating suite lists"
349 printf 'SELECT DISTINCT l.path, f.filename FROM (SELECT sa.source AS source FROM src_associations sa WHERE sa.suite = %d UNION SELECT b.source AS source FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) WHERE ba.suite = %d) s JOIN dsc_files df ON (s.source = df.source) JOIN files f ON (df.file = f.id) JOIN location l ON (f.location = l.id)\n' $1 $1 | psql -F' ' -A -t projectb
351 printf 'SELECT l.path, f.filename FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) JOIN files f ON (b.file = f.id) JOIN location l ON (f.location = l.id) WHERE ba.suite = %d\n' $1 | psql -F' ' -A -t projectb
354 printf 'SELECT id, suite_name FROM suite\n' | psql -F' ' -At projectb |
355 while read id suite; do
356 [ -e $base/ftp/dists/$suite ] || continue
# Resolve codename symlinks (e.g. stable -> squeeze) and include any
# aliases pointing at the same dist tree.
359 distname=$(cd dists; readlink $suite || echo $suite)
360 find ./dists/$distname \! -type d
361 for distdir in ./dists/*; do
362 [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
365 suite_list $id | tr -d ' ' | sed 's,^/srv/ftp.debian.org/ftp,.,'
366 ) | sort -u | gzip --rsyncable -9 > suite-${suite}.list.gz
# Sundries = everything on the ftp site not covered by any generated list.
369 log "Finding everything on the ftp site to generate sundries"
370 (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
373 zcat *.list.gz | cat - *.list | sort -u |
374 diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
# Final per-arch file lists, pool files sorted first via poolfirst.
376 log "Generating files list"
379 (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
380 cat - sundries.list dists.list project.list docs.list indices.list |
381 sort -u | poolfirst > ../arch-$a.files
385 for dist in sid squeeze; do
386 find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip --rsyncable -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
# "typical" mirror profile: i386 + amd64 plus p-u and translations.
390 (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-squeeze.list.gz) |
391 sort -u | poolfirst > ../typical.files
# Generate the md5sums index and dsync file list, then hardlink
# duplicate files in the pool (best effort: link-dups may fail).
397 function mkchecksums() {
398 dsynclist=$dbdir/dsync.list
399 md5list=$indices/md5sums
401 log -n "Creating md5 / dsync index file ... "
404 ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
405 ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n --rsyncable > ${md5list}.gz
# "|| true": duplicate-linking failures must not abort dinstall.
406 ${bindir}/dsync-flist -q link-dups $dsynclist || true
# Fragment: refresh the hardlinked "public" mirror copy and its trace
# file. The serial is the max of today's date-based serial and the
# previous trace serial + 1, so it is strictly increasing.
410 log "Regenerating \"public\" mirror/ hardlink fun"
411 DATE_SERIAL=$(date +"%Y%m%d01")
412 FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} )
413 if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
414 SERIAL="$DATE_SERIAL"
416 SERIAL="$FILESOAPLUS1"
418 date -u > ${TRACEFILE}
419 echo "Using dak v1" >> ${TRACEFILE}
420 echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
421 echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
# Hardlink (not copy) the whole ftp tree into the current directory.
# NOTE(review): the cd into the mirror directory is not visible here.
423 rsync -aH --link-dest ${ftpdir} --delete --delete-after --ignore-errors ${ftpdir}/. .
# Fragment: prune old database dumps per the expire_dumps policy.
427 log "Expiring old database dumps..."
429 $scriptsdir/expire_dumps -d . -p -f "dump_*"
# Remove transitions that are no longer blocking anything.
432 function transitionsclean() {
433 log "Removing out of date transitions..."
435 dak transitions -c -a
439 # Send a report on NEW/BYHAND packages
# Fragment: daily queue and cruft reports mailed to the ftp team.
# "mail -e" skips sending when the body is empty.
440 log "Nagging ftpteam about NEW/BYHAND packages"
441 dak queue-report | mail -e -s "NEW and BYHAND on $(date +%D)" ftpmaster@ftp-master.debian.org
442 # and one on crufty packages
443 log "Sending information about crufty packages"
444 dak cruft-report > $webdir/cruft-report-daily.txt
445 dak cruft-report -s experimental >> $webdir/cruft-report-daily.txt
446 cat $webdir/cruft-report-daily.txt | mail -e -s "Debian archive cruft report for $(date +%D)" ftpmaster@ftp-master.debian.org
# Fragment: regenerate the DM uploaders status page.
450 log "Updating DM html page"
451 $scriptsdir/dm-monitor >$webdir/dm-uploaders.html
# Fragment: BTS categorization (the actual command line is not visible).
455 log "Categorizing uncategorized bugs filed against ftp.debian.org"
460 # Push dak@merkel so it syncs the projectb there. Returns immediately, the sync runs detached
461 log "Trigger merkel/flotows projectb sync"
# "sleep 1" is only the trigger; the forced remote command does the sync.
# NOTE(review): "SetupTimeOut" is not a standard OpenSSH option — confirm.
462 ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_projectb dak@merkel.debian.org sleep 1
463 # Also trigger flotow, the ftpmaster test box
464 ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_flotow_projectb dak@flotow.debconf.org sleep 1
468 # Push dak@merkel to tell it to sync the dd accessible parts. Returns immediately, the sync runs detached
469 log "Trigger merkels dd accessible parts sync"
470 ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_ddaccess dak@merkel.debian.org sleep 1
# Kick off the mirror push: write the mirrorstart trace, then run the
# archvsync push script detached in the background.
473 function mirrorpush() {
474 log "Starting the mirrorpush"
475 date -u > /srv/ftp.debian.org/web/mirrorstart
476 echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
477 echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/mirrorstart
# Backgrounded (&): dinstall does not wait for the mirror network.
478 sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
# Fragment: export suite contents for the i18n project into a
# timestamped directory, sign the timestamp, and expire old exports.
482 log "Exporting package data foo for i18n project"
483 STAMP=$(date "+%Y%m%d%H%M")
484 mkdir -p ${scriptdir}/i18n/${STAMP}
485 cd ${scriptdir}/i18n/${STAMP}
486 dak control-suite -l stable > lenny
487 dak control-suite -l testing > squeeze
488 dak control-suite -l unstable > sid
489 echo "${STAMP}" > timestamp
# Detached signature over the timestamp file; the i18n side verifies it
# with the matching public keyring (see the gpgv check earlier).
490 gpg --secret-keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 55BE302B --detach-sign -o timestamp.gpg timestamp
# -T: replace the "i18n" symlink even if it points at a directory.
494 ln -sfT ${scriptdir}/i18n/${STAMP} i18n
# Remove exports older than 2 days, keeping the one just created.
497 find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
# Fragment: recompute ftp traffic stats (R renders the graphs) and the
# archive size statistics published on the web tree.
501 log "Updating stats data"
503 $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
504 R --slave --vanilla < $base/misc/ftpstats.R
505 dak stats arch-space > $webdir/arch-space
506 dak stats pkg-nums > $webdir/pkg-nums
# Prune stale entries from apt-ftparchive's cache databases.
509 function aptftpcleanup() {
510 log "Clean up apt-ftparchive's databases"
512 apt-ftparchive -q clean apt.conf
# Housekeeping for $base/backup: delete pre-run dumps older than 2 days,
# bzip2 any uncompressed dump older than 12 hours (720 min), then let
# finddup hardlink identical backups to save space.
515 function compress() {
516 log "Compress old psql backups"
518 find -maxdepth 1 -mindepth 1 -type f -name 'dump_pre_*' -mtime +2 -print0 | xargs -0 --no-run-if-empty rm
# NOTE(review): filenames here are machine-generated timestamps, so the
# unquoted "while read" pipeline is safe in practice.
520 find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
521 while read dumpname; do
522 echo "Compressing $dumpname"
523 bzip2 -9fv "$dumpname"
525 find -maxdepth 1 -mindepth 1 -type f -name "dumpall_*" \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
526 while read dumpname; do
527 echo "Compressing $dumpname"
528 bzip2 -9fv "$dumpname"
530 finddup -l -d $base/backup
# Feed a finished dinstall log to the stats generator ($1 = log path).
533 function logstats() {
534 $masterdir/tools/logs.py "$1"
537 # save timestamp when we start
# Record the run's start time; renamelogfile() reads it back later.
538 function savetimestamp() {
539 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
540 echo ${NOW} > "${dbdir}/dinstallstart"
# Mail the complete run log to the cron alias.
543 function maillogfile() {
544 cat "$LOGFILE" | mail -s "Log for dinstall run of ${NOW}" cron@ftp-master.debian.org
# Archive the run log under the start timestamp saved by savetimestamp(),
# feed it to logstats, and compress it. Falls back to the current time
# (and skips stats) when the start marker is missing.
547 function renamelogfile() {
548 if [ -f "${dbdir}/dinstallstart" ]; then
549 NOW=$(cat "${dbdir}/dinstallstart")
551 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
552 logstats "$logdir/dinstall_${NOW}.log"
553 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# Fallback branch: no start marker, so no meaningful stats possible.
555 error "Problem, I don't know when dinstall started, unable to do log statistics."
556 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
558 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
559 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# Publish the list of source packages currently in testing.
563 function testingsourcelist() {
564 dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
567 # do a last run of process-unchecked before dinstall is on.
# Fragment: the actual queue-processing invocations are on lines not
# visible here; "-p" presumably means "run without taking the unchecked
# lock" — confirm against the do_unchecked/do_newstage helpers.
568 function process_unchecked() {
569 log "Processing the unchecked queue"
570 UNCHECKED_WITHOUT_LOCK="-p"
575 # do a run of newstage only before dinstall is on.
576 function newstage() {
577 log "Processing the newstage queue"
578 UNCHECKED_WITHOUT_LOCK="-p"
582 # Function to update a "statefile" telling people what we are doing
585 # This should be called with the argument(s)
586 # - Status name we want to show.
# Fragment: writes the machine-readable dinstall progress file.
# NOTE(review): DINSTALLSTATE/DINSTALLBEGIN are set outside this chunk,
# and the here-document's terminating EOF line is not visible here.
589 RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
590 cat >"${DINSTALLSTATE}" <<EOF
591 Dinstall start: ${DINSTALLBEGIN}
593 Action start: ${RIGHTNOW}