2 # Timestamp. Used for dinstall stat graphs
# Emit a stage marker; $1 is the name of the dinstall stage being stamped.
4 echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
# Cleanup fragment: drop the "accepted" lock. LOCK_ACCEPTED is presumably
# exported by the calling cron script — TODO confirm.
10 rm -f ${LOCK_ACCEPTED}
13 # If we error out this one is called, *FOLLOWED* by cleanup above
# Error handler: builds a subject line and mails the failing stage's log
# to the ftpmaster cron alias. Relies on ${error} and ${STAGEFILE} being
# set by the stage driver — TODO confirm against the full file.
15 ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
17 subject="ATTENTION ATTENTION!"
18 if [ "${error}" = "false" ]; then
19 subject="${subject} (continued)"
# (else branch — dinstall was interrupted rather than continued)
21 subject="${subject} (interrupted)"
23 subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
# Mail the stage log if readable, otherwise an apology; the whole if/fi
# block's stdout is piped into mail(1).
25 if [ -r "${STAGEFILE}.log" ]; then
26 cat "${STAGEFILE}.log"
28 echo "file ${STAGEFILE}.log does not exist, sorry"
29 fi | mail -s "${subject}" -a "X-Debian: DAK" cron@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
32 ########################################################################
33 # the actual dinstall functions follow #
34 ########################################################################
36 # pushing merkels QA user, part one
38 log "Telling merkels QA user that we start dinstall"
# Trigger-only ssh: the remote "sleep 1" is just a ping so the QA side can
# react to the connection. NOTE(review): "SetupTimeOut" is a Debian-patched
# OpenSSH option, not stock upstream — confirm the client supports it.
39 ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1
42 # Create the postgres dump files
43 function pgdump_pre() {
44 log "Creating pre-daily-cron-job backup of $PGDATABASE database..."
# pg_dump picks up the database from the PGDATABASE environment variable;
# output file is timestamped so successive runs never clobber each other.
# NOTE(review): $base and the date substitution are unquoted — harmless only
# while neither contains whitespace.
45 pg_dump > $base/backup/dump_pre_$(date +%Y.%m.%d-%H:%M:%S)
48 function pgdump_post() {
49 log "Creating post-daily-cron-job backup of $PGDATABASE database..."
51 POSTDUMP=$(date +%Y.%m.%d-%H:%M:%S)
52 pg_dump > $base/backup/dump_$POSTDUMP
53 #pg_dumpall --globals-only > $base/backup/dumpall_$POSTDUMP
# NOTE(review): "current" is created relative to the current working
# directory, not inside $base/backup — presumably an elided cd put us in the
# backup dir already; verify before relying on the symlink location.
54 ln -sf $base/backup/dump_$POSTDUMP current
55 #ln -sf $base/backup/dumpall_$POSTDUMP currentall
58 # Load the dak-dev projectb
60 # Make sure to unset any possible psql variables so we don't drop the wrong
61 # f****** database by accident
# Recreate the dev copy of projectb (PostgreSQL instance on port 5434) from
# the "current" dump symlink made by pgdump_post.
71 echo "drop database projectb" | psql -p 5434 template1
72 #cat currentall | psql -p 5433 template1
73 createdb -p 5434 -T template1 projectb
# Strip psql \connect directives from the dump so the restore stays in the
# freshly created projectb database.
74 fgrep -v '\connect' current | psql -p 5434 projectb
77 # Updating various files
79 log "Updating Bugs docu, Mirror list and mailing-lists.txt"
# Each helper script regenerates one published text file; $scriptsdir is
# presumably set by the calling cron config — TODO confirm.
81 $scriptsdir/update-bugdoctxt
82 $scriptsdir/update-mirrorlists
83 $scriptsdir/update-mailingliststxt
84 $scriptsdir/update-pseudopackages.sh
87 # Process (oldstable)-proposed-updates "NEW" queue
# $1 is the policy queue name; output is appended to REPORT and mailed to
# the release team. mail -e suppresses the mail when the body is empty.
90 dak process-policy $1 | tee -a REPORT | mail -a "X-Debian: DAK" -e -s "NEW changes in $1" debian-release@lists.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
94 log "Doing automated p-u-new processing"
95 cd "${queuedir}/p-u-new"
99 log "Doing automated o-p-u-new processing"
100 cd "${queuedir}/o-p-u-new"
104 # The first i18n one, syncing new descriptions
106 log "Synchronizing i18n package descriptions"
107 # First sync their newest data
108 cd ${scriptdir}/i18nsync
# Best-effort rsync from the DDTP side; "|| true" keeps a transient sync
# failure from aborting dinstall.
109 rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
111 # Now check if we still know about the packages for which they created the files
112 # is the timestamp signed by us?
# NOTE(review): "if $(gpgv ...)" is an antipattern — the command substitution
# runs gpgv and then tries to execute its stdout. It happens to work because
# gpgv writes to stderr and the empty command list inherits the substitution's
# exit status, but plain "if gpgv ...; then" is the correct form.
113 if $(gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
114 # now read it. As its signed by us we are sure the content is what we expect, no need
115 # to do more here. And we only test -d a directory on it anyway.
116 TSTAMP=$(cat timestamp)
117 # do we have the dir still?
118 if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
# Sanity-check the synced files against our own copy before publishing.
120 if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
121 # Yay, worked, lets copy around
122 for dir in squeeze sid; do
123 if [ -d dists/${dir}/ ]; then
124 cd dists/${dir}/main/i18n
125 rsync -aq --delete --delete-after . ${ftpdir}/dists/${dir}/main/i18n/.
127 cd ${scriptdir}/i18nsync
# (else: check script rejected the files — complain loudly to l10n list)
130 echo "ARRRR, bad guys, wrong files, ARRR"
131 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
# (else: the signed timestamp refers to a snapshot dir we no longer have)
134 echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
135 echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
# (else: gpgv could not verify the detached signature on the timestamp)
138 echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
139 echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -a "X-Debian: DAK" -s "You can't keep blaming yourself. Just blame yourself once, and move on." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
144 log "Checking for cruft in overrides"
# Drop source/binary suite associations that a newer version supersedes.
148 function dominate() {
149 log "Removing obsolete source and binary associations"
# Regenerate the per-suite file lists consumed by apt-ftparchive.
153 function filelist() {
154 log "Generating file lists for apt-ftparchive"
155 dak generate-filelist
158 function fingerprints() {
159 log "Not updating fingerprints - scripts needs checking"
161 log "Updating fingerprints"
162 dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
# Import the DM keyring and capture the generated change summary; only mail
# debian-project if the summary file is non-empty.
165 dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
167 if [ -s "${OUTFILE}" ]; then
# Heredoc below is the literal mail body — do not edit casually.
168 /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
169 From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
170 To: <debian-project@lists.debian.org>
171 Subject: Debian Maintainers Keyring changes
172 Content-Type: text/plain; charset=utf-8
176 The following changes to the debian-maintainers keyring have just been activated:
180 Debian distribution maintenance software,
181 on behalf of the Keyring maintainers
188 function overrides() {
189 log "Writing overrides into text files"
# Concatenate the sid override components into one combined file.
194 rm -f override.sid.all3
195 for i in main contrib non-free main.debian-installer; do cat override.sid.$i >> override.sid.all3; done
# Publish the package -> pool-file mapping used by external tooling.
199 log "Generating package / file mapping"
200 dak make-pkg-file-mapping | bzip2 -9 > $base/ftp/indices/package-file.map.bz2
203 function packages() {
204 log "Generating Packages and Sources files"
# GZIP env var makes every gzip invocation below produce rsync-friendly
# output (cheaper mirror pulses).
206 GZIP='--rsyncable' ; export GZIP
207 #apt-ftparchive generate apt.conf
208 dak generate-packages-sources
# Incremental index diffs (pdiffs) so apt clients can patch instead of
# re-downloading full indices.
212 log "Generating pdiff files"
213 dak generate-index-diffs
217 log "Generating Release files"
218 dak generate-releases
221 function dakcleanup() {
222 log "Cleanup old packages/files"
223 dak clean-suites -m 10000
227 function buildd_dir() {
228 # Rebuilt the buildd dir to avoid long times of 403
229 log "Regenerating the buildd incoming dir"
# STAMP tags the regenerated tree; used by elided lines below this view.
230 STAMP=$(date "+%Y%m%d%H%M")
239 log "Removing any core files ..."
240 find -type f -name core -print0 | xargs -0r rm -v
# Report (not fix) world-writable or insufficiently readable entries.
242 log "Checking permissions on files in the FTP tree ..."
243 find -type f \( \! -perm -444 -o -perm +002 \) -ls
244 find -type d \( \! -perm -555 -o -perm +002 \) -ls
246 log "Checking symlinks ..."
# Build the recursive ls-lR listing; write to a dotfile first so readers
# never see a half-written listing.
249 log "Creating recursive directory listing ... "
250 rm -f .${FILENAME}.new
251 TZ=UTC ls -lR > .${FILENAME}.new
253 if [ -r ${FILENAME}.gz ] ; then
254 mv -f ${FILENAME}.gz ${FILENAME}.old.gz
255 mv -f .${FILENAME}.new ${FILENAME}
256 rm -f ${FILENAME}.patch.gz
# Publish a patch from the previous listing for bandwidth-limited mirrors.
257 zcat ${FILENAME}.old.gz | diff -u - ${FILENAME} | gzip --rsyncable -9cfn - >${FILENAME}.patch.gz
258 rm -f ${FILENAME}.old.gz
# (else branch: no previous listing existed)
260 mv -f .${FILENAME}.new ${FILENAME}
# NOTE(review): -9cfN here vs -9cfn above — N stores name/timestamp in the
# gzip header, n omits them; confirm the difference is intentional.
263 gzip --rsyncable -9cfN ${FILENAME} >${FILENAME}.gz
267 function mkmaintainers() {
268 log 'Creating Maintainers index ... '
# Strip "~uploaderhint" tokens, then column-align package names; build into
# .new-maintainers and only install if it actually differs.
271 dak make-maintainers ${scriptdir}/masterfiles/pseudo-packages.maintainers | \
272 sed -e "s/~[^ ]*\([ ]\)/\1/" | \
273 awk '{printf "%-20s ", $1; for (i=2; i<=NF; i++) printf "%s ", $i; printf "\n";}' > .new-maintainers
275 if ! cmp -s .new-maintainers Maintainers || [ ! -f Maintainers ]; then
276 log "installing Maintainers ... "
277 mv -f .new-maintainers Maintainers
278 gzip --rsyncable -9v <Maintainers >.new-maintainers.gz
279 mv -f .new-maintainers.gz Maintainers.gz
# (else: unchanged — discard the staging file)
281 rm -f .new-maintainers
285 function mkuploaders() {
286 log 'Creating Uploaders index ... '
# Same pipeline as mkmaintainers but with -u (uploaders instead of
# maintainers); keep the two functions in sync when editing either.
289 dak make-maintainers -u ${scriptdir}/masterfiles/pseudo-packages.maintainers | \
290 sed -e "s/~[^ ]*\([ ]\)/\1/" | \
291 awk '{printf "%-20s ", $1; for (i=2; i<=NF; i++) printf "%s ", $i; printf "\n";}' > .new-uploaders
293 if ! cmp -s .new-uploaders Uploaders || [ ! -f Uploaders ]; then
294 log "installing Uploaders ... "
295 mv -f .new-uploaders Uploaders
296 gzip --rsyncable -9v <Uploaders >.new-uploaders.gz
297 mv -f .new-uploaders.gz Uploaders.gz
303 function copyoverrides() {
304 log 'Copying override files into public view ...'
# $copyoverrides is presumably a space-separated list of override suffixes
# set in the cron config — TODO confirm.
306 for ofile in $copyoverrides ; do
308 chmod g+w override.$ofile
# Stage the compressed copy, install only when content changed.
312 newofile=override.$ofile.gz
313 rm -f .newover-$ofile.gz
# gzip -v statistics are captured (stderr redirected) for the log line below.
314 pc="`gzip 2>&1 -9nv <$overridedir/override.$ofile >.newover-$ofile.gz`"
315 if ! cmp -s .newover-$ofile.gz $newofile || [ ! -f $newofile ]; then
316 log " installing new $newofile $pc"
317 mv -f .newover-$ofile.gz $newofile
# (else: unchanged — remove the staging file)
320 rm -f .newover-$ofile.gz
325 function mkfilesindices() {
327 cd $base/ftp/indices/files/components
# Dump (location path, filename, architecture) for every file known to
# projectb, rewriting absolute archive paths to be relative.
331 log "Querying $PGDATABASE..."
332 echo 'SELECT l.path, f.filename, a.arch_string FROM location l JOIN files f ON (f.location = l.id) LEFT OUTER JOIN (binaries b JOIN architecture a ON (b.architecture = a.id)) ON (f.id = b.file)' | psql -At | sed 's/|//;s,^/srv/ftp-master.debian.org/ftp,.,' | sort >$ARCHLIST
# Helper filter: emit each path plus all of its parent directories (once).
335 perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
# Helper filter: reorder so ./pool/ entries come before everything else.
338 perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
341 log "Generating sources list"
# Source files are the ARCHLIST rows with an empty arch column.
343 sed -n 's/|$//p' $ARCHLIST
345 find ./dists -maxdepth 1 \! -type d
346 find ./dists \! -type d | grep "/source/"
347 ) | sort -u | gzip --rsyncable -9 > source.list.gz
349 log "Generating arch lists"
# Architectures present in ARCHLIST, plus amd64, minus the pseudo-arch "all".
351 ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
353 (sed -n "s/|$a$//p" $ARCHLIST
354 sed -n 's/|all$//p' $ARCHLIST
357 find ./dists -maxdepth 1 \! -type d
358 find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
359 ) | sort -u | gzip --rsyncable -9 > arch-$a.list.gz
362 log "Generating suite lists"
# suite_list helper: $1 is a numeric suite id; emits every pool file
# referenced by that suite's source and binary associations.
365 printf 'SELECT DISTINCT l.path, f.filename FROM (SELECT sa.source AS source FROM src_associations sa WHERE sa.suite = %d UNION SELECT b.source AS source FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) WHERE ba.suite = %d) s JOIN dsc_files df ON (s.source = df.source) JOIN files f ON (df.file = f.id) JOIN location l ON (f.location = l.id)\n' $1 $1 | psql -F' ' -A -t
367 printf 'SELECT l.path, f.filename FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) JOIN files f ON (b.file = f.id) JOIN location l ON (f.location = l.id) WHERE ba.suite = %d\n' $1 | psql -F' ' -A -t
370 printf 'SELECT id, suite_name FROM suite\n' | psql -F' ' -At |
371 while read id suite; do
# Skip suites with no directory on the ftp tree.
372 [ -e $base/ftp/dists/$suite ] || continue
# Resolve suite symlinks (e.g. stable -> squeeze) so both names are covered.
375 distname=$(cd dists; readlink $suite || echo $suite)
376 find ./dists/$distname \! -type d
377 for distdir in ./dists/*; do
378 [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
381 suite_list $id | tr -d ' ' | sed 's,^/srv/ftp-master.debian.org/ftp,.,'
382 ) | sort -u | gzip --rsyncable -9 > suite-${suite}.list.gz
385 log "Finding everything on the ftp site to generate sundries"
386 (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
# Sundries = files on disk not covered by any generated list.
389 zcat *.list.gz | cat - *.list | sort -u |
390 diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
392 log "Generating files list"
395 (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
396 cat - sundries.list dists.list project.list docs.list indices.list |
397 sort -u | poolfirst > ../arch-$a.files
# Per-dist translation file lists for the i18n trees.
401 for dist in sid squeeze; do
402 find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip --rsyncable -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
# "typical" mirror profile: i386+amd64 plus p-u and translations.
406 (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-squeeze.list.gz) |
407 sort -u | poolfirst > ../typical.files
413 function mkchecksums() {
# Build the dsync file list and the compressed md5sums index; link-dups is
# best-effort ("|| true") since duplicate hardlinking is an optimization.
414 dsynclist=$dbdir/dsync.list
415 md5list=$indices/md5sums
417 log -n "Creating md5 / dsync index file ... "
420 ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
421 ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n --rsyncable > ${md5list}.gz
422 ${bindir}/dsync-flist -q link-dups $dsynclist || true
426 log "Regenerating \"public\" mirror/ hardlink fun"
# Archive serial: today's date-based serial, unless the trace file's
# serial + 1 is already higher (multiple pushes per day).
427 DATE_SERIAL=$(date +"%Y%m%d01")
428 FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} )
429 if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
430 SERIAL="$DATE_SERIAL"
# (else: same-day re-push — bump the previous serial instead)
432 SERIAL="$FILESOAPLUS1"
434 date -u > ${TRACEFILE}
435 echo "Using dak v1" >> ${TRACEFILE}
436 echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
437 echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
# Hardlink-copy the ftp tree into the mirror staging dir (cwd); errors are
# ignored so a single bad file does not abort the whole pulse.
439 rsync -aH --link-dest ${ftpdir} --delete --delete-after --ignore-errors ${ftpdir}/. .
443 log "Expiring old database dumps..."
445 $scriptsdir/expire_dumps -d . -p -f "dump_*"
448 function transitionsclean() {
449 log "Removing out of date transitions..."
451 dak transitions -c -a
# Regenerate the DM uploaders status page.
455 log "Updating DM html page"
456 $scriptsdir/dm-monitor >$webdir/dm-uploaders.html
460 log "Categorizing uncategorized bugs filed against ftp.debian.org"
465 # Push dak@merkel so it syncs the projectb there. Returns immediately, the sync runs detached
466 log "Trigger merkel/flotows $PGDATABASE sync"
# Trigger-only pings: the forced remote command is "sleep 1"; the actual
# sync runs detached on the remote side.
467 ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_projectb dak@merkel.debian.org sleep 1
468 # Also trigger flotow, the ftpmaster test box
469 ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_flotow_projectb dak@flotow.debconf.org sleep 1
473 # Push dak@merkel to tell it to sync the dd accessible parts. Returns immediately, the sync runs detached
474 log "Trigger merkels dd accessible parts sync"
475 ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_ddaccess dak@merkel.debian.org sleep 1
478 function mirrorpush() {
479 log "Starting the mirrorpush"
# Write the mirrorstart marker, then kick off the mirror push in the
# background as the archvsync user; dinstall does not wait for it.
480 date -u > /srv/ftp.debian.org/web/mirrorstart
481 echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
482 echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/mirrorstart
483 sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
487 log "Exporting package data foo for i18n project"
# Export per-suite package lists into a timestamped snapshot dir, sign the
# timestamp (this is the signature i18n_part1 later verifies with gpgv),
# and point the "i18n" symlink at the new snapshot.
488 STAMP=$(date "+%Y%m%d%H%M")
489 mkdir -p ${scriptdir}/i18n/${STAMP}
490 cd ${scriptdir}/i18n/${STAMP}
491 dak control-suite -l stable > lenny
492 dak control-suite -l testing > squeeze
493 dak control-suite -l unstable > sid
494 echo "${STAMP}" > timestamp
495 gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 55BE302B --detach-sign -o timestamp.gpg timestamp
499 ln -sfT ${scriptdir}/i18n/${STAMP} i18n
# Prune snapshots older than 2 days, keeping the one just created.
502 find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
506 log "Updating stats data"
# Regenerate ftpstats data, render graphs via R, and refresh the web stats.
508 $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
509 R --slave --vanilla < $base/misc/ftpstats.R
510 dak stats arch-space > $webdir/arch-space
511 dak stats pkg-nums > $webdir/pkg-nums
514 function aptftpcleanup() {
515 log "Clean up apt-ftparchive's databases"
517 apt-ftparchive -q clean apt.conf
520 function compress() {
521 log "Compress old psql backups"
# Pre-dinstall dumps older than 2 days are disposable — delete outright.
523 find -maxdepth 1 -mindepth 1 -type f -name 'dump_pre_*' -mtime +2 -print0 | xargs -0 --no-run-if-empty rm
# Compress uncompressed post-dinstall dumps older than 12 hours (720 min).
# NOTE(review): plain "read" on find output breaks on filenames with
# backslashes/leading spaces; dump names are machine-generated so this is
# presumably safe — confirm.
525 find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
526 while read dumpname; do
527 echo "Compressing $dumpname"
528 bzip2 -9fv "$dumpname"
530 find -maxdepth 1 -mindepth 1 -type f -name "dumpall_*" \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
531 while read dumpname; do
532 echo "Compressing $dumpname"
533 bzip2 -9fv "$dumpname"
# Hardlink identical backup files to save space.
535 finddup -l -d $base/backup
538 function logstats() {
# $1: path of a dinstall log file to feed into the stats tool.
539 $masterdir/tools/logs.py "$1"
542 # save timestamp when we start
543 function savetimestamp() {
# Record the dinstall start time; renamelogfile reads it back later.
544 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
545 echo ${NOW} > "${dbdir}/dinstallstart"
548 function maillogfile() {
# Mail the full run log; NOW is expected to still hold the start timestamp.
549 cat "$LOGFILE" | mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" cron@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
552 function renamelogfile() {
# Rename the run log to a timestamped name, generate stats from it, then
# compress it. Uses the start time saved by savetimestamp when available.
553 if [ -f "${dbdir}/dinstallstart" ]; then
554 NOW=$(cat "${dbdir}/dinstallstart")
556 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
557 logstats "$logdir/dinstall_${NOW}.log"
558 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# (else: no start marker — fall back to the current time, skip stats)
560 error "Problem, I don't know when dinstall started, unable to do log statistics."
561 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
563 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
564 bzip2 -9 "$logdir/dinstall_${NOW}.log"
568 function testingsourcelist() {
# Publish the list of source packages currently in testing (heidi format).
569 dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
572 # do a last run of process-unchecked before dinstall is on.
573 function process_unchecked() {
574 log "Processing the unchecked queue"
# -p: run without taking the unchecked lock (dinstall already holds it).
575 UNCHECKED_WITHOUT_LOCK="-p"
580 # do a run of newstage only before dinstall is on.
581 function newstage() {
582 log "Processing the newstage queue"
583 UNCHECKED_WITHOUT_LOCK="-p"
587 # Function to update a "statefile" telling people what we are doing
590 # This should be called with the argument(s)
591 # - Status name we want to show.
# Rewrites ${DINSTALLSTATE} with the run start time and the current action;
# heredoc below is the literal file content.
594 RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
595 cat >"${DINSTALLSTATE}" <<EOF
596 Dinstall start: ${DINSTALLBEGIN}
598 Action start: ${RIGHTNOW}
602 # extract changelogs and stuff
603 function changelogs() {
604 log "Extracting changelogs"
605 dak make-changelog -e
# Mirror the exported changelogs into the public tree, then trigger the
# metadata mirror push in the background (dinstall does not wait).
606 mkdir -p ${exportpublic}/changelogs
607 cd ${exportpublic}/changelogs
608 rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
609 sudo -H -u archvsync /home/archvsync/runmirrors metaftpdo > ~dak/runmirrors-metadata.log 2>&1 &