1 # Timestamp. Used for dinstall stat graphs
# Body of the timestamp helper (function header not visible in this chunk).
# $1 is presumably the stage name being timed -- confirm against callers.
3 echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
12 # If we error out this one is called, *FOLLOWED* by cleanup above
# Error hook: builds a subject line describing how the run failed and mails
# the failing stage's log to the cron alias.  (Function header and the
# matching else/fi lines are not visible in this chunk.)
14 ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
16 subject="ATTENTION ATTENTION!"
# ${error} == "false" apparently means dinstall continued past the failure;
# anything else means the run was interrupted -- TODO confirm where ${error}
# is set, it is not visible here.
17 if [ "${error}" = "false" ]; then
18 subject="${subject} (continued)"
20 subject="${subject} (interrupted)"
22 subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
# NOTE(review): UUOC -- 'mail ... < "${STAGEFILE}.log"' would avoid the cat.
24 cat "${STAGEFILE}.log" | mail -s "${subject}" -a "X-Debian: DAK" cron@ftp-master.debian.org
27 ########################################################################
28 # the actual dinstall functions follow #
29 ########################################################################
31 # pushing merkels QA user, part one
33 log "Telling merkels QA user that we start dinstall"
# NOTE(review): "SetupTimeOut" is not a valid OpenSSH client option (modern
# ssh rejects unknown options); ConnectTimeout alone should be sufficient.
34 ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@merkel.debian.org sleep 1
37 # Create the postgres dump files
38 function pgdump_pre() {
# Dump the projectb database before the daily run; the filename carries a
# timestamp so successive dumps never overwrite each other.
39 log "Creating pre-daily-cron-job backup of projectb database..."
# NOTE(review): $base is unquoted -- safe only while it contains no spaces.
40 pg_dump projectb > $base/backup/dump_pre_$(date +%Y.%m.%d-%H:%M:%S)
43 function pgdump_post() {
# Dump projectb (and the cluster-wide globals) after the daily run, then
# refresh the "current"/"currentall" convenience symlinks.
44 log "Creating post-daily-cron-job backup of projectb database..."
46 POSTDUMP=$(date +%Y.%m.%d-%H:%M:%S)
47 pg_dump projectb > $base/backup/dump_$POSTDUMP
48 pg_dumpall --globals-only > $base/backup/dumpall_$POSTDUMP
# NOTE(review): the symlinks are created relative to the current working
# directory (any cd is on lines not visible here) -- confirm cwd is the
# intended backup directory when this runs.
49 ln -sf $base/backup/dump_$POSTDUMP current
50 ln -sf $base/backup/dumpall_$POSTDUMP currentall
53 # Load the dak-dev projectb
# Recreate the dak-dev copy of projectb (the postgres instance on port 5433)
# from the freshly written "current"/"currentall" dumps.
56 echo "drop database projectb" | psql -p 5433 template1
# Restore cluster globals first, then recreate the database and load its data,
# filtering the \connect line so psql stays on the db we chose on the cmdline.
57 cat currentall | psql -p 5433 template1
58 createdb -p 5433 -T template0 projectb
59 fgrep -v '\connect' current | psql -p 5433 projectb
62 # Updating various files
# Refresh the static documentation-style files published on the mirror
# (function header not visible in this chunk).
64 log "Updating Bugs docu, Mirror list and mailing-lists.txt"
66 $scriptsdir/update-bugdoctxt
67 $scriptsdir/update-mirrorlists
68 $scriptsdir/update-mailingliststxt
69 $scriptsdir/update-pseudopackages.sh
72 # Process (oldstable)-proposed-updates "NEW" queue
# Automated handling of the p-u-new / o-p-u-new queues.  The surrounding
# function headers and most of the bodies are not visible in this chunk.
# '|| true' keeps a failing process-new run from aborting dinstall.
76 dak process-new -a -C COMMENTS >> REPORT || true
80 log "Doing automated p-u-new processing"
84 log "Doing automated o-p-u-new processing"
88 # The first i18n one, syncing new descriptions
# Pull fresh DDTP translation data, verify our own signed timestamp file,
# sanity-check the payload, and only then publish it into the ftp tree.
# Several else/fi/done lines are not visible in this chunk.
90 log "Synchronizing i18n package descriptions"
91 # First sync their newest data
92 cd ${scriptdir}/i18nsync
# Best effort: a failed rsync from the DDTP side must not abort dinstall.
93 rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
95 # Now check if we still know about the packages for which they created the files
96 # is the timestamp signed by us?
# NOTE(review): 'if $(cmd)' is an obscure idiom -- with empty stdout the exit
# status of the command substitution is used, so gpgv's status is in fact
# tested, but a plain 'if gpgv ...; then' would say the same thing clearly.
97 if $(gpgv --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
98 # now read it. As its signed by us we are sure the content is what we expect, no need
99 # to do more here. And we only test -d a directory on it anyway.
100 TSTAMP=$(cat timestamp)
101 # do we have the dir still?
102 if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
# Run the consistency check before anything is copied into the ftp tree.
104 if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
105 # Yay, worked, lets copy around
106 for dir in squeeze sid; do
107 if [ -d dists/${dir}/ ]; then
108 cd dists/${dir}/main/i18n
109 rsync -aq --delete --delete-after . ${ftpdir}/dists/${dir}/main/i18n/.
111 cd ${scriptdir}/i18nsync
# Failure branches: each echoes to the log and mails debian-l10n-devel.
114 echo "ARRRR, bad guys, wrong files, ARRR"
115 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." debian-l10n-devel@lists.alioth.debian.org
118 echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
119 echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." debian-l10n-devel@lists.alioth.debian.org
122 echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
123 echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -s "You can't keep blaming yourself. Just blame yourself once, and move on." debian-l10n-devel@lists.alioth.debian.org
# Cruft check over the override files (function header and the actual
# 'dak check-overrides'-style call are not visible in this chunk).
128 log "Checking for cruft in overrides"
132 function dominate() {
# Drop obsolete source/binary associations.  The 'dak dominate' invocation
# and the closing brace are not visible in this chunk.
133 log "Removing obsolete source and binary associations"
137 function filelist() {
# Produce the per-suite file lists that apt-ftparchive consumes.
138 log "Generating file lists for apt-ftparchive"
139 dak generate-filelist
142 function fingerprints() {
# Import uploader keyrings into the database.  The debian-keyring import is
# currently disabled (per the first log line); the Debian Maintainers
# keyring import writes its change summary to ${OUTFILE} and mails it to
# debian-project when non-empty.  ${OUTFILE} setup is not visible here.
143 log "Not updating fingerprints - scripts needs checking"
145 log "Updating fingerprints"
146 dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
149 dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
151 if [ -s "${OUTFILE}" ]; then
# Everything from the sendmail heredoc below up to its (not visible) EOF
# terminator is literal mail content -- do not insert anything in between.
152 /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
153 From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
154 To: <debian-project@lists.debian.org>
155 Subject: Debian Maintainers Keyring changes
156 Content-Type: text/plain; charset=utf-8
159 The following changes to the debian-maintainers keyring have just been activated:
163 Debian distribution maintenance software,
164 on behalf of the Keyring maintainers
171 function overrides() {
# Export override data to text files and build the combined sid override.
172 log "Writing overrides into text files"
# Concatenate the per-component sid overrides into override.sid.all3.
177 rm -f override.sid.all3
178 for i in main contrib non-free main.debian-installer; do cat override.sid.$i >> override.sid.all3; done
# Map binary packages to their pool files (function header not visible);
# output is bzip2-compressed for the mirror's indices directory.
182 log "Generating package / file mapping"
183 dak make-pkg-file-mapping | bzip2 -9 > $base/ftp/indices/package-file.map.bz2
186 function packages() {
187 log "Generating Packages and Sources files"
# GZIP='--rsyncable' makes gzip (as spawned by apt-ftparchive) emit
# rsync-friendly output, reducing mirror transfer after small changes.
189 GZIP='--rsyncable' ; export GZIP
190 apt-ftparchive generate apt.conf
# Two separate steps (their function headers are not visible here):
# pdiff generation and Release file generation.
194 log "Generating pdiff files"
195 dak generate-index-diffs
199 log "Generating Release files"
200 dak generate-releases
203 function dakcleanup() {
# Expire old packages/files from the archive.  '-m 10000' presumably caps
# the amount of work per run -- TODO confirm against 'dak clean-suites' help.
204 log "Cleanup old packages/files"
205 dak clean-suites -m 10000
209 function buildd_dir() {
210 # Rebuild the buildd dir to avoid long times of 403
211 log "Regenerating the buildd incoming dir"
# Timestamp used for the rebuilt directory; the rest of the body is not
# visible in this chunk.
212 STAMP=$(date "+%Y%m%d%H%M")
# Housekeeping over the FTP tree plus (re)creation of the recursive ls-lR
# listing.  Function header, cd, FILENAME assignment and several fi/closing
# lines are not visible in this chunk.
221 log "Removing any core files ..."
222 find -type f -name core -print0 | xargs -0r rm -v
224 log "Checking permissions on files in the FTP tree ..."
# Report (not fix) entries that are not world-readable or are group/world
# writable.  NOTE(review): '-perm +002' is deprecated GNU syntax; modern
# findutils wants '-perm /002'.
225 find -type f \( \! -perm -444 -o -perm +002 \) -ls
226 find -type d \( \! -perm -555 -o -perm +002 \) -ls
228 log "Checking symlinks ..."
231 log "Creating recursive directory listing ... "
232 rm -f .${FILENAME}.new
# TZ=UTC keeps timestamps in the listing mirror-independent.
233 TZ=UTC ls -lR > .${FILENAME}.new
# If a previous listing exists, also publish a patch against it.
235 if [ -r ${FILENAME}.gz ] ; then
236 mv -f ${FILENAME}.gz ${FILENAME}.old.gz
237 mv -f .${FILENAME}.new ${FILENAME}
238 rm -f ${FILENAME}.patch.gz
239 zcat ${FILENAME}.old.gz | diff -u - ${FILENAME} | gzip --rsyncable -9cfn - >${FILENAME}.patch.gz
240 rm -f ${FILENAME}.old.gz
242 mv -f .${FILENAME}.new ${FILENAME}
# NOTE(review): '-9cfN' here vs '-9cfn' above -- -N stores name/timestamp in
# the gzip header while -n omits them; confirm the inconsistency is wanted
# (-n is the reproducible choice).
245 gzip --rsyncable -9cfN ${FILENAME} >${FILENAME}.gz
249 function mkmaintainers() {
250 log 'Creating Maintainers index ... '
# Build the Maintainers index: strip "~..." suffixes from the first column
# and align package names into a fixed-width column.
253 dak make-maintainers ${scriptdir}/masterfiles/pseudo-packages.maintainers | \
254 sed -e "s/~[^ ]*\([ ]\)/\1/" | \
255 awk '{printf "%-20s ", $1; for (i=2; i<=NF; i++) printf "%s ", $i; printf "\n";}' > .new-maintainers
# Only install the freshly built file when it actually changed (or when no
# Maintainers file exists yet); otherwise discard the temp file.
257 if ! cmp -s .new-maintainers Maintainers || [ ! -f Maintainers ]; then
258 log "installing Maintainers ... "
259 mv -f .new-maintainers Maintainers
260 gzip --rsyncable -9v <Maintainers >.new-maintainers.gz
261 mv -f .new-maintainers.gz Maintainers.gz
263 rm -f .new-maintainers
267 function copyoverrides() {
268 log 'Copying override files into public view ...'
# $copyoverrides is deliberately unquoted: it is a whitespace-separated list
# of override-file suffixes to iterate over.
270 for ofile in $copyoverrides ; do
272 chmod g+w override.$ofile
276 newofile=override.$ofile.gz
277 rm -f .newover-$ofile.gz
# $pc captures gzip -v's compression statistics (gzip prints them on stderr,
# hence the 2>&1).  NOTE(review): backticks are legacy; $(...) preferred.
278 pc="`gzip 2>&1 -9nv <$overridedir/override.$ofile >.newover-$ofile.gz`"
# Install only when the compressed content actually changed.
279 if ! cmp -s .newover-$ofile.gz $newofile || [ ! -f $newofile ]; then
280 log " installing new $newofile $pc"
281 mv -f .newover-$ofile.gz $newofile
284 rm -f .newover-$ofile.gz
289 function mkfilesindices() {
# Build the per-architecture and per-suite file lists that partial mirrors
# use to decide what to fetch.  This block is heavily elided in this chunk
# (helper definitions, loop bodies and many done/fi lines are not visible),
# so the comments below describe only what the visible lines establish.
291 cd $base/ftp/indices/files/components
295 log "Querying projectb..."
# One line per pool file: "path filename|arch" (arch empty for sources),
# with the ftp root rewritten to "." so paths are mirror-relative.
296 echo 'SELECT l.path, f.filename, a.arch_string FROM location l JOIN files f ON (f.location = l.id) LEFT OUTER JOIN (binaries b JOIN architecture a ON (b.architecture = a.id)) ON (f.id = b.file)' | psql projectb -At | sed 's/|//;s,^/srv/ftp.debian.org/ftp,.,' | sort >$ARCHLIST
# Helper body: emit each path plus all of its parent directories, each once.
299 perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
# Helper body: print ./pool/ paths first, everything else afterwards.
302 perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
305 log "Generating sources list"
# Sources = ARCHLIST rows with an empty arch column, plus dists/ source dirs.
307 sed -n 's/|$//p' $ARCHLIST
309 find ./dists -maxdepth 1 \! -type d
310 find ./dists \! -type d | grep "/source/"
311 ) | sort -u | gzip --rsyncable -9 > source.list.gz
313 log "Generating arch lists"
# Every arch present in projectb, plus amd64, minus the pseudo-arch "all".
315 ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
317 (sed -n "s/|$a$//p" $ARCHLIST
318 sed -n 's/|all$//p' $ARCHLIST
321 find ./dists -maxdepth 1 \! -type d
322 find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
323 ) | sort -u | gzip --rsyncable -9 > arch-$a.list.gz
326 log "Generating suite lists"
# suite_list helper body: all source and binary pool files of suite id $1.
329 printf 'SELECT DISTINCT l.path, f.filename FROM (SELECT sa.source AS source FROM src_associations sa WHERE sa.suite = %d UNION SELECT b.source AS source FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) WHERE ba.suite = %d) s JOIN dsc_files df ON (s.source = df.source) JOIN files f ON (df.file = f.id) JOIN location l ON (f.location = l.id)\n' $1 $1 | psql -F' ' -A -t projectb
331 printf 'SELECT l.path, f.filename FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) JOIN files f ON (b.file = f.id) JOIN location l ON (f.location = l.id) WHERE ba.suite = %d\n' $1 | psql -F' ' -A -t projectb
# Iterate over every suite that has a dists/ directory on disk.
334 printf 'SELECT id, suite_name FROM suite\n' | psql -F' ' -At projectb |
335 while read id suite; do
336 [ -e $base/ftp/dists/$suite ] || continue
# Resolve suite symlinks (e.g. stable -> codename) so the listing uses the
# real directory, and also include any symlinks pointing at it.
339 distname=$(cd dists; readlink $suite || echo $suite)
340 find ./dists/$distname \! -type d
341 for distdir in ./dists/*; do
342 [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
345 suite_list $id | tr -d ' ' | sed 's,^/srv/ftp.debian.org/ftp,.,'
346 ) | sort -u | gzip --rsyncable -9 > suite-${suite}.list.gz
349 log "Finding everything on the ftp site to generate sundries"
350 (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
# "sundries" = files present on disk but in no arch/suite list.
353 zcat *.list.gz | cat - *.list | sort -u |
354 diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
356 log "Generating files list"
359 (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
360 cat - sundries.list dists.list project.list docs.list indices.list |
361 sort -u | poolfirst > ../arch-$a.files
365 for dist in sid squeeze; do
366 find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip --rsyncable -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
# "typical.files": what a typical i386/amd64 partial mirror would carry.
370 (cat ../arch-i386.files ../arch-amd64.files; zcat suite-oldstable.list.gz suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-squeeze.list.gz) |
371 sort -u | poolfirst > ../typical.files
377 function mkchecksums() {
# Build the md5sums index and the dsync file list used by mirror tooling.
378 dsynclist=$dbdir/dsync.list
379 md5list=$indices/md5sums
381 log -n "Creating md5 / dsync index file ... "
382 # (cd into the ftp tree happens on lines not visible in this chunk)
384 ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
385 ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n --rsyncable > ${md5list}.gz
# Hardlink duplicate files; best effort, a failure must not abort dinstall.
386 ${bindir}/dsync-flist -q link-dups $dsynclist || true
390 log "Regenerating \"public\" mirror/ hardlink fun"
# Hardlink-copy the ftp tree into the public mirror staging directory.
# The cd into the target directory happens on a line not visible here --
# confirm cwd before trusting the trailing "." destination.
392 rsync -aH --link-dest ${ftpdir} --delete --delete-after --ignore-errors ${ftpdir}/. .
396 log "Expiring old database dumps..."
# expire_dumps decides which dump_* files in the cwd to keep; the cd into
# the backup directory is on a line not visible in this chunk.
398 $scriptsdir/expire_dumps -d . -p -f "dump_*"
401 function transitionsclean() {
# Remove transitions that are no longer applicable (-c clean, -a automated).
402 log "Removing out of date transitions..."
404 dak transitions -c -a
408 # Send a report on NEW/BYHAND packages
409 log "Nagging ftpteam about NEW/BYHAND packages"
# NOTE(review): 'mail -e' (suppress empty mails) is a mailx extension, not
# POSIX -- fine as long as the installed mail(1) is bsd-mailx compatible.
410 dak queue-report | mail -e -s "NEW and BYHAND on $(date +%D)" ftpmaster@ftp-master.debian.org
411 # and one on crufty packages
412 log "Sending information about crufty packages"
413 dak cruft-report > $webdir/cruft-report-daily.txt
414 dak cruft-report -s experimental >> $webdir/cruft-report-daily.txt
# NOTE(review): UUOC -- 'mail ... < $webdir/cruft-report-daily.txt' suffices.
415 cat $webdir/cruft-report-daily.txt | mail -e -s "Debian archive cruft report for $(date +%D)" ftpmaster@ftp-master.debian.org
# Two separate steps (their function headers and remaining bodies are not
# visible here): refresh the DM uploaders page, and categorize ftp.debian.org
# bugs in the BTS.
419 log "Updating DM html page"
420 $scriptsdir/dm-monitor >$webdir/dm-uploaders.html
424 log "Categorizing uncategorized bugs filed against ftp.debian.org"
429 # Push dak@merkel so it syncs the projectb there. Returns immediately, the sync runs detached
430 log "Trigger merkel/flotows projectb sync"
# NOTE(review): "SetupTimeOut" is not a valid OpenSSH client option; the
# ConnectTimeout option alone should suffice (same for the flotow line).
431 ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_projectb dak@merkel.debian.org sleep 1
432 # Also trigger flotow, the ftpmaster test box
433 ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_flotow_projectb dak@flotow.debconf.org sleep 1
437 # Push dak@merkel to tell it to sync the dd accessible parts. Returns immediately, the sync runs detached
438 log "Trigger merkels dd accessible parts sync"
# NOTE(review): invalid "SetupTimeOut" OpenSSH option here as well.
439 ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_ddaccess dak@merkel.debian.org sleep 1
442 function mirrorpush() {
443 log "Starting the mirrorpush"
# Record start-marker data that downstream mirrors can inspect.
444 date -u > /srv/ftp.debian.org/web/mirrorstart
445 echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
446 echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/mirrorstart
# Kick off the mirror run detached in the background; all output is captured
# in dak's runmirrors.log.
447 sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
# Export suite membership data for the i18n/DDTP project into a fresh
# timestamped directory (function header not visible in this chunk).
451 log "Exporting package data foo for i18n project"
452 STAMP=$(date "+%Y%m%d%H%M")
453 mkdir -p ${scriptdir}/i18n/${STAMP}
454 cd ${scriptdir}/i18n/${STAMP}
# One file per suite, named by codename (stable=lenny, testing=squeeze).
455 dak control-suite -l stable > lenny
456 dak control-suite -l testing > squeeze
457 dak control-suite -l unstable > sid
458 echo "${STAMP}" > timestamp
# Detached signature over the timestamp lets the i18n sync step verify that
# an export really originates from us.
459 gpg --secret-keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 55BE302B --detach-sign -o timestamp.gpg timestamp
# Atomically repoint the "i18n" symlink at the new export (-T: treat the
# destination as a file, never descend into it).
463 ln -sfT ${scriptdir}/i18n/${STAMP} i18n
# Drop exports older than two days, always keeping the one just created.
466 find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
# Refresh dinstall/archive statistics (function header not visible here).
470 log "Updating stats data"
472 $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
# The R script reads ftpstats.data and renders the stat graphs.
473 R --slave --vanilla < $base/misc/ftpstats.R
474 dak stats arch-space > $webdir/arch-space
475 dak stats pkg-nums > $webdir/pkg-nums
478 function aptftpcleanup() {
# Drop stale entries from apt-ftparchive's cache databases.
479 log "Clean up apt-ftparchive's databases"
481 apt-ftparchive -q clean apt.conf
484 function compress() {
485 log "Compress old psql backups"
# Pre-dinstall dumps are only short-term safety nets: delete after 2 days.
487 find -maxdepth 1 -mindepth 1 -type f -name 'dump_pre_*' -mtime +2 -print0 | xargs -0 --no-run-if-empty rm
# Compress plain dumps older than 12 hours (720 minutes).  NOTE(review):
# plain 'while read' without IFS=/-r mangles leading blanks and backslashes;
# harmless for these timestamp-named files, but 'read -r' is safer.
489 find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
490 while read dumpname; do
491 echo "Compressing $dumpname"
492 bzip2 -9fv "$dumpname"
# Same treatment for the globals dumps.
494 find -maxdepth 1 -mindepth 1 -type f -name "dumpall_*" \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
495 while read dumpname; do
496 echo "Compressing $dumpname"
497 bzip2 -9fv "$dumpname"
# Hardlink identical backup files to reclaim disk space.
499 finddup -l -d $base/backup
502 function logstats() {
# Feed one dinstall logfile ($1 = path) to the stats generator.
503 $masterdir/tools/logs.py "$1"
506 # save timestamp when we start
507 function savetimestamp() {
# Persist the run's start time so renamelogfile can find it later.
# NOTE(review): backticks are legacy; $(date ...) is the preferred form.
508 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
509 echo ${NOW} > "${dbdir}/dinstallstart"
512 function maillogfile() {
# Mail the full dinstall log to the cron alias.  ${NOW} is set elsewhere
# (savetimestamp/renamelogfile).  NOTE(review): UUOC -- 'mail ... < "$LOGFILE"'.
513 cat "$LOGFILE" | mail -s "Log for dinstall run of ${NOW}" cron@ftp-master.debian.org
516 function renamelogfile() {
# Rename the logfile to carry the dinstall start time, run log statistics
# over it, then compress it.  Falls back to "now" (and skips the stats, per
# the visible lines) when the start marker file is missing.
517 if [ -f "${dbdir}/dinstallstart" ]; then
518 NOW=$(cat "${dbdir}/dinstallstart")
520 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
521 logstats "$logdir/dinstall_${NOW}.log"
522 bzip2 -9 "$logdir/dinstall_${NOW}.log"
524 error "Problem, I don't know when dinstall started, unable to do log statistics."
525 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
527 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
528 bzip2 -9 "$logdir/dinstall_${NOW}.log"
532 function testingsourcelist() {
# Export the list of source packages in testing for external consumers.
# NOTE(review): 'egrep' is deprecated in favour of 'grep -E'; also note the
# missing space before the pipe after the '.' argument.
533 dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
536 # do a last run of process-unchecked before dinstall is on.
537 function process_unchecked() {
538 log "Processing the unchecked queue"
# Flag passed to the queue-processing wrapper (invocation not visible in
# this chunk); presumably tells it to run without taking the unchecked lock.
539 UNCHECKED_WITHOUT_LOCK="-p"
544 # Function to update a "statefile" telling people what we are doing
547 # This should be called with the argument(s)
548 # - Status name we want to show.
# (Function header and the remainder of the heredoc, including its EOF
# terminator, are not visible in this chunk.)
551 RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
# Heredoc: everything below is literal statefile content -- do not insert
# anything after this line.
552 cat >"${DINSTALLSTATE}" <<EOF
553 Dinstall start: ${DINSTALLBEGIN}
555 Action start: ${RIGHTNOW}