2 # Timestamp. Used for dinstall stat graphs
# Print the stage name ($1) plus the wall-clock time; this output is
# parsed later to build the dinstall statistics graphs.
4 echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
# Lock helpers: drop just the daily lock, or every lock file we may hold.
# NOTE(review): the body of remove_daily_lock() is elided in this excerpt.
8 function remove_daily_lock() {
13 function remove_all_locks() {
14 rm -f $LOCK_DAILY $LOCK_ACCEPTED
17 # If we error out this one is called, *FOLLOWED* by cleanup above
# Build a subject line that says whether the run was continued or
# interrupted (based on ${error}), then mail the failing stage's log to
# the ftp-master cron alias.
19 ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
21 subject="ATTENTION ATTENTION!"
22 if [ "${error}" = "false" ]; then
23 subject="${subject} (continued)"
# NOTE(review): the matching 'else' for the branch below is elided here.
25 subject="${subject} (interrupted)"
27 subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
# Mail either the stage log (when readable) or a "missing log" note.
29 if [ -r "${STAGEFILE}.log" ]; then
30 cat "${STAGEFILE}.log"
32 echo "file ${STAGEFILE}.log does not exist, sorry"
33 fi | mail -s "${subject}" -a "X-Debian: DAK" cron@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
36 ########################################################################
37 # the actual dinstall functions follow #
38 ########################################################################
40 # pushing merkels QA user, part one
# Quick ssh ping so the QA host knows dinstall started. BatchMode plus
# the two timeout options keep an unreachable host from hanging the run.
42 log "Telling QA user that we start dinstall"
43 ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1
46 # Updating various files
# Refresh assorted static documentation/list files via helper scripts.
48 log "Updating Bugs docu, Mirror list and mailing-lists.txt"
50 $scriptsdir/update-bugdoctxt
51 $scriptsdir/update-mirrorlists
52 $scriptsdir/update-mailingliststxt
53 $scriptsdir/update-pseudopackages.sh
56 # The first i18n one, syncing new descriptions
# Pull the newest translated package descriptions from the DDTP host; if
# their timestamp file carries our signature and names a directory we
# still know about, publish the i18n files into the archive tree.
58 log "Synchronizing i18n package descriptions"
59 # First sync their newest data
60 cd ${scriptdir}/i18nsync
61 rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
63 # Now check if we still know about the packages for which they created the files
64 # is the timestamp signed by us?
# NOTE(review): 'if $(gpgv ...)' runs gpgv's *stdout* as a command; plain
# 'if gpgv ...' is presumably what is intended - confirm against full file.
65 if $(gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
66 # now read it. As it's signed by us we are sure the content is what we expect, no need
67 # to do more here. And we only test -d a directory on it anyway.
68 TSTAMP=$(cat timestamp)
69 # do we have the dir still?
70 if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
# Sanity-check the synced files before copying anything public.
72 if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
73 # Yay, worked, lets copy around
74 for dir in wheezy sid; do
75 if [ -d dists/${dir}/ ]; then
76 cd dists/${dir}/main/i18n
77 rsync -aq --delete --delete-after --exclude Translation-en.bz2 --exclude Translation-*.diff/ . ${ftpdir}/dists/${dir}/main/i18n/.
79 cd ${scriptdir}/i18nsync
# Failure branches: complain and notify the l10n list by mail.
# NOTE(review): the matching 'else'/'fi' lines are elided in this excerpt.
82 echo "ARRRR, bad guys, wrong files, ARRR"
83 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
86 echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
87 echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
90 echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
91 echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -a "X-Debian: DAK" -s "You can't keep blaming yourself. Just blame yourself once, and move on." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
# Check the override tables for stale (cruft) entries.
96 log "Checking for cruft in overrides"
# dominate(): drop source/binary suite associations superseded by newer
# versions (per the log message; body partly elided in this excerpt).
100 function dominate() {
101 log "Removing obsolete source and binary associations"
# filelist(): produce the file lists apt-ftparchive consumes.
105 function filelist() {
106 log "Generating file lists for apt-ftparchive"
107 dak generate-filelist
110 function fingerprints() {
111 log "Updating fingerprints"
# Import the main Debian keyring, then the Debian Maintainers keyring;
# the second import writes any resulting account changes to ${OUTFILE}.
112 dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
115 dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
# If anything changed, announce it on debian-project. Everything from the
# 'From:' line to the heredoc terminator is the literal mail text.
117 if [ -s "${OUTFILE}" ]; then
118 /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
119 From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
120 To: <debian-project@lists.debian.org>
121 Subject: Debian Maintainers Keyring changes
122 Content-Type: text/plain; charset=utf-8
126 The following changes to the debian-maintainers keyring have just been activated:
130 Debian distribution maintenance software,
131 on behalf of the Keyring maintainers
138 function overrides() {
139 log "Writing overrides into text files"
# Concatenate the per-component sid override files into one combined file.
144 rm -f override.sid.all3
145 for i in main contrib non-free main.debian-installer; do cat override.sid.$i >> override.sid.all3; done
# For every public archive, map pool files to their packages and publish
# the mapping bzip2-compressed under indices/.
151 log "Generating package / file mapping"
152 for archive in "${public_archives[@]}"; do
153 archiveroot="$(get_archiveroot "${archive}")"
154 dak make-pkg-file-mapping "${archive}" | bzip2 -9 > "${archiveroot}/indices/package-file.map.bz2"
158 function packages() {
159 log "Generating Packages and Sources files"
# Per archive: regenerate Packages/Sources indices and Contents files.
160 for archive in "${public_archives[@]}"; do
161 dak generate-packages-sources2 -a "${archive}"
162 dak contents generate -a "${archive}"
# Incremental index diffs (pdiffs) for apt.
167 log "Generating pdiff files"
168 dak generate-index-diffs
172 # XXX: disable once we can remove i18n/Index (#649314)
173 log "Generating i18n/Index"
176 for dist in testing unstable experimental proposed-updates testing-proposed-updates; do
177 $scriptsdir/generate-i18n-Index $dist;
# Finally (re)build the signed Release files for every public archive.
181 log "Generating Release files"
182 for archive in "${public_archives[@]}"; do
183 dak generate-releases -a "${archive}"
187 function dakcleanup() {
188 log "Cleanup old packages/files"
# NOTE(review): '-m 10000' limit semantics not visible here - see
# 'dak clean-suites' help to confirm.
189 dak clean-suites -m 10000
190 dak clean-queues -i "$unchecked"
193 function buildd_dir() {
194 # Rebuild the buildd dir to avoid long times of 403
195 log "Regenerating the buildd incoming dir"
196 STAMP=$(date "+%Y%m%d%H%M")
# Per-archive housekeeping: remove stray core files, verify symlinks, and
# publish a recursive ls -lR listing (gzip --rsyncable for mirrors).
204 for archive in "${public_archives[@]}"; do
205 archiveroot="$(get_archiveroot "${archive}")"
208 log "Removing any core files ..."
209 find -type f -name core -print -delete
211 log "Checking symlinks ..."
214 log "Creating recursive directory listing ... "
216 TZ=UTC ls -lR | gzip -9c --rsyncable > ${FILENAME}.gz
220 function mkmaintainers() {
224 log 'Creating Maintainers index ... '
# For each public archive: write Maintainers/Uploaders indices (including
# pseudo-package maintainers) and keep gzipped copies alongside.
226 for archive in "${public_archives[@]}"; do
227 archiveroot="$(get_archiveroot "${archive}")"
228 indices="${archiveroot}/indices"
229 if ! [ -d "${indices}" ]; then
234 dak make-maintainers -a "${archive}" ${scriptdir}/masterfiles/pseudo-packages.maintainers
235 gzip -9v --rsyncable <Maintainers >Maintainers.gz
236 gzip -9v --rsyncable <Uploaders >Uploaders.gz
240 function copyoverrides() {
241 log 'Copying override files into public view ...'
# Publish gzipped, group-writable copies of the override files.
243 for ofile in ${overridedir}/override.{squeeze,wheezy,sid}.{,extra.}{main,contrib,non-free}*; do
245 gzip -9cv --rsyncable ${ofile} > ${indices}/${bname}.gz
246 chmod g+w ${indices}/${bname}.gz
250 function mkfilesindices() {
# Build the per-arch / per-suite / per-component file lists under
# indices/files/components that partial mirrors use to select content.
253 cd $base/ftp/indices/files/components
# Dump one "path|arch" line per pool file into $ARCHLIST.
257 log "Querying postgres"
259 SELECT './pool/' || c.name || '/' || f.filename AS path, a.arch_string AS arch_string
261 JOIN files_archive_map af ON f.id = af.file_id
262 JOIN component c ON af.component_id = c.id
263 JOIN archive ON af.archive_id = archive.id
266 JOIN architecture a ON b.architecture = a.id)
268 WHERE archive.name = 'ftp-master'
269 ORDER BY path, arch_string
271 psql -At -c "$query" >$ARCHLIST
274 perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
277 perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
280 log "Generating sources list"
282 sed -n 's/|$//p' $ARCHLIST
284 find ./dists -maxdepth 1 \! -type d
285 find ./dists \! -type d | grep "/source/"
286 ) | sort -u | gzip -9 > source.list.gz
288 log "Generating arch lists"
# All binary arches seen in ARCHLIST; amd64 is forced in, 'all' excluded.
290 ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
292 (sed -n "s/|$a$//p" $ARCHLIST
293 sed -n 's/|all$//p' $ARCHLIST
296 find ./dists -maxdepth 1 \! -type d
297 find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
298 ) | sort -u | gzip -9 > arch-$a.list.gz
301 log "Generating suite lists"
304 local suite_id="$(printf %d $1)"
307 SELECT DISTINCT './pool/' || c.name || '/' || f.filename
309 (SELECT sa.source AS source
310 FROM src_associations sa
311 WHERE sa.suite = $suite_id
314 FROM extra_src_references esr
315 JOIN bin_associations ba ON esr.bin_id = ba.bin
316 WHERE ba.suite = $suite_id
318 SELECT b.source AS source
319 FROM bin_associations ba
320 JOIN binaries b ON ba.bin = b.id WHERE ba.suite = $suite_id) s
321 JOIN dsc_files df ON s.source = df.source
322 JOIN files f ON df.file = f.id
323 JOIN files_archive_map af ON f.id = af.file_id
324 JOIN component c ON af.component_id = c.id
325 JOIN archive ON af.archive_id = archive.id
326 WHERE archive.name = 'ftp-master'
328 psql -F' ' -A -t -c "$query"
331 SELECT './pool/' || c.name || '/' || f.filename
332 FROM bin_associations ba
333 JOIN binaries b ON ba.bin = b.id
334 JOIN files f ON b.file = f.id
335 JOIN files_archive_map af ON f.id = af.file_id
336 JOIN component c ON af.component_id = c.id
337 JOIN archive ON af.archive_id = archive.id
338 WHERE ba.suite = $suite_id AND archive.name = 'ftp-master'
340 psql -F' ' -A -t -c "$query"
# Walk every suite known to the DB; skip ones without a dists/ entry.
343 psql -F' ' -At -c "SELECT id, suite_name FROM suite" |
344 while read id suite; do
345 [ -e $base/ftp/dists/$suite ] || continue
348 distname=$(cd dists; readlink $suite || echo $suite)
349 find ./dists/$distname \! -type d
350 for distdir in ./dists/*; do
351 [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
355 ) | sort -u | gzip -9 > suite-${suite}.list.gz
358 log "Finding everything on the ftp site to generate sundries"
359 (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
# "sundries" = files present on disk but in none of the generated lists.
362 zcat *.list.gz | cat - *.list | sort -u |
363 diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
365 log "Generating files list"
368 (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
369 cat - sundries.list dists.list project.list docs.list indices.list |
370 sort -u | poolfirst > ../arch-$a.files
374 for dist in sid wheezy; do
375 find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
# "typical" mirror selection: i386+amd64 plus p-u and translations.
379 (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-wheezy.list.gz) |
380 sort -u | poolfirst > ../typical.files
387 function mkchecksums() {
388 local archiveroot dsynclist md5list
# Per public archive: build a dsync file list, publish gzipped md5sums
# under indices/, and hardlink duplicate files (failures tolerated).
390 for archive in "${public_archives[@]}"; do
391 archiveroot="$(get_archiveroot "${archive}")"
392 dsynclist=$dbdir/dsync.${archive}.list
393 md5list=${archiveroot}/indices/md5sums
395 log -n "Creating md5 / dsync index file for ${archive}... "
398 ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
399 ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
400 ${bindir}/dsync-flist -q link-dups $dsynclist || true
407 log "Regenerating \"public\" mirror/ hardlink fun"
# Pick the new archive serial: today's date-based serial, or the previous
# serial from the trace file plus one when we push more than once a day.
408 DATE_SERIAL=$(date +"%Y%m%d01")
409 FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} )
410 if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
411 SERIAL="$DATE_SERIAL"
# NOTE(review): the matching 'else' line is elided in this excerpt.
413 SERIAL="$FILESOAPLUS1"
# Rewrite the project trace file with date, version, host and serial.
415 date -u > ${TRACEFILE}
416 echo "Using dak v1" >> ${TRACEFILE}
417 echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
418 echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
420 # Ugly "hack", but hey, it does what we want.
421 cp ${TRACEFILE} ${TRACEFILE_BDO}
# Hardlink-sync every public archive into its public mirror/ directory.
423 for archive in "${public_archives[@]}"; do
424 archiveroot="$(get_archiveroot "${archive}")"
425 mirrordir="${archiveroot}/../mirror"
427 rsync -aH --link-dest ${archiveroot} --delete --delete-after --delete-excluded --exclude Packages.*.new --exclude Sources.*.new --ignore-errors ${archiveroot}/. .
# Prune old database dump files via the expire_dumps helper.
432 log "Expiring old database dumps..."
434 $scriptsdir/expire_dumps -d . -p -f "dump_*"
437 function transitionsclean() {
438 log "Removing out of date transitions..."
440 dak transitions -c -a
# Export per-source Debian Maintainer upload permissions for the web page.
444 log "Updating DM permissions page"
445 dak acl export-per-source dm >$exportdir/dm.txt
449 log "Categorizing uncategorized bugs filed against ftp.debian.org"
453 function ddaccess() {
454 # Tell our dd accessible mirror to sync itself up. Including ftp dir.
455 log "Trigger dd accessible parts sync including ftp dir"
456 ${scriptsdir}/sync-dd ries-sync ries-sync1 ries-sync2 pool
459 function mirrorpush() {
460 log "Checking the public archive copy"
# Verify the mirror tree against each InRelease file: every listed file
# must exist (packed variants only) and match the advertised size + SHA1.
461 cd ${mirrordir}/dists
464 for release in $(find . -name "InRelease"); do
465 echo "Processing: ${release}"
466 subdir=${release%/InRelease}
# Reads "checksum size name" triples (fed from the SHA1 section below).
467 while read SHASUM SIZE NAME; do
468 if ! [ -f "${subdir}/${NAME}" ]; then
469 bname=$(basename ${NAME})
470 if [[ "${bname}" =~ ^(Packages|Sources|Translation-[a-zA-Z_]+)$ ]]; then
471 # We don't keep unpacked files, don't check for their existence.
472 # We might want to go and check their unpacked shasum, but right now
473 # I don't care. I believe it should be enough if all the packed shasums
477 broken=$(( broken + 1 ))
478 echo "File ${subdir}/${NAME} is missing"
482 # We do have symlinks in the tree (see the contents files currently).
483 # So we use "readlink -f" to check the size of the target, as that's basically
484 # what gen-releases does
485 fsize=$(stat -c %s $(readlink -f "${subdir}/${NAME}"))
486 if [ ${fsize} -ne ${SIZE} ]; then
487 broken=$(( broken + 1 ))
488 echo "File ${subdir}/${NAME} has size ${fsize}, expected is ${SIZE}"
492 fshasum=$(sha1sum $(readlink -f "${subdir}/${NAME}"))
493 fshasum=${fshasum%% *}
494 if [ "${fshasum}" != "${SHASUM}" ]; then
495 broken=$(( broken + 1 ))
496 echo "File ${subdir}/${NAME} has checksum ${fshasum}, expected is ${SHASUM}"
# Feed only the SHA1 block of InRelease into the loop (strip everything
# before "SHA1:" and everything from "SHA256:" on).
499 done < <(sed '1,/SHA1:/d' "${release}" | sed '/SHA256:/,$d')
502 if [ $broken -gt 0 ]; then
503 log_error "Trouble with the public mirror, found ${broken} errors"
# All good: record the push start time and fire the mirror scripts in the
# background (main archive and backports separately).
507 log "Starting the mirrorpush"
508 date -u > /srv/ftp.debian.org/web/mirrorstart
509 echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
510 echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/mirrorstart
511 sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
512 sudo -H -u archvsync /home/archvsync/runmirrors -a backports > ~dak/runmirrorsbpo.log 2>&1 &
516 log "Exporting package data foo for i18n project"
# Dump per-suite control data into a timestamped directory, sign the
# timestamp with our archive key, then repoint the 'i18n' symlink at it.
517 STAMP=$(date "+%Y%m%d%H%M")
518 mkdir -p ${scriptdir}/i18n/${STAMP}
519 cd ${scriptdir}/i18n/${STAMP}
520 for suite in stable testing unstable; do
521 codename=$(dak admin s show ${suite}|grep '^Codename')
522 codename=${codename##* }
523 echo "Codename is ${codename}"
524 dak control-suite -l ${suite} >${codename}
526 echo "${STAMP}" > timestamp
527 gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 473041FA --detach-sign -o timestamp.gpg timestamp
531 ln -sfT ${scriptdir}/i18n/${STAMP} i18n
# Prune exports older than two days, never touching the one just created.
534 find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
# Refresh ftp statistics data, render graphs with R, and export archive
# size/package-count stats to the web directory.
538 log "Updating stats data"
540 $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
541 R --slave --vanilla < $base/misc/ftpstats.R
542 dak stats arch-space > $webdir/arch-space
543 dak stats pkg-nums > $webdir/pkg-nums
546 function aptftpcleanup() {
547 log "Clean up apt-ftparchive's databases"
549 apt-ftparchive -q clean apt.conf
552 function cleantransactions() {
553 log "Cleanup transaction ids older than 3 months"
# Only plain txid_* files directly in the current directory, >90 days old.
555 find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -print0 | xargs -0 --no-run-if-empty rm
# Run the external log-statistics tool over the given logfile.
558 function logstats() {
559 $masterdir/tools/logs.py "$1"
562 # save timestamp when we start
563 function savetimestamp() {
564 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
565 echo ${NOW} > "${dbdir}/dinstallstart"
# Mail the full run log to the ftp-master cron alias.
568 function maillogfile() {
569 cat "$LOGFILE" | mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" cron@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
572 function renamelogfile() {
# Use the recorded dinstall start time when available; otherwise fall
# back to "now" and skip the statistics step.
573 if [ -f "${dbdir}/dinstallstart" ]; then
574 NOW=$(cat "${dbdir}/dinstallstart")
576 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
577 logstats "$logdir/dinstall_${NOW}.log"
578 bzip2 -9 "$logdir/dinstall_${NOW}.log"
580 error "Problem, I don't know when dinstall started, unable to do log statistics."
581 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
583 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
584 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# Export the list of source packages currently in testing (heidi format).
588 function testingsourcelist() {
589 dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
592 # do a last run of process-unchecked before dinstall is on.
593 function process_unchecked() {
594 log "Processing the unchecked queue"
595 UNCHECKED_WITHOUT_LOCK="-p"
600 # Function to update a "statefile" telling people what we are doing
603 # This should be called with the argument(s)
604 # - Status name we want to show.
# Write the current action and timing into ${DINSTALLSTATE}; everything
# after the '<<EOF' below is the literal statefile content.
607 RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
608 cat >"${DINSTALLSTATE}" <<EOF
609 Dinstall start: ${DINSTALLBEGIN}
611 Action start: ${RIGHTNOW}
615 # extract changelogs and stuff
616 function changelogs() {
617 log "Extracting changelogs"
618 #dak make-changelog -e -a ftp-master
# Mirror the exported changelogs into the public export tree, then kick
# off the metadata mirror push in the background (output to a log file).
619 mkdir -p ${exportpublic}/changelogs
620 cd ${exportpublic}/changelogs
621 rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
622 sudo -H -u archvsync /home/archvsync/runmirrors metaftpdo > ~dak/runmirrors-metadata.log 2>&1 &
625 function gitpdiff() {
626 # Might be that we want to change this to have more than one git repository.
627 # Advantage of one is that we do not need much space in terms of storage in git itself,
628 # git gc is pretty good on our input.
629 # But it might be faster. Well, lets test.
630 log "Adjusting the git tree for pdiffs"
633 # The regex needs the architectures separated with \|
634 garchs=$(dak admin a list|sed -e ':q;N;s/\n/\\|/g;t q')
636 # First, get all the files we want to work on. ../dists/ is a symlink to the real dists/ we
638 # Also, we only want contents, packages and sources.
# NOTE(review): the find regex uses ${archs} while the variable set above
# is ${garchs} - presumably the same value; confirm against the full file.
639 for file in $(find ../dists/ -regex ".*/\(Contents-\($archs\)\|\(Packages\|Sources\)\).gz"); do
648 # Second, add all there is into git
651 # Maybe we want to make this the same for tag and commit? But well, shouldn't matter
653 TAGD=$(date +%Y-%m-%d-%H-%M)
# NOTE(review): the commit message uses ${COMD}, but only TAGD is set in
# this excerpt; the COMD assignment is presumably on an elided line.
654 git commit -m "Commit of ${COMD}"