2 # Timestamp. Used for dinstall stat graphs
# Emits a wall-clock timestamp line, tagged with the stage name in $1,
# that the stat-graph tooling later parses out of the dinstall log.
4 echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
# Lock helpers. Bodies are partially elided in this view; each removes
# its lockfile so a later (or retried) dinstall run can proceed.
8 function remove_daily_lock() {
12 # Remove changelog lock
13 function remove_changelog_lock() {
# Drops both the daily and the accepted lock in one go (used on exit paths).
18 function remove_all_locks() {
# NOTE(review): $LOCK_DAILY / $LOCK_ACCEPTED are unquoted; fine only as long
# as the lock paths never contain whitespace or glob characters.
19 rm -f $LOCK_DAILY $LOCK_ACCEPTED
22 # If we error out this one is called, *FOLLOWED* by cleanup above
# Error handler: mails the log of the failed stage (named in ${STAGEFILE})
# to cron@ftp-master so a human can investigate.
24 ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
26 subject="ATTENTION ATTENTION!"
# ${error} == "false" means dinstall kept going after the failure;
# otherwise the run was interrupted at this stage.
27 if [ "${error}" = "false" ]; then
28 subject="${subject} (continued)"
30 subject="${subject} (interrupted)"
32 subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
# Mail the stage log if readable, else a placeholder note; the whole
# if/else output is piped into mail(1) in one go.
34 if [ -r "${STAGEFILE}.log" ]; then
35 cat "${STAGEFILE}.log"
37 echo "file ${STAGEFILE}.log does not exist, sorry"
38 fi | mail -s "${subject}" -a "X-Debian: DAK" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
41 ########################################################################
42 # the actual dinstall functions follow #
43 ########################################################################
45 # pushing merkels QA user, part one
47 log "Telling QA user that we start dinstall"
48 ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1
51 # Updating various files
53 log "Updating Bugs docu, Mirror list and mailing-lists.txt"
# Each helper script regenerates one published text file; they are
# independent of one another and simply run in sequence.
55 $scriptsdir/update-bugdoctxt
56 $scriptsdir/update-mirrorlists
57 $scriptsdir/update-mailingliststxt
58 $scriptsdir/update-pseudopackages.sh
61 # The first i18n one, syncing new descriptions
63 log "Synchronizing i18n package descriptions"
64 # First sync their newest data
65 cd ${scriptdir}/i18nsync
# Pull the latest translation data; "|| true" because an unreachable
# peer must not abort the whole dinstall run.
66 rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
68 # Now check if we still know about the packages for which they created the files
69 # is the timestamp signed by us?
# NOTE(review): "if $(cmd)" runs gpgv inside a command substitution and then
# executes its (normally empty) stdout as the condition; it only works
# because gpgv prints to stderr. "if gpgv ...; then" would be the robust
# spelling — confirm against the elided lines before changing.
70 if $(gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
71 # now read it. As it's signed by us we are sure the content is what we expect, no need
72 # to do more here. And we only test -d a directory on it anyway.
73 TSTAMP=$(cat timestamp)
74 # do we have the dir still?
75 if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
# Validate the synced files before publishing anything.
77 if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
78 # Yay, worked, lets copy around
79 for dir in stretch sid; do
80 if [ -d dists/${dir}/ ]; then
81 cd dists/${dir}/main/i18n
# Publish everything except the files the release tooling builds itself.
82 rsync -aq --delete --delete-after --exclude Translation-en.bz2 --exclude Translation-*.diff/ . ${ftpdir}/dists/${dir}/main/i18n/.
84 cd ${scriptdir}/i18nsync
# Validation failed: complain to the l10n list instead of publishing.
87 echo "ARRRR, bad guys, wrong files, ARRR"
88 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
# The timestamp names a directory we no longer have: skip the update.
91 echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
92 echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
# Signature check failed: never trust the synced data, just shout.
95 echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
96 echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -a "X-Debian: DAK" -s "You can't keep blaming yourself. Just blame yourself once, and move on." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
100 # Syncing AppStream/DEP-11 data
102 log "Synchronizing AppStream metadata"
103 # First sync their newest data
104 mkdir -p ${scriptdir}/dep11
105 cd ${scriptdir}/dep11
# Best-effort pull, as with the i18n sync above: a down peer is not fatal.
106 rsync -aq --delete --delete-after dep11-sync:/does/not/matter . || true
# Only publish metadata that passes basic validation.
109 if ${scriptsdir}/dep11-basic-validate.py . ${scriptdir}/dep11/; then
110 # Yay, worked, lets copy around
111 for dir in stretch sid; do
112 if [ -d ${dir}/ ]; then
113 for comp in main contrib non-free; do
114 mkdir -p ${ftpdir}/dists/${dir}/${comp}/dep11
# NOTE(review): the glob in "--exclude *.tmp" is unquoted, so the shell may
# expand it against the cwd before rsync sees it; "--exclude '*.tmp'" would
# be the safe spelling.
116 rsync -aq --delete --delete-after --exclude *.tmp . ${ftpdir}/dists/${dir}/${comp}/dep11/.
117 cd ${scriptdir}/dep11
# Validation failed: notify the maintainer, publish nothing.
122 echo "ARRRR, bad guys, wrong files, ARRR"
123 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" mak@debian.org
128 log "Checking for cruft in overrides"
# Drops obsolete source/binary associations (body partially elided here).
132 function dominate() {
133 log "Removing obsolete source and binary associations"
# Automatic decrufting: unstable unconditionally; experimental only where
# unstable already carries a newer version (tagged "NVIU" in the rm message).
137 function autocruft() {
138 log "Check for obsolete binary packages"
139 dak auto-decruft -s unstable
140 dak auto-decruft -s experimental --if-newer-version-in unstable --if-newer-version-in-rm-msg "NVIU"
# Re-imports the Debian and DM keyrings and, if the DM import produced
# output, mails the changes to debian-project. Lines of this function are
# partially elided in this view.
143 function fingerprints() {
144 log "Updating fingerprints"
145 dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
148 dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
# Only send mail when the import actually reported changes (-s: non-empty).
150 if [ -s "${OUTFILE}" ]; then
# Everything below is the literal here-document mail body — do not insert
# comments into it, they would end up in the mail.
151 /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
152 From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
153 To: <debian-project@lists.debian.org>
154 Subject: Debian Maintainers Keyring changes
155 Content-Type: text/plain; charset=utf-8
159 The following changes to the debian-maintainers keyring have just been activated:
163 Debian distribution maintenance software,
164 on behalf of the Keyring maintainers
# Dumps the override data to text files (body elided in this view).
171 function overrides() {
172 log "Writing overrides into text files"
# Builds indices/package-file.map.bz2 for every public archive: a mapping
# from package to pool file, bzip2-compressed at maximum level.
180 log "Generating package / file mapping"
181 for archive in "${public_archives[@]}"; do
182 archiveroot="$(get_archiveroot "${archive}")"
183 dak make-pkg-file-mapping "${archive}" | bzip2 -9 > "${archiveroot}/indices/package-file.map.bz2"
# Regenerates Packages/Sources and Contents indices per public archive.
187 function packages() {
188 log "Generating Packages and Sources files"
189 for archive in "${public_archives[@]}"; do
190 log " Generating Packages/Sources for ${archive}"
191 dak generate-packages-sources2 -a "${archive}"
192 log " Generating Contents for ${archive}"
193 dak contents generate -a "${archive}"
# pdiff (incremental index diff) generation.
198 log "Generating pdiff files"
199 dak generate-index-diffs
# (In)Release files for every public archive.
203 log "Generating Release files"
204 for archive in "${public_archives[@]}"; do
205 dak generate-releases -a "${archive}"
# Expires old files/queue entries from the dak database and disk.
209 function dakcleanup() {
210 log "Cleanup old packages/files"
# -m 10000: cap on how many files clean-suites handles per run.
211 dak clean-suites -m 10000
212 dak clean-queues -i "$unchecked"
# Per-archive housekeeping: drop stray core files, check symlinks, and
# publish a recursive directory listing for mirror users.
219 for archive in "${public_archives[@]}"; do
220 archiveroot="$(get_archiveroot "${archive}")"
223 log "Removing any core files ..."
224 find -type f -name core -print -delete
226 log "Checking symlinks ..."
229 log "Creating recursive directory listing ... "
# --rsyncable keeps the gzip output rsync-friendly for mirrors; TZ=UTC
# makes the listing timestamps reproducible across hosts.
231 TZ=UTC ls -lR | gzip -9c --rsyncable > ${FILENAME}.gz
# Builds the Maintainers/Uploaders indices for each public archive.
235 function mkmaintainers() {
239 log 'Creating Maintainers index ... '
241 for archive in "${public_archives[@]}"; do
242 archiveroot="$(get_archiveroot "${archive}")"
243 indices="${archiveroot}/indices"
# Skip archives that have no indices directory at all.
244 if ! [ -d "${indices}" ]; then
249 dak make-maintainers -a "${archive}" ${scriptdir}/masterfiles/pseudo-packages.maintainers
250 gzip -9v --rsyncable <Maintainers >Maintainers.gz
251 gzip -9v --rsyncable <Uploaders >Uploaders.gz
# Publishes gzipped copies of the override files into the indices dir.
255 function copyoverrides() {
256 log 'Copying override files into public view ...'
260 for ofile in ${overridedir}/override.{squeeze,wheezy,jessie,stretch,sid}.{,extra.}{main,contrib,non-free}*; do
262 gzip -9cv --rsyncable ${ofile} > ${indices}/${bname}.gz
# Group-writable so other archive admins can refresh them.
263 chmod g+w ${indices}/${bname}.gz
# Builds the per-arch / per-suite file lists under indices/files/ that
# partial mirrors use to decide what to fetch. Heavily elided in this
# view; the bare SQL lines below belong to multi-line $query strings
# whose delimiters are not visible — comments are kept outside them.
268 function mkfilesindices() {
271 cd $base/ftp/indices/files/components
275 log "Querying postgres"
# Query: every pool path in the ftp-master archive together with the
# architecture(s) it belongs to, sorted for the splitting steps below.
277 SELECT './pool/' || c.name || '/' || f.filename AS path, a.arch_string AS arch_string
279 JOIN files_archive_map af ON f.id = af.file_id
280 JOIN component c ON af.component_id = c.id
281 JOIN archive ON af.archive_id = archive.id
284 JOIN architecture a ON b.architecture = a.id)
286 WHERE archive.name = 'ftp-master'
287 ORDER BY path, arch_string
289 psql -At -c "$query" >$ARCHLIST
# For each file, also emit every parent directory (each only once).
292 perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
# "poolfirst" helper: reorder so ./pool/ entries precede everything else.
295 perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
298 log "Generating sources list"
300 sed -n 's/|$//p' $ARCHLIST
302 find ./dists -maxdepth 1 \! -type d
303 find ./dists \! -type d | grep "/source/"
304 ) | sort -u | gzip -9 > source.list.gz
306 log "Generating arch lists"
# All real architectures seen in the list (amd64 forced in, "all" excluded).
308 ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
310 (sed -n "s/|$a$//p" $ARCHLIST
311 sed -n 's/|all$//p' $ARCHLIST
314 find ./dists -maxdepth 1 \! -type d
315 find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
316 ) | sort -u | gzip -9 > arch-$a.list.gz
319 log "Generating suite lists"
# Takes a numeric suite id; printf %d coerces/validates the argument.
322 local suite_id="$(printf %d $1)"
# Query: pool files referenced by the suite's sources (direct, via
# extra_src_references, or via its binaries' source packages).
325 SELECT DISTINCT './pool/' || c.name || '/' || f.filename
327 (SELECT sa.source AS source
328 FROM src_associations sa
329 WHERE sa.suite = $suite_id
332 FROM extra_src_references esr
333 JOIN bin_associations ba ON esr.bin_id = ba.bin
334 WHERE ba.suite = $suite_id
336 SELECT b.source AS source
337 FROM bin_associations ba
338 JOIN binaries b ON ba.bin = b.id WHERE ba.suite = $suite_id) s
339 JOIN dsc_files df ON s.source = df.source
340 JOIN files f ON df.file = f.id
341 JOIN files_archive_map af ON f.id = af.file_id
342 JOIN component c ON af.component_id = c.id
343 JOIN archive ON af.archive_id = archive.id
344 WHERE archive.name = 'ftp-master'
346 psql -F' ' -A -t -c "$query"
# Second query: the suite's binary pool files.
349 SELECT './pool/' || c.name || '/' || f.filename
350 FROM bin_associations ba
351 JOIN binaries b ON ba.bin = b.id
352 JOIN files f ON b.file = f.id
353 JOIN files_archive_map af ON f.id = af.file_id
354 JOIN component c ON af.component_id = c.id
355 JOIN archive ON af.archive_id = archive.id
356 WHERE ba.suite = $suite_id AND archive.name = 'ftp-master'
358 psql -F' ' -A -t -c "$query"
# Walk every suite present under dists/ and build suite-<name>.list.gz.
361 psql -F' ' -At -c "SELECT id, suite_name FROM suite" |
362 while read id suite; do
363 [ -e $base/ftp/dists/$suite ] || continue
# Resolve codename symlinks (e.g. stable -> a codename) so both the
# symlink and its target end up in the list.
366 distname=$(cd dists; readlink $suite || echo $suite)
367 find ./dists/$distname \! -type d
368 for distdir in ./dists/*; do
369 [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
373 ) | sort -u | gzip -9 > suite-${suite}.list.gz
376 log "Finding everything on the ftp site to generate sundries"
377 (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
# sundries = files on disk not claimed by any arch/source list
# (diff lines starting with "> " are only in $ARCHLIST).
380 zcat *.list.gz | cat - *.list | sort -u |
381 diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
383 log "Generating files list"
386 (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
387 cat - sundries.list dists.list project.list docs.list indices.list |
388 sort -u | poolfirst > ../arch-$a.files
# Per-dist translation file lists.
392 for dist in sid jessie stretch; do
393 find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
# "typical" mirror profile: i386+amd64 plus proposed-updates and
# the translation lists built just above.
397 (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-jessie.list.gz ; zcat translation-stretch.list.gz) |
398 sort -u | poolfirst > ../typical.files
# Builds md5sum and dsync-flist indices per public archive and hardlinks
# duplicate files to save disk space.
405 function mkchecksums() {
406 local archiveroot dsynclist md5list
408 for archive in "${public_archives[@]}"; do
409 archiveroot="$(get_archiveroot "${archive}")"
410 dsynclist=$dbdir/dsync.${archive}.list
411 md5list=${archiveroot}/indices/md5sums
413 log -n "Creating md5 / dsync index file for ${archive}... "
416 ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
# gzip -9n: max compression, no original name/timestamp in the header
# (keeps the output reproducible).
417 ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
# link-dups is best-effort; failing to hardlink duplicates is not fatal.
418 ${bindir}/dsync-flist -q link-dups $dsynclist || true
# Refreshes the hardlinked "mirror" copy of each public archive and bumps
# the archive serial in the trace file (function header elided here).
423 local archiveroot mirrordir
425 log "Regenerating \"public\" mirror/ hardlink fun"
# Serial is max(today's date serial, previous serial + 1) so it is always
# strictly increasing even with several pushes per day.
426 DATE_SERIAL=$(date +"%Y%m%d01")
427 FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} )
428 if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
429 SERIAL="$DATE_SERIAL"
431 SERIAL="$FILESOAPLUS1"
# Rewrite the trace file from scratch with date, software, host, serial.
433 date -u > ${TRACEFILE}
434 echo "Using dak v1" >> ${TRACEFILE}
435 echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
436 echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
438 # Ugly "hack", but hey, it does what we want.
439 cp ${TRACEFILE} ${TRACEFILE_BDO}
441 for archive in "${public_archives[@]}"; do
442 archiveroot="$(get_archiveroot "${archive}")"
443 mirrordir="${archiveroot}/../mirror"
# --link-dest makes the mirror a hardlink farm of the archive root; the
# .new index files are excluded (and deleted on the mirror side).
445 rsync -aH --link-dest ${archiveroot} --delete --delete-after --delete-excluded --exclude Packages.*.new --exclude Sources.*.new --ignore-errors ${archiveroot}/. .
# Expire old database dumps (enclosing function header elided here).
450 log "Expiring old database dumps..."
452 $scriptsdir/expire_dumps -d . -p -f "dump_*"
# Clears out transitions that have been fulfilled.
455 function transitionsclean() {
456 log "Removing out of date transitions..."
458 dak transitions -c -a
# Exports per-source DM upload permissions to the public dm.txt.
462 log "Updating DM permissions page"
463 dak acl export-per-source dm >$exportdir/dm.txt
# Auto-categorizes new ftp.debian.org bugs; runs unprivileged on purpose.
467 log "Categorizing uncategorized bugs filed against ftp.debian.org"
468 sudo -u dak-unpriv dak bts-categorize
# Kicks off the dd-accessible mirror sync (includes the ftp dir).
471 function ddaccess() {
472 # Tell our dd accessible mirror to sync itself up. Including ftp dir.
473 log "Trigger dd accessible parts sync including ftp dir"
474 ${scriptsdir}/sync-dd dd-sync dd-sync1 dd-sync2 sync
# Verifies the public mirror copy against the InRelease checksums, then
# (if clean) fires off the mirror push. Partially elided in this view.
477 function mirrorpush() {
478 log "Checking the public archive copy"
479 cd ${mirrordir}/dists
482 for release in $(find . -name "InRelease"); do
483 echo "Processing: ${release}"
484 subdir=${release%/InRelease}
# Reads "SHASUM SIZE NAME" triples from the SHA1 section of InRelease
# (see the process-substitution feed at the bottom of this loop).
485 while read SHASUM SIZE NAME; do
486 if ! [ -f "${subdir}/${NAME}" ]; then
487 bname=$(basename ${NAME})
# Uncompressed index names listed in InRelease are not kept on disk,
# so their absence is expected and not an error.
488 if [[ "${bname}" =~ ^(Packages|Sources|Contents-[a-zA-Z0-9-]+|Translation-[a-zA-Z_]+)$ ]]; then
489 # We don't keep unpacked files, don't check for their existence.
490 # We might want to go and check their unpacked shasum, but right now
491 # I don't care. I believe it should be enough if all the packed shasums
495 broken=$(( broken + 1 ))
496 echo "File ${subdir}/${NAME} is missing"
500 # We do have symlinks in the tree (see the contents files currently).
501 # So we use "readlink -f" to check the size of the target, as that's basically
502 # what gen-releases does
503 fsize=$(stat -c %s $(readlink -f "${subdir}/${NAME}"))
504 if [ ${fsize} -ne ${SIZE} ]; then
505 broken=$(( broken + 1 ))
506 echo "File ${subdir}/${NAME} has size ${fsize}, expected is ${SIZE}"
# Size matches; now compare the SHA1 of the dereferenced file.
510 fshasum=$(sha1sum $(readlink -f "${subdir}/${NAME}"))
511 fshasum=${fshasum%% *}
512 if [ "${fshasum}" != "${SHASUM}" ]; then
513 broken=$(( broken + 1 ))
514 echo "File ${subdir}/${NAME} has checksum ${fshasum}, expected is ${SHASUM}"
# Keep only the SHA1 block of InRelease: drop the header up to "SHA1:"
# and everything from "SHA256:" on.
517 done < <(sed '1,/SHA1:/d' "${release}" | sed '/SHA256:/,$d')
# Any mismatch aborts the push; a broken mirror must not be announced.
520 if [ $broken -gt 0 ]; then
521 log_error "Trouble with the public mirror, found ${broken} errors"
525 log "Starting the mirrorpush"
526 date -u > /srv/ftp.debian.org/web/mirrorstart
527 echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
528 echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/mirrorstart
# Both pushes run in the background; dinstall does not wait on mirrors.
529 sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
530 sudo -H -u archvsync /home/archvsync/runmirrors -a backports > ~dak/runmirrorsbpo.log 2>&1 &
# Backports mirror sync, run as the backports role user.
533 function mirrorpush-backports() {
534 log "Syncing backports mirror"
535 sudo -u backports /home/backports/bin/update-archive
# Exports per-suite package lists for the i18n (DDTP) project into a
# timestamped directory, signs the timestamp, and flips the i18n symlink.
539 log "Exporting package data foo for i18n project"
540 STAMP=$(date "+%Y%m%d%H%M")
541 mkdir -p ${scriptdir}/i18n/${STAMP}
542 cd ${scriptdir}/i18n/${STAMP}
543 for suite in stable testing unstable; do
# "dak admin s show" prints a "Codename: <name>" line; keep the last word.
544 codename=$(dak admin s show ${suite}|grep '^Codename')
545 codename=${codename##* }
546 echo "Codename is ${codename}"
547 dak control-suite -l ${suite} >${codename}
# The detached-signed timestamp is what the i18n import earlier in this
# file verifies with gpgv before trusting the synced data.
549 echo "${STAMP}" > timestamp
550 gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 473041FA --detach-sign -o timestamp.gpg timestamp
# ln -sfT: atomically repoint the i18n symlink at the newest export, then
# prune exports older than two days (always keeping the one just made).
554 ln -sfT ${scriptdir}/i18n/${STAMP} i18n
557 find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
# Regenerates dinstall/ftp statistics and the stat web pages.
561 log "Updating stats data"
563 $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
# R renders the graphs from the data file produced above.
564 R --slave --vanilla < $base/misc/ftpstats.R
565 dak stats arch-space > $webdir/arch-space
566 dak stats pkg-nums > $webdir/pkg-nums
# Drops transaction id files older than ~3 months.
569 function cleantransactions() {
570 log "Cleanup transaction ids older than 3 months"
572 find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -delete
# Feeds a finished logfile to the log-statistics tool.
575 function logstats() {
576 $masterdir/tools/logs.py "$1"
579 # save timestamp when we start
580 function savetimestamp() {
# NOTE(review): backticks; $(date ...) is the modern equivalent, as used
# elsewhere in this file.
581 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
582 echo ${NOW} > "${dbdir}/dinstallstart"
# Mails the full dinstall logfile to the cron alias.
585 function maillogfile() {
586 cat "$LOGFILE" | mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
# Renames the logfile after the run, keyed on the start timestamp saved by
# savetimestamp; falls back to "now" if that file is missing.
589 function renamelogfile() {
590 if [ -f "${dbdir}/dinstallstart" ]; then
591 NOW=$(cat "${dbdir}/dinstallstart")
593 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
# Only this branch feeds the log into the statistics tooling.
594 logstats "$logdir/dinstall_${NOW}.log"
595 bzip2 -9 "$logdir/dinstall_${NOW}.log"
597 error "Problem, I don't know when dinstall started, unable to do log statistics."
598 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
600 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
601 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# Exports the list of source packages currently in testing.
605 function testingsourcelist() {
606 dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
609 # do a last run of process-unchecked before dinstall is on.
610 function process_unchecked() {
611 log "Processing the unchecked queue"
# Presumably "-p" tells the queue processor to skip taking the daily lock
# (dinstall already holds it) — the flag's consumer is elided; confirm.
612 UNCHECKED_WITHOUT_LOCK="-p"
617 # Function to update a "statefile" telling people what we are doing
620 # This should be called with the argument(s)
621 # - Status name we want to show.
624 RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
# Here-document below is the literal statefile content; keep it clean.
625 cat >"${DINSTALLSTATE}" <<EOF
626 Dinstall start: ${DINSTALLBEGIN}
628 Action start: ${RIGHTNOW}
632 # extract changelogs and stuff
# Extracts changelogs for ftp-master and backports and pushes them to the
# public metadata host. Guarded by $LOCK_CHANGELOG; lockfile retries three
# times (-r3), and on failure the whole step is skipped.
633 function changelogs() {
634 if lockfile -r3 $LOCK_CHANGELOG; then
635 log "Extracting changelogs"
636 dak make-changelog -e -a ftp-master
# Compress the filelist in place if present (xz -f overwrites any old .xz).
637 [ -f ${exportdir}/changelogs/filelist.yaml ] && xz -f ${exportdir}/changelogs/filelist.yaml
638 mkdir -p ${exportpublic}/changelogs
639 cd ${exportpublic}/changelogs
640 rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
# Static component update runs in the background; we don't wait for it.
641 sudo -H -u staticsync /usr/local/bin/static-update-component metadata.ftp-master.debian.org >/dev/null 2>&1 &
# Same dance for the backports archive.
643 dak make-changelog -e -a backports
644 [ -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml ] && xz -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml
645 mkdir -p /srv/backports-master.debian.org/rsync/export/changelogs
646 cd /srv/backports-master.debian.org/rsync/export/changelogs
647 rsync -aHW --delete --delete-after --ignore-errors /srv/backports-master.debian.org/export/changelogs/. .
# NOTE(review): the lock is released before the trap is (re)installed on
# the next line; intervening elided lines may explain the ordering —
# confirm against the full file before relying on it.
648 remove_changelog_lock
649 trap remove_changelog_lock EXIT TERM HUP INT QUIT
# Maintains a git repository of the dists/ index files so pdiffs can be
# derived from its history. Experimental; heavily elided in this view.
653 function gitpdiff() {
654 # Might be that we want to change this to have more than one git repository.
655 # Advantage of one is that we do not need much space in terms of storage in git itself,
656 # git gc is pretty good on our input.
657 # But it might be faster. Well, lets test.
658 log "Adjusting the git tree for pdiffs"
661 # The regex needs the architectures separated with \|
# Collapse "dak admin a list" output onto one line, joined by \| for the
# find -regex alternation below.
662 garchs=$(dak admin a list|sed -e ':q;N;s/\n/\\|/g;t q')
664 # First, get all the files we want to work on. ../dists/ is a symlink to the real dists/ we
666 # Also, we only want contents, packages and sources.
# NOTE(review): the regex interpolates ${archs}, but the variable built
# above is ${garchs} — looks like a mismatch; confirm against the full
# file before changing either name.
667 for file in $(find ../dists/ -regex ".*/\(Contents-\($archs\)\|\(Packages\|Sources\)\).gz"); do
676 # Second, add all there is into git
679 # Maybe we want to make this the same for tag and commit? But well, shouldn't matter
681 TAGD=$(date +%Y-%m-%d-%H-%M)
682 git commit -m "Commit of ${COMD}"