# NOTE(review): every line in this file carries a stray leading number (residue
# of a numbered listing) and the numbering is non-contiguous, so lines appear to
# be elided throughout — confirm against the original dinstall functions file
# before treating this as executable.
2 # Timestamp. Used for dinstall stat graphs
# Print a wall-clock timestamp for the named dinstall stage ($1).
4 echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
# Lock helpers: each removes one of the dinstall lock files.
8 function remove_daily_lock() {
# NOTE(review): body of remove_daily_lock is not visible in this excerpt.
12 # Remove changelog lock
13 function remove_changelog_lock() {
# NOTE(review): body of remove_changelog_lock is not visible in this excerpt.
# Drop both the daily and accepted locks in one go (cleanup path).
18 function remove_all_locks() {
19 rm -f $LOCK_DAILY $LOCK_ACCEPTED
22 # If we error out this one is called, *FOLLOWED* by cleanup above
# Error handler: mails the failing stage's log (when readable) to the
# ftpmaster cron address, with a subject built from the error time and stage.
24 ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
26 subject="ATTENTION ATTENTION!"
# ${error} == "false" marks a run that continues past the failure; any other
# value marks an interrupted run. (else/fi lines not visible in this excerpt.)
27 if [ "${error}" = "false" ]; then
28 subject="${subject} (continued)"
30 subject="${subject} (interrupted)"
32 subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
# Mail the stage log if present, otherwise an apology; the whole if/else
# output is piped into mail(1) via the trailing "fi |".
34 if [ -r "${STAGEFILE}.log" ]; then
35 cat "${STAGEFILE}.log"
37 echo "file ${STAGEFILE}.log does not exist, sorry"
38 fi | mail -s "${subject}" -a "X-Debian: DAK" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
41 ########################################################################
42 # the actual dinstall functions follow #
43 ########################################################################
45 # pushing merkels QA user, part one
47 log "Telling QA user that we start dinstall"
# Best-effort ssh ping to the QA host; BatchMode avoids interactive prompts
# and the timeouts keep dinstall from hanging if qa.debian.org is down.
48 ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1
51 # Updating various files
53 log "Updating Bugs docu, Mirror list and mailing-lists.txt"
# Run the individual updater scripts from $scriptsdir in sequence.
# NOTE(review): failures here are not explicitly checked in the visible lines.
55 $scriptsdir/update-bugdoctxt
56 $scriptsdir/update-mirrorlists
57 $scriptsdir/update-mailingliststxt
58 $scriptsdir/update-pseudopackages.sh
61 # The first i18n one, syncing new descriptions
63 log "Synchronizing i18n package descriptions"
64 # First sync their newest data
65 cd ${scriptdir}/i18nsync
# "|| true" makes the sync best-effort: an unreachable ddtp-sync host must not
# abort dinstall.
66 rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
68 # Now check if we still know about the packages for which they created the files
69 # is the timestamp signed by us?
# NOTE(review): `if $(gpgv ...)` runs gpgv's *output* as a command; the usual
# idiom is `if gpgv ...; then`. It happens to work when gpgv prints nothing to
# stdout, but this should be confirmed and cleaned up.
70 if $(gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
71 # now read it. As its signed by us we are sure the content is what we expect, no need
72 # to do more here. And we only test -d a directory on it anyway.
73 TSTAMP=$(cat timestamp)
74 # do we have the dir still?
75 if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
# Validate the synced files before publishing them.
77 if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
78 # Yay, worked, lets copy around
79 for dir in stretch sid; do
80 if [ -d dists/${dir}/ ]; then
81 cd dists/${dir}/main/i18n
82 rsync -aq --delete --delete-after --exclude Translation-en.bz2 --exclude Translation-*.diff/ . ${ftpdir}/dists/${dir}/main/i18n/.
84 cd ${scriptdir}/i18nsync
# Validation failed: complain loudly and mail the l10n list.
# (else/fi/done closers for the branches above are not visible in this excerpt.)
87 echo "ARRRR, bad guys, wrong files, ARRR"
88 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
# Timestamp directory vanished: skip the update and mail the l10n list.
91 echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
92 echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
# Signature check failed: refuse the data and mail the l10n list.
95 echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
96 echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -a "X-Debian: DAK" -s "You can't keep blaming yourself. Just blame yourself once, and move on." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
100 # Syncing AppStream/DEP-11 data
102 log "Synchronizing AppStream metadata"
103 # First sync their newest data
104 mkdir -p ${scriptdir}/dep11
105 cd ${scriptdir}/dep11
# Best-effort pull of the upstream DEP-11 data; never abort dinstall on it.
106 rsync -aq --delete --delete-after dep11-sync:/does/not/matter . || true
# Validate before publishing into the public archive tree.
109 if ${scriptsdir}/dep11-basic-validate.py . ${scriptdir}/dep11/; then
110 # Yay, worked, lets copy around
111 for dir in stretch sid; do
112 if [ -d ${dir}/ ]; then
113 for comp in main contrib non-free; do
114 mkdir -p ${ftpdir}/dists/${dir}/${comp}/dep11
# NOTE(review): `--exclude *.tmp` is unquoted, so the shell may glob it in the
# current directory before rsync sees it — should be quoted ('*.tmp').
116 rsync -aq --delete --delete-after --exclude *.tmp . ${ftpdir}/dists/${dir}/${comp}/dep11/.
117 cd ${scriptdir}/dep11
# Validation failed: complain and mail the maintainer.
# (done/fi closers for the loops above are not visible in this excerpt.)
122 echo "ARRRR, bad guys, wrong files, ARRR"
123 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" mak@debian.org
128 log "Checking for cruft in overrides"
# Drop source/binary associations that are no longer dominant, then refresh
# the debug suites.
132 function dominate() {
133 log "Removing obsolete source and binary associations"
# NOTE(review): the `dak dominate` invocation itself is not visible in this
# excerpt (line gap before this one).
135 dak manage-debug-suites unstable-debug experimental-debug
# Remove obsolete binaries; experimental additionally drops packages that have
# a newer version in unstable (removal message "NVIU").
138 function autocruft() {
139 log "Check for obsolete binary packages"
140 dak auto-decruft -s unstable
141 dak auto-decruft -s experimental --if-newer-version-in unstable --if-newer-version-in-rm-msg "NVIU"
# Import the Debian keyrings; mail debian-project when the DM keyring changed.
144 function fingerprints() {
145 log "Updating fingerprints"
146 dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
# Changes from the DM keyring import land in $OUTFILE; if it is non-empty the
# announcement mail below is sent.
149 dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
151 if [ -s "${OUTFILE}" ]; then
# Everything after <<EOF below is literal heredoc mail body (the terminating
# EOF is not visible in this excerpt) — do not insert code in there.
152 /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
153 From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
154 To: <debian-project@lists.debian.org>
155 Subject: Debian Maintainers Keyring changes
156 Content-Type: text/plain; charset=utf-8
160 The following changes to the debian-maintainers keyring have just been activated:
164 Debian distribution maintenance software,
165 on behalf of the Keyring maintainers
# Export override data to text files (body largely not visible in excerpt).
172 function overrides() {
173 log "Writing overrides into text files"
# Build a per-archive package→pool-file mapping, bzip2-compressed into
# that archive's indices/ directory.
181 log "Generating package / file mapping"
182 for archive in "${public_archives[@]}"; do
183 archiveroot="$(get_archiveroot "${archive}")"
184 dak make-pkg-file-mapping "${archive}" | bzip2 -9 > "${archiveroot}/indices/package-file.map.bz2"
# Regenerate the core index files for every public archive.
188 function packages() {
189 log "Generating Packages and Sources files"
190 for archive in "${public_archives[@]}"; do
191 log " Generating Packages/Sources for ${archive}"
192 dak generate-packages-sources2 -a "${archive}"
193 log " Generating Contents for ${archive}"
194 dak contents generate -a "${archive}"
# Build pdiff (incremental index diff) files.
199 log "Generating pdiff files"
200 dak generate-index-diffs
# Sign/emit Release files for every public archive.
204 log "Generating Release files"
205 for archive in "${public_archives[@]}"; do
206 dak generate-releases -a "${archive}"
# Expire old suite/queue data from the dak database and disk.
210 function dakcleanup() {
211 log "Cleanup old packages/files"
212 dak clean-suites -m 10000
213 dak clean-queues -i "$unchecked"
# Per-archive housekeeping: drop stray core dumps, check symlinks, and build
# the recursive ls -lR listing (function header not visible in this excerpt).
220 for archive in "${public_archives[@]}"; do
221 archiveroot="$(get_archiveroot "${archive}")"
224 log "Removing any core files ..."
225 find -type f -name core -print -delete
227 log "Checking symlinks ..."
# TZ=UTC keeps the listing reproducible; --rsyncable helps mirror deltas.
230 log "Creating recursive directory listing ... "
232 TZ=UTC ls -lR | gzip -9c --rsyncable > ${FILENAME}.gz
# Build the Maintainers/Uploaders indices for every public archive.
236 function mkmaintainers() {
240 log 'Creating Maintainers index ... '
242 for archive in "${public_archives[@]}"; do
243 archiveroot="$(get_archiveroot "${archive}")"
244 indices="${archiveroot}/indices"
# Skip archives without an indices directory (body of the if not visible).
245 if ! [ -d "${indices}" ]; then
250 dak make-maintainers -a "${archive}" ${scriptdir}/masterfiles/pseudo-packages.maintainers
251 gzip -9v --rsyncable <Maintainers >Maintainers.gz
252 gzip -9v --rsyncable <Uploaders >Uploaders.gz
# Publish gzip'd override files for the listed releases into indices/,
# group-writable.
256 function copyoverrides() {
257 log 'Copying override files into public view ...'
261 for ofile in ${overridedir}/override.{squeeze,wheezy,jessie,stretch,sid}.{,extra.}{main,contrib,non-free}*; do
263 gzip -9cv --rsyncable ${ofile} > ${indices}/${bname}.gz
264 chmod g+w ${indices}/${bname}.gz
# Build the per-arch / per-suite / source file lists used by partial mirrors.
# NOTE(review): this function is heavily elided in this excerpt — several
# `query="` openers, subshell closers and `done` lines are missing; the SQL
# fragments below belong inside quoted string assignments.
269 function mkfilesindices() {
272 cd $base/ftp/indices/files/components
# Query 1: every pool path in the ftp-master archive plus its architecture.
276 log "Querying postgres"
278 SELECT './pool/' || c.name || '/' || f.filename AS path, a.arch_string AS arch_string
280 JOIN files_archive_map af ON f.id = af.file_id
281 JOIN component c ON af.component_id = c.id
282 JOIN archive ON af.archive_id = archive.id
285 JOIN architecture a ON b.architecture = a.id)
287 WHERE archive.name = 'ftp-master'
288 ORDER BY path, arch_string
290 psql -At -c "$query" >$ARCHLIST
# Helper perl one-liners: emit each path plus all its parent directories
# (deduped), and reorder so ./pool/ entries come first.
293 perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
296 perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
# Source list: pool paths without an arch ("|" suffix) plus dists/ source dirs.
299 log "Generating sources list"
301 sed -n 's/|$//p' $ARCHLIST
303 find ./dists -maxdepth 1 \! -type d
304 find ./dists \! -type d | grep "/source/"
305 ) | sort -u | gzip -9 > source.list.gz
# Per-arch lists: arch-specific pool paths, arch-all paths, and the matching
# dists/ subtrees (installer, Contents, binary-$a, ...). amd64 is always added.
307 log "Generating arch lists"
309 ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
311 (sed -n "s/|$a$//p" $ARCHLIST
312 sed -n 's/|all$//p' $ARCHLIST
315 find ./dists -maxdepth 1 \! -type d
316 find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
317 ) | sort -u | gzip -9 > arch-$a.list.gz
# Per-suite lists: all pool files referenced by a suite's sources (incl.
# extra source references) and binaries, via two SQL queries on $suite_id.
320 log "Generating suite lists"
323 local suite_id="$(printf %d $1)"
326 SELECT DISTINCT './pool/' || c.name || '/' || f.filename
328 (SELECT sa.source AS source
329 FROM src_associations sa
330 WHERE sa.suite = $suite_id
333 FROM extra_src_references esr
334 JOIN bin_associations ba ON esr.bin_id = ba.bin
335 WHERE ba.suite = $suite_id
337 SELECT b.source AS source
338 FROM bin_associations ba
339 JOIN binaries b ON ba.bin = b.id WHERE ba.suite = $suite_id) s
340 JOIN dsc_files df ON s.source = df.source
341 JOIN files f ON df.file = f.id
342 JOIN files_archive_map af ON f.id = af.file_id
343 JOIN component c ON af.component_id = c.id
344 JOIN archive ON af.archive_id = archive.id
345 WHERE archive.name = 'ftp-master'
347 psql -F' ' -A -t -c "$query"
350 SELECT './pool/' || c.name || '/' || f.filename
351 FROM bin_associations ba
352 JOIN binaries b ON ba.bin = b.id
353 JOIN files f ON b.file = f.id
354 JOIN files_archive_map af ON f.id = af.file_id
355 JOIN component c ON af.component_id = c.id
356 JOIN archive ON af.archive_id = archive.id
357 WHERE ba.suite = $suite_id AND archive.name = 'ftp-master'
359 psql -F' ' -A -t -c "$query"
# Iterate every known suite that exists under dists/, resolving codename
# symlinks so aliases point at the same list.
362 psql -F' ' -At -c "SELECT id, suite_name FROM suite" |
363 while read id suite; do
364 [ -e $base/ftp/dists/$suite ] || continue
367 distname=$(cd dists; readlink $suite || echo $suite)
368 find ./dists/$distname \! -type d
369 for distdir in ./dists/*; do
370 [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
374 ) | sort -u | gzip -9 > suite-${suite}.list.gz
# Sundries: everything on the ftp site that is not in any generated list.
377 log "Finding everything on the ftp site to generate sundries"
378 (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
381 zcat *.list.gz | cat - *.list | sort -u |
382 diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
# Final per-arch file lists: arch + source + sundries + the static lists,
# pool entries first (via the poolfirst helper above).
384 log "Generating files list"
387 (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
388 cat - sundries.list dists.list project.list docs.list indices.list |
389 sort -u | poolfirst > ../arch-$a.files
393 for dist in sid jessie stretch; do
394 find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
# "typical" mirror profile: i386+amd64 plus p-u and translations.
398 (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-jessie.list.gz ; zcat translation-stretch.list.gz) |
399 sort -u | poolfirst > ../typical.files
# Generate md5sum / dsync file lists for each public archive and hardlink
# duplicate files to save space.
406 function mkchecksums() {
407 local archiveroot dsynclist md5list
409 for archive in "${public_archives[@]}"; do
410 archiveroot="$(get_archiveroot "${archive}")"
411 dsynclist=$dbdir/dsync.${archive}.list
412 md5list=${archiveroot}/indices/md5sums
414 log -n "Creating md5 / dsync index file for ${archive}... "
417 ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
# gzip -9n: omit timestamp/name for reproducible output.
418 ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
# Hardlinking duplicates is best-effort.
419 ${bindir}/dsync-flist -q link-dups $dsynclist || true
# Refresh the hardlinked "public" mirror copy and bump the trace serial.
# NOTE(review): the enclosing function's header is not visible in this excerpt.
424 local archiveroot mirrordir
426 log "Regenerating \"public\" mirror/ hardlink fun"
# Serial is the larger of today's date-based serial and last serial + 1, so
# multiple runs per day still increase monotonically.
427 DATE_SERIAL=$(date +"%Y%m%d01")
428 FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} )
429 if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
430 SERIAL="$DATE_SERIAL"
432 SERIAL="$FILESOAPLUS1"
# Rewrite the mirror trace file from scratch.
434 date -u > ${TRACEFILE}
435 echo "Using dak v1" >> ${TRACEFILE}
436 echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
437 echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
439 # Ugly "hack", but hey, it does what we want.
440 cp ${TRACEFILE} ${TRACEFILE_BDO}
# Hardlink-copy each archive into its sibling mirror/ directory;
# --link-dest makes unchanged files hardlinks rather than copies.
442 for archive in ftp-master backports; do
443 archiveroot="$(get_archiveroot "${archive}")"
444 mirrordir="${archiveroot}/../mirror"
446 rsync -aH --link-dest ${archiveroot} --delete --delete-after --delete-excluded --exclude Packages.*.new --exclude Sources.*.new --ignore-errors ${archiveroot}/. .
# Expire old database dumps (enclosing function header not visible here).
451 log "Expiring old database dumps..."
453 $scriptsdir/expire_dumps -d . -p -f "dump_*"
# Clean out stale entries from the transitions checker.
456 function transitionsclean() {
457 log "Removing out of date transitions..."
459 dak transitions -c -a
# Export per-source DM upload permissions for the website.
463 log "Updating DM permissions page"
464 dak acl export-per-source dm >$exportdir/dm.txt
# Auto-categorize ftp.debian.org bugs, as the unprivileged dak user.
468 log "Categorizing uncategorized bugs filed against ftp.debian.org"
469 sudo -u dak-unpriv dak bts-categorize
# Trigger the developer-accessible mirror to resync (including the ftp dir).
472 function ddaccess() {
473 # Tell our dd accessible mirror to sync itself up. Including ftp dir.
474 log "Trigger dd accessible parts sync including ftp dir"
475 ${scriptsdir}/sync-dd dd-sync dd-sync1 dd-sync2 sync
# Verify the public mirror copy against every InRelease file (existence, size,
# SHA1 of each listed index), then kick off the mirror push.
478 function mirrorpush() {
479 log "Checking the public archive copy"
480 cd ${mirrordir}/dists
# For every InRelease, check each file in its SHA1 section (input is fed in
# at the `done <` redirection below).
483 for release in $(find . -name "InRelease"); do
484 echo "Processing: ${release}"
485 subdir=${release%/InRelease}
486 while read SHASUM SIZE NAME; do
487 if ! [ -f "${subdir}/${NAME}" ]; then
488 bname=$(basename ${NAME})
# Uncompressed index files are listed in InRelease but intentionally not
# kept on disk, so a matching basename is not an error.
489 if [[ "${bname}" =~ ^(Packages|Sources|Contents-[a-zA-Z0-9-]+|Translation-[a-zA-Z_]+|Components-(amd64|i386)\.yml|icons-(128x128|64x64)\.tar)$ ]]; then
491 # We don't keep unpacked files, don't check for their existance.
492 # We might want to go and check their unpacked shasum, but right now
493 # I don't care. I believe it should be enough if all the packed shasums
497 broken=$(( broken + 1 ))
498 echo "File ${subdir}/${NAME} is missing"
502 # We do have symlinks in the tree (see the contents files currently).
503 # So we use "readlink -f" to check the size of the target, as thats basically
504 # what gen-releases does
505 fsize=$(stat -c %s $(readlink -f "${subdir}/${NAME}"))
506 if [ ${fsize} -ne ${SIZE} ]; then
507 broken=$(( broken + 1 ))
508 echo "File ${subdir}/${NAME} has size ${fsize}, expected is ${SIZE}"
# Size matched; now compare the SHA1 checksum.
512 fshasum=$(sha1sum $(readlink -f "${subdir}/${NAME}"))
513 fshasum=${fshasum%% *}
514 if [ "${fshasum}" != "${SHASUM}" ]; then
515 broken=$(( broken + 1 ))
516 echo "File ${subdir}/${NAME} has checksum ${fshasum}, expected is ${SHASUM}"
# Feed the loop only the SHA1 block of the InRelease file.
519 done < <(sed '1,/SHA1:/d' "${release}" | sed '/SHA256:/,$d')
# Abort the push if any file was missing, mis-sized, or mis-summed.
522 if [ $broken -gt 0 ]; then
523 log_error "Trouble with the public mirror, found ${broken} errors"
# All good: record the push start and fire the mirror runs in the background
# (main archive and backports in parallel).
527 log "Starting the mirrorpush"
528 date -u > /srv/ftp.debian.org/web/mirrorstart
529 echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
530 echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/mirrorstart
531 sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
532 sudo -H -u archvsync /home/archvsync/runmirrors -a backports > ~dak/runmirrorsbpo.log 2>&1 &
# Sync the backports mirror, as the backports user.
535 function mirrorpush-backports() {
536 log "Syncing backports mirror"
537 sudo -u backports /home/backports/bin/update-archive
# Export per-suite package lists for the i18n (DDTP) project, in a
# timestamped directory, with a signed timestamp file.
541 log "Exporting package data foo for i18n project"
542 STAMP=$(date "+%Y%m%d%H%M")
543 mkdir -p ${scriptdir}/i18n/${STAMP}
544 cd ${scriptdir}/i18n/${STAMP}
545 for suite in stable testing unstable; do
# Extract the suite's codename from `dak admin s show` ("Codename: X").
546 codename=$(dak admin s show ${suite}|grep '^Codename')
547 codename=${codename##* }
548 echo "Codename is ${codename}"
549 dak control-suite -l ${suite} >${codename}
# Sign the timestamp so the i18n sync step above can verify it with gpgv.
551 echo "${STAMP}" > timestamp
552 gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 473041FA --detach-sign -o timestamp.gpg timestamp
# Point the stable "i18n" symlink at the new export, then prune exports
# older than two days (keeping the current one).
556 ln -sfT ${scriptdir}/i18n/${STAMP} i18n
559 find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
# Refresh dinstall statistics and the stats pages.
563 log "Updating stats data"
565 $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
566 R --slave --vanilla < $base/misc/ftpstats.R
567 dak stats arch-space > $webdir/arch-space
568 dak stats pkg-nums > $webdir/pkg-nums
# Delete WAL transaction-id files older than ~3 months.
571 function cleantransactions() {
572 log "Cleanup transaction ids older than 3 months"
574 find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -delete
# Feed a finished logfile ($1) into the log statistics tool.
577 function logstats() {
578 $masterdir/tools/logs.py "$1"
581 # save timestamp when we start
# Record the dinstall start time in ${dbdir}/dinstallstart (read back later
# by renamelogfile).
582 function savetimestamp() {
583 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
584 echo ${NOW} > "${dbdir}/dinstallstart"
# Mail the full dinstall log to the ftpmaster cron address.
587 function maillogfile() {
588 cat "$LOGFILE" | mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
# Archive the logfile under the run's start timestamp, run log statistics,
# and compress it. Falls back to "now" if the start timestamp is missing.
591 function renamelogfile() {
592 if [ -f "${dbdir}/dinstallstart" ]; then
593 NOW=$(cat "${dbdir}/dinstallstart")
595 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
596 logstats "$logdir/dinstall_${NOW}.log"
597 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# No start marker: archive without statistics.
599 error "Problem, I don't know when dinstall started, unable to do log statistics."
600 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
602 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
603 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# Export the list of source packages in testing (heidi format) for the web.
607 function testingsourcelist() {
608 dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
611 # do a last run of process-unchecked before dinstall is on.
612 function process_unchecked() {
613 log "Processing the unchecked queue"
# "-p" tells process-upload to skip taking the unchecked lock (dinstall
# already coordinates locking at this point).
614 UNCHECKED_WITHOUT_LOCK="-p"
619 # Function to update a "statefile" telling people what we are doing
622 # This should be called with the argument(s)
623 # - Status name we want to show.
# Rewrite ${DINSTALLSTATE} with the run's start time and the current action.
# Everything after <<EOF below is heredoc content (terminator not visible in
# this excerpt) — do not insert code in there.
626 RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
627 cat >"${DINSTALLSTATE}" <<EOF
628 Dinstall start: ${DINSTALLBEGIN}
630 Action start: ${RIGHTNOW}
634 # extract changelogs and stuff
# Extract changelogs for ftp-master and backports and publish them to the
# public export trees; guarded by $LOCK_CHANGELOG (lockfile retries 3 times).
635 function changelogs() {
636 if lockfile -r3 $LOCK_CHANGELOG; then
637 log "Extracting changelogs"
638 dak make-changelog -e -a ftp-master
639 [ -f ${exportdir}/changelogs/filelist.yaml ] && xz -f ${exportdir}/changelogs/filelist.yaml
640 mkdir -p ${exportpublic}/changelogs
641 cd ${exportpublic}/changelogs
642 rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
# Fire the static-component update in the background; output discarded.
643 sudo -H -u staticsync /usr/local/bin/static-update-component metadata.ftp-master.debian.org >/dev/null 2>&1 &
# Same dance for backports-master.
645 dak make-changelog -e -a backports
646 [ -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml ] && xz -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml
647 mkdir -p /srv/backports-master.debian.org/rsync/export/changelogs
648 cd /srv/backports-master.debian.org/rsync/export/changelogs
649 rsync -aHW --delete --delete-after --ignore-errors /srv/backports-master.debian.org/export/changelogs/. .
# NOTE(review): the lock is released *before* the trap is (re)installed here;
# usually the trap is set right after acquiring the lock. Lines may be elided
# between these two — confirm against the original file.
650 remove_changelog_lock
651 trap remove_changelog_lock EXIT TERM HUP INT QUIT
# Experimental: track pdiff-relevant index files (Contents/Packages/Sources)
# in a git tree, one commit per run.
655 function gitpdiff() {
656 # Might be that we want to change this to have more than one git repository.
657 # Advantage of one is that we do not need much space in terms of storage in git itself,
658 # git gc is pretty good on our input.
659 # But it might be faster. Well, lets test.
660 log "Adjusting the git tree for pdiffs"
663 # The regex needs the architectures seperated with \|
# Join all known architectures with \| for use in the find -regex below.
664 garchs=$(dak admin a list|sed -e ':q;N;s/\n/\\|/g;t q')
666 # First, get all the files we want to work on. ../dists/ is a symlink to the real dists/ we
668 # Also, we only want contents, packages and sources.
# NOTE(review): this find uses ${archs}, but the variable computed above is
# ${garchs} — one of the two names looks wrong; confirm against the original.
669 for file in $(find ../dists/ -regex ".*/\(Contents-\($archs\)\|\(Packages\|Sources\)\).gz"); do
678 # Second, add all there is into git
681 # Maybe we want to make this the same for tag and commit? But well, shouldn't matter
# Tag/commit named after the current date+time.
683 TAGD=$(date +%Y-%m-%d-%H-%M)
684 git commit -m "Commit of ${COMD}"