# Top-level helper: print a wall-clock timestamp for a dinstall stage.
# $1 is presumably the stage name being timed -- TODO confirm against caller.
2 # Timestamp. Used for dinstall stat graphs
4 echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
# Lock helpers. Their bodies are only partially visible in this excerpt;
# they appear to remove lock files taken by the daily cron run.
8 function remove_daily_lock() {
12 # Remove changelog lock
13 function remove_changelog_lock() {
# Drop every dinstall lock at once (used from cleanup/error paths).
18 function remove_all_locks() {
# NOTE(review): $LOCK_DAILY / $LOCK_ACCEPTED are expanded unquoted; safe
# only while the lock paths contain no whitespace or glob characters.
19 rm -f $LOCK_DAILY $LOCK_ACCEPTED
# Error handler: builds a subject line describing the failed stage and
# mails that stage's log (if readable) to the ftpmaster cron alias.
22 # If we error out this one is called, *FOLLOWED* by cleanup above
24 ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
26 subject="ATTENTION ATTENTION!"
# ${error} == "false" apparently means the run carried on after the failure
# ("continued"); anything else means it was interrupted -- TODO confirm,
# the lines that set ${error} are not visible in this excerpt.
27 if [ "${error}" = "false" ]; then
28 subject="${subject} (continued)"
30 subject="${subject} (interrupted)"
32 subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
# Mail the per-stage log when it exists, otherwise a short placeholder.
34 if [ -r "${STAGEFILE}.log" ]; then
35 cat "${STAGEFILE}.log"
37 echo "file ${STAGEFILE}.log does not exist, sorry"
# The whole if/else block feeds a single mail invocation via this pipe.
38 fi | mail -s "${subject}" -a "X-Debian: DAK" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
41 ########################################################################
42 # the actual dinstall functions follow #
43 ########################################################################
# Ping the QA host so it knows a dinstall run has started.
45 # pushing merkels QA user, part one
47 log "Telling QA user that we start dinstall"
# BatchMode plus the timeouts keep an unreachable QA host from hanging
# the whole dinstall run.
48 ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1
# Refresh assorted static text files shipped with the archive.
51 # Updating various files
53 log "Updating Bugs docu, Mirror list and mailing-lists.txt"
55 $scriptsdir/update-bugdoctxt
56 $scriptsdir/update-mirrorlists
57 $scriptsdir/update-mailingliststxt
58 $scriptsdir/update-pseudopackages.sh
# i18n pass one: pull fresh Translation data from the DDTP side and, only
# if their signed timestamp verifies against our keyring, publish it.
61 # The first i18n one, syncing new descriptions
63 log "Synchronizing i18n package descriptions"
64 # First sync their newest data
65 cd ${scriptdir}/i18nsync
# Remote failures are tolerated (|| true): stale translation data is
# acceptable, a failed dinstall run is not.
66 rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
68 # Now check if we still know about the packages for which they created the files
69 # is the timestamp signed by us?
# NOTE(review): `if $(gpgv ...)` command-substitutes gpgv's stdout and then
# executes that output as a command; it happens to work because gpgv writes
# its diagnostics to stderr, but `if gpgv ...; then` would be the safe form.
70 if $(gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
71 # now read it. As it's signed by us we are sure the content is what we expect, no need
72 # to do more here. And we only test -d a directory on it anyway.
73 TSTAMP=$(cat timestamp)
74 # do we have the dir still?
75 if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
# Sanity-check the supplied files before copying anything into the archive.
77 if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
78 # Yay, worked, lets copy around
79 for dir in stretch sid; do
80 if [ -d dists/${dir}/ ]; then
81 cd dists/${dir}/main/i18n
82 rsync -aq --delete --delete-after --exclude Translation-en.bz2 --exclude Translation-*.diff/ . ${ftpdir}/dists/${dir}/main/i18n/.
84 cd ${scriptdir}/i18nsync
# Validation failed: notify the l10n list, publish nothing.
87 echo "ARRRR, bad guys, wrong files, ARRR"
88 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
# Timestamp directory missing: the remote data references a snapshot we do
# not have; skip the update and notify the l10n list.
91 echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
92 echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
# Signature verification failed: refuse the data outright and complain.
95 echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
96 echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -a "X-Debian: DAK" -s "You can't keep blaming yourself. Just blame yourself once, and move on." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
# Cruft checks: stale overrides, obsolete source/binary associations, and
# a dry run of auto-decruft whose report is mailed for human review.
101 log "Checking for cruft in overrides"
105 function dominate() {
106 log "Removing obsolete source and binary associations"
110 function autocruft() {
111 log "Check for obsolete binary packages"
# Default to the suites where cruft accumulates; callers may override via $1.
112 local suites=${1:-"unstable experimental"}
113 # Initially only run with -n and output into a file, to check.
114 local dstamp=$(date -u +"%Y-%m-%d_%H:%M")
115 local report=${webdir}/auto-cruft-report_${dstamp}.txt
116 echo "Report for ${dstamp}" > ${report}
117 for suite in ${suites}; do
# -n: dry run only; nothing is actually removed by this pass.
118 dak auto-decruft -n -s ${suite} >> ${report}
# mail -e: only send when the report body is non-empty.
120 cat ${report} | mail -a "X-Debian: DAK" -e -s "Debian archive AUTOMATIC cruft report for ${dstamp}" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" -c niels@thykier.net ftpmaster@ftp-master.debian.org
# Import keyrings and announce changes to the DM keyring when any occur.
123 function fingerprints() {
124 log "Updating fingerprints"
125 dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
# Import DM keys; the list of changed entries lands in ${OUTFILE}.
128 dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
# Only announce when something actually changed (-s: file is non-empty).
130 if [ -s "${OUTFILE}" ]; then
# Everything from the next line to the (not visible) EOF marker is the
# literal mail body of a here-document; nothing may be inserted inside it.
131 /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
132 From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
133 To: <debian-project@lists.debian.org>
134 Subject: Debian Maintainers Keyring changes
135 Content-Type: text/plain; charset=utf-8
139 The following changes to the debian-maintainers keyring have just been activated:
143 Debian distribution maintenance software,
144 on behalf of the Keyring maintainers
151 function overrides() {
152 log "Writing overrides into text files"
# Build the package -> pool-file mapping index for every public archive.
160 log "Generating package / file mapping"
161 for archive in "${public_archives[@]}"; do
162 archiveroot="$(get_archiveroot "${archive}")"
163 dak make-pkg-file-mapping "${archive}" | bzip2 -9 > "${archiveroot}/indices/package-file.map.bz2"
# Regenerate the Packages/Sources and Contents indices per archive.
167 function packages() {
168 log "Generating Packages and Sources files"
169 for archive in "${public_archives[@]}"; do
170 log " Generating Packages/Sources for ${archive}"
171 dak generate-packages-sources2 -a "${archive}"
172 log " Generating Contents for ${archive}"
173 dak contents generate -a "${archive}"
# Incremental index diffs (pdiffs) for apt.
178 log "Generating pdiff files"
179 dak generate-index-diffs
# Release file generation, one pass per public archive.
183 log "Generating Release files"
184 for archive in "${public_archives[@]}"; do
185 dak generate-releases -a "${archive}"
189 function dakcleanup() {
190 log "Cleanup old packages/files"
# -m 10000: process at most 10000 files per cleaning batch.
191 dak clean-suites -m 10000
192 dak clean-queues -i "$unchecked"
# Per-archive housekeeping: drop stray core files, check symlinks, and
# publish a recursive directory listing for mirror tooling.
199 for archive in "${public_archives[@]}"; do
200 archiveroot="$(get_archiveroot "${archive}")"
203 log "Removing any core files ..."
204 find -type f -name core -print -delete
206 log "Checking symlinks ..."
209 log "Creating recursive directory listing ... "
# TZ=UTC keeps the listing stable across hosts; --rsyncable helps mirrors.
211 TZ=UTC ls -lR | gzip -9c --rsyncable > ${FILENAME}.gz
# Build the Maintainers/Uploaders indices for each public archive.
215 function mkmaintainers() {
219 log 'Creating Maintainers index ... '
221 for archive in "${public_archives[@]}"; do
222 archiveroot="$(get_archiveroot "${archive}")"
223 indices="${archiveroot}/indices"
# Skip archives that carry no indices directory.
224 if ! [ -d "${indices}" ]; then
# pseudo-packages.maintainers supplies maintainers for BTS pseudo-packages.
229 dak make-maintainers -a "${archive}" ${scriptdir}/masterfiles/pseudo-packages.maintainers
230 gzip -9v --rsyncable <Maintainers >Maintainers.gz
231 gzip -9v --rsyncable <Uploaders >Uploaders.gz
# Publish the per-suite/per-component override files under indices/.
235 function copyoverrides() {
236 log 'Copying override files into public view ...'
238 for ofile in ${overridedir}/override.{squeeze,wheezy,jessie,stretch,sid}.{,extra.}{main,contrib,non-free}*; do
240 gzip -9cv --rsyncable ${ofile} > ${indices}/${bname}.gz
# Group-writable so other archive tooling can refresh them in place.
241 chmod g+w ${indices}/${bname}.gz
# Build the per-arch / per-suite file lists under indices/files/components
# that partial mirrors use to decide what to fetch.
245 function mkfilesindices() {
248 cd $base/ftp/indices/files/components
252 log "Querying postgres"
# The SQL below is part of one multi-line query string; keep it free of
# shell comments.
254 SELECT './pool/' || c.name || '/' || f.filename AS path, a.arch_string AS arch_string
256 JOIN files_archive_map af ON f.id = af.file_id
257 JOIN component c ON af.component_id = c.id
258 JOIN archive ON af.archive_id = archive.id
261 JOIN architecture a ON b.architecture = a.id)
263 WHERE archive.name = 'ftp-master'
264 ORDER BY path, arch_string
266 psql -At -c "$query" >$ARCHLIST
# Emit each path plus every parent directory exactly once (%d dedups).
269 perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
# Stable "pool first" ordering: ./pool/ paths stream out first, the
# buffered non-pool paths follow.
272 perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
275 log "Generating sources list"
277 sed -n 's/|$//p' $ARCHLIST
279 find ./dists -maxdepth 1 \! -type d
280 find ./dists \! -type d | grep "/source/"
281 ) | sort -u | gzip -9 > source.list.gz
283 log "Generating arch lists"
# amd64 is force-added to the arch set; "all" is folded into each arch below.
285 ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
287 (sed -n "s/|$a$//p" $ARCHLIST
288 sed -n 's/|all$//p' $ARCHLIST
291 find ./dists -maxdepth 1 \! -type d
292 find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
293 ) | sort -u | gzip -9 > arch-$a.list.gz
296 log "Generating suite lists"
# $1 is a suite id; printf %d coerces it to an integer before it is
# interpolated into the SQL queries below.
299 local suite_id="$(printf %d $1)"
302 SELECT DISTINCT './pool/' || c.name || '/' || f.filename
304 (SELECT sa.source AS source
305 FROM src_associations sa
306 WHERE sa.suite = $suite_id
309 FROM extra_src_references esr
310 JOIN bin_associations ba ON esr.bin_id = ba.bin
311 WHERE ba.suite = $suite_id
313 SELECT b.source AS source
314 FROM bin_associations ba
315 JOIN binaries b ON ba.bin = b.id WHERE ba.suite = $suite_id) s
316 JOIN dsc_files df ON s.source = df.source
317 JOIN files f ON df.file = f.id
318 JOIN files_archive_map af ON f.id = af.file_id
319 JOIN component c ON af.component_id = c.id
320 JOIN archive ON af.archive_id = archive.id
321 WHERE archive.name = 'ftp-master'
323 psql -F' ' -A -t -c "$query"
326 SELECT './pool/' || c.name || '/' || f.filename
327 FROM bin_associations ba
328 JOIN binaries b ON ba.bin = b.id
329 JOIN files f ON b.file = f.id
330 JOIN files_archive_map af ON f.id = af.file_id
331 JOIN component c ON af.component_id = c.id
332 JOIN archive ON af.archive_id = archive.id
333 WHERE ba.suite = $suite_id AND archive.name = 'ftp-master'
335 psql -F' ' -A -t -c "$query"
# Walk every suite known to the database and build its file list.
338 psql -F' ' -At -c "SELECT id, suite_name FROM suite" |
339 while read id suite; do
340 [ -e $base/ftp/dists/$suite ] || continue
# Resolve codename symlinks so suite and codename listings agree.
343 distname=$(cd dists; readlink $suite || echo $suite)
344 find ./dists/$distname \! -type d
345 for distdir in ./dists/*; do
346 [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
350 ) | sort -u | gzip -9 > suite-${suite}.list.gz
353 log "Finding everything on the ftp site to generate sundries"
354 (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
# "sundries" = files present on disk that no arch/suite/source list claims.
357 zcat *.list.gz | cat - *.list | sort -u |
358 diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
360 log "Generating files list"
363 (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
364 cat - sundries.list dists.list project.list docs.list indices.list |
365 sort -u | poolfirst > ../arch-$a.files
# Per-dist translation file lists.
369 for dist in sid jessie stretch; do
370 find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
# "typical" mirror profile: i386 + amd64 plus proposed-updates and
# translation lists combined.
374 (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-jessie.list.gz ; zcat translation-stretch.list.gz) |
375 sort -u | poolfirst > ../typical.files
# Build the md5sums / dsync file-list index for each public archive.
382 function mkchecksums() {
383 local archiveroot dsynclist md5list
385 for archive in "${public_archives[@]}"; do
386 archiveroot="$(get_archiveroot "${archive}")"
387 dsynclist=$dbdir/dsync.${archive}.list
388 md5list=${archiveroot}/indices/md5sums
390 log -n "Creating md5 / dsync index file for ${archive}... "
# The list excludes itself so it never records its own checksum entry.
393 ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
# gzip -9n: omit the timestamp so the output is reproducible.
394 ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
# Duplicate hardlinking is best-effort; never fail the run over it.
395 ${bindir}/dsync-flist -q link-dups $dsynclist || true
400 local archiveroot mirrordir
402 log "Regenerating \"public\" mirror/ hardlink fun"
# The trace serial is max(today's date-based serial, previous serial + 1),
# so it is strictly increasing even across multiple runs per day.
403 DATE_SERIAL=$(date +"%Y%m%d01")
404 FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} )
405 if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
406 SERIAL="$DATE_SERIAL"
408 SERIAL="$FILESOAPLUS1"
# Rewrite the trace file with the fresh date, version, host and serial.
410 date -u > ${TRACEFILE}
411 echo "Using dak v1" >> ${TRACEFILE}
412 echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
413 echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
415 # Ugly "hack", but hey, it does what we want.
416 cp ${TRACEFILE} ${TRACEFILE_BDO}
# Hardlink (-H plus --link-dest) the archive into mirror/ so the public
# tree costs almost no extra disk space.
418 for archive in "${public_archives[@]}"; do
419 archiveroot="$(get_archiveroot "${archive}")"
420 mirrordir="${archiveroot}/../mirror"
422 rsync -aH --link-dest ${archiveroot} --delete --delete-after --delete-excluded --exclude Packages.*.new --exclude Sources.*.new --ignore-errors ${archiveroot}/. .
# Prune old database dumps according to the expire_dumps policy script.
427 log "Expiring old database dumps..."
429 $scriptsdir/expire_dumps -d . -p -f "dump_*"
432 function transitionsclean() {
433 log "Removing out of date transitions..."
435 dak transitions -c -a
# Export the Debian Maintainer per-source upload permission table.
439 log "Updating DM permissions page"
440 dak acl export-per-source dm >$exportdir/dm.txt
# Run the BTS categorizer as an unprivileged user.
444 log "Categorizing uncategorized bugs filed against ftp.debian.org"
445 sudo -u dak-unpriv dak bts-categorize
448 function ddaccess() {
449 # Tell our dd accessible mirror to sync itself up. Including ftp dir.
450 log "Trigger dd accessible parts sync including ftp dir"
451 ${scriptsdir}/sync-dd dd-sync dd-sync1 dd-sync2 sync
# Verify the public mirror copy against every InRelease file (size and
# SHA1 of each listed entry), then trigger the mirror push.
454 function mirrorpush() {
455 log "Checking the public archive copy"
456 cd ${mirrordir}/dists
459 for release in $(find . -name "InRelease"); do
460 echo "Processing: ${release}"
461 subdir=${release%/InRelease}
# Each record read from the SHA1 section: checksum, size, relative name.
462 while read SHASUM SIZE NAME; do
463 if ! [ -f "${subdir}/${NAME}" ]; then
464 bname=$(basename ${NAME})
465 if [[ "${bname}" =~ ^(Packages|Sources|Translation-[a-zA-Z_]+)$ ]]; then
466 # We don't keep unpacked files, don't check for their existence.
467 # We might want to go and check their unpacked shasum, but right now
468 # I don't care. I believe it should be enough if all the packed shasums
472 broken=$(( broken + 1 ))
473 echo "File ${subdir}/${NAME} is missing"
477 # We do have symlinks in the tree (see the contents files currently).
478 # So we use "readlink -f" to check the size of the target, as that's basically
479 # what gen-releases does
480 fsize=$(stat -c %s $(readlink -f "${subdir}/${NAME}"))
481 if [ ${fsize} -ne ${SIZE} ]; then
482 broken=$(( broken + 1 ))
483 echo "File ${subdir}/${NAME} has size ${fsize}, expected is ${SIZE}"
# Compare the SHA1 of the symlink-resolved file with the Release entry.
487 fshasum=$(sha1sum $(readlink -f "${subdir}/${NAME}"))
488 fshasum=${fshasum%% *}
489 if [ "${fshasum}" != "${SHASUM}" ]; then
490 broken=$(( broken + 1 ))
491 echo "File ${subdir}/${NAME} has checksum ${fshasum}, expected is ${SHASUM}"
# Feed the loop only the SHA1 section of the InRelease file.
494 done < <(sed '1,/SHA1:/d' "${release}" | sed '/SHA256:/,$d')
# Abort the push if any mismatch was detected above.
497 if [ $broken -gt 0 ]; then
498 log_error "Trouble with the public mirror, found ${broken} errors"
502 log "Starting the mirrorpush"
503 date -u > /srv/ftp.debian.org/web/mirrorstart
504 echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
505 echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/mirrorstart
# Kick off the main and backports mirror pushes in the background.
506 sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
507 sudo -H -u archvsync /home/archvsync/runmirrors -a backports > ~dak/runmirrorsbpo.log 2>&1 &
510 function mirrorpush-backports() {
511 log "Syncing backports mirror"
512 sudo -u backports /home/backports/bin/update-archive
# i18n pass two: export per-suite package lists, sign a timestamp so the
# DDTP side (i18n pass one) can verify the export, and publish it via the
# i18n symlink.
516 log "Exporting package data foo for i18n project"
517 STAMP=$(date "+%Y%m%d%H%M")
518 mkdir -p ${scriptdir}/i18n/${STAMP}
519 cd ${scriptdir}/i18n/${STAMP}
520 for suite in stable testing unstable; do
# Map the suite name to its codename via "dak admin s show".
521 codename=$(dak admin s show ${suite}|grep '^Codename')
522 codename=${codename##* }
523 echo "Codename is ${codename}"
524 dak control-suite -l ${suite} >${codename}
526 echo "${STAMP}" > timestamp
# Detached signature lets the importer verify the export's authorship.
527 gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 473041FA --detach-sign -o timestamp.gpg timestamp
# ln -T: replace the symlink atomically even when it already exists.
531 ln -sfT ${scriptdir}/i18n/${STAMP} i18n
# Keep only exports younger than ~2 days, plus the one just created.
534 find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
# Refresh dinstall statistics and render them via the R plotting script.
538 log "Updating stats data"
540 $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
541 R --slave --vanilla < $base/misc/ftpstats.R
542 dak stats arch-space > $webdir/arch-space
543 dak stats pkg-nums > $webdir/pkg-nums
546 function cleantransactions() {
547 log "Cleanup transaction ids older than 3 months"
# -print0 / xargs -0 keeps this safe for arbitrary file names.
549 find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -print0 | xargs -0 --no-run-if-empty rm
552 function logstats() {
# $1: path of the dinstall log file to feed into the stats tool.
553 $masterdir/tools/logs.py "$1"
556 # save timestamp when we start
557 function savetimestamp() {
# NOTE(review): NOW is deliberately global -- renamelogfile/maillogfile
# read it later in this file.
558 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
559 echo ${NOW} > "${dbdir}/dinstallstart"
# Mail the full run log to the ftpmaster cron alias.
562 function maillogfile() {
563 cat "$LOGFILE" | mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
# Rename the run log to dinstall_<start-timestamp>.log, feed it to the
# stats tool and compress it.
566 function renamelogfile() {
# Prefer the timestamp written by savetimestamp so the log name matches
# the run's start time; fall back to "now" when dinstallstart is missing.
567 if [ -f "${dbdir}/dinstallstart" ]; then
568 NOW=$(cat "${dbdir}/dinstallstart")
570 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
571 logstats "$logdir/dinstall_${NOW}.log"
572 bzip2 -9 "$logdir/dinstall_${NOW}.log"
574 error "Problem, I don't know when dinstall started, unable to do log statistics."
575 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
577 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
578 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# Export the list of source packages currently in testing.
582 function testingsourcelist() {
583 dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
586 # do a last run of process-unchecked before dinstall is on.
587 function process_unchecked() {
588 log "Processing the unchecked queue"
# -p flag presumably tells the queue processor to skip taking the daily
# lock -- TODO confirm; the flag's consumer is not visible in this excerpt.
589 UNCHECKED_WITHOUT_LOCK="-p"
594 # Function to update a "statefile" telling people what we are doing
597 # This should be called with the argument(s)
598 # - Status name we want to show.
# Record both the overall dinstall start and the current action's start.
601 RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
# Everything from the next line to the (not visible) EOF marker is the
# literal statefile content of a here-document; insert nothing inside it.
602 cat >"${DINSTALLSTATE}" <<EOF
603 Dinstall start: ${DINSTALLBEGIN}
605 Action start: ${RIGHTNOW}
609 # extract changelogs and stuff
610 function changelogs() {
# lockfile -r3: retry a few times; if another changelog run holds the
# lock, this whole step is skipped.
611 if lockfile -r3 $LOCK_CHANGELOG; then
612 log "Extracting changelogs"
613 dak make-changelog -e -a ftp-master
# Compress the previous filelist so the export stays bounded in size.
614 [ -f ${exportdir}/changelogs/filelist.yaml ] && xz -f ${exportdir}/changelogs/filelist.yaml
615 mkdir -p ${exportpublic}/changelogs
616 cd ${exportpublic}/changelogs
617 rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
# Fire-and-forget static-mirror update for metadata.ftp-master.
618 sudo -H -u staticsync /usr/local/bin/static-update-component metadata.ftp-master.debian.org >/dev/null 2>&1 &
# Same export dance for the backports changelogs.
620 dak make-changelog -e -a backports
621 [ -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml ] && xz -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml
622 mkdir -p /srv/backports-master.debian.org/rsync/export/changelogs
623 cd /srv/backports-master.debian.org/rsync/export/changelogs
624 rsync -aHW --delete --delete-after --ignore-errors /srv/backports-master.debian.org/export/changelogs/. .
625 remove_changelog_lock
# NOTE(review): the trap is (re)armed *after* the explicit unlock above;
# presumably this resets an earlier trap so later exit paths stay clean --
# confirm against the full file, since intervening lines are not visible.
626 trap remove_changelog_lock EXIT TERM HUP INT QUIT
630 function gitpdiff() {
631 # Might be that we want to change this to have more than one git repository.
632 # Advantage of one is that we do not need much space in terms of storage in git itself,
633 # git gc is pretty good on our input.
634 # But it might be faster. Well, lets test.
635 log "Adjusting the git tree for pdiffs"
# Build an alternation of all known architectures for the find regex.
638 # The regex needs the architectures separated with \|
639 garchs=$(dak admin a list|sed -e ':q;N;s/\n/\\|/g;t q')
641 # First, get all the files we want to work on. ../dists/ is a symlink to the real dists/ we
643 # Also, we only want contents, packages and sources.
# NOTE(review): the regex interpolates $archs, but the variable built above
# is named garchs -- if $archs is unset the Contents alternation is empty.
# Verify which name the full file actually defines before changing anything.
644 for file in $(find ../dists/ -regex ".*/\(Contents-\($archs\)\|\(Packages\|Sources\)\).gz"); do
653 # Second, add all there is into git
656 # Maybe we want to make this the same for tag and commit? But well, shouldn't matter
# NOTE(review): only TAGD is set here; ${COMD} used below is presumably
# assigned on a line not visible in this excerpt -- confirm.
658 TAGD=$(date +%Y-%m-%d-%H-%M)
659 git commit -m "Commit of ${COMD}"