# NOTE(review): this file is an elided listing — the original line numbers are
# embedded at the start of every line and many lines are missing, so each
# function below is incomplete as shown. Comments added here describe only
# what the visible lines demonstrate.
2 # Timestamp. Used for dinstall stat graphs
# Emits a timestamped marker; $1 presumably names the current dinstall stage
# (the enclosing function definition is elided) — TODO confirm.
4 echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
# Lock helpers; bodies elided except the rm in remove_all_locks.
8 function remove_daily_lock() {
12 # Remove changelog lock
13 function remove_changelog_lock() {
# Drops both the daily and the accepted lock files.
# NOTE(review): $LOCK_DAILY / $LOCK_ACCEPTED are unquoted — safe only while
# the paths (defined elsewhere in the file) contain no whitespace.
18 function remove_all_locks() {
19 rm -f $LOCK_DAILY $LOCK_ACCEPTED
22 # If we error out this one is called, *FOLLOWED* by cleanup above
# Error hook: builds a subject line and mails the failing stage's log (or a
# placeholder) to cron@ftp-master. Reads ${error} and ${STAGEFILE}, which are
# presumably set by the caller before this runs — TODO confirm.
24 ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
26 subject="ATTENTION ATTENTION!"
27 if [ "${error}" = "false" ]; then
28 subject="${subject} (continued)"
# (the "else" line between the two branches is elided in this listing)
30 subject="${subject} (interrupted)"
32 subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
# The entire if/else output stream is piped into mail(1) via "fi |" below,
# so the mail body is either the stage log or the "does not exist" note.
34 if [ -r "${STAGEFILE}.log" ]; then
35 cat "${STAGEFILE}.log"
37 echo "file ${STAGEFILE}.log does not exist, sorry"
38 fi | mail -s "${subject}" -a "X-Debian: DAK" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
41 ########################################################################
42 # the actual dinstall functions follow #
43 ########################################################################
45 # pushing merkels QA user, part one
# Pokes the QA host over ssh so it knows dinstall has started; BatchMode
# prevents an interactive password prompt.
# NOTE(review): "SetupTimeOut" is not a stock OpenSSH option (ConnectTimeout
# is given too) — presumably a Debian-patched alias; verify before touching.
47 log "Telling QA user that we start dinstall"
48 ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1
51 # Updating various files
# Runs the external refresh scripts from $scriptsdir; their error handling
# is not visible here.
53 log "Updating Bugs docu, Mirror list and mailing-lists.txt"
55 $scriptsdir/update-bugdoctxt
56 $scriptsdir/update-mirrorlists
57 $scriptsdir/update-mailingliststxt
58 $scriptsdir/update-pseudopackages.sh
61 # The first i18n one, syncing new descriptions
# Pulls the newest DDTP translation data, verifies the project's signed
# timestamp with our own keyring, and only then rsyncs the per-release
# Translation files into the public archive. On any verification or check
# failure a (jokey-subject) warning mail goes to debian-l10n-devel.
63 log "Synchronizing i18n package descriptions"
64 # First sync their newest data
65 cd ${scriptdir}/i18nsync
# "|| true": a failed pull is tolerated; the gpgv gate below decides.
66 rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
68 # Now check if we still know about the packages for which they created the files
69 # is the timestamp signed by us?
# NOTE(review): "if $(gpgv ...)" runs gpgv and then tries to EXECUTE its
# stdout; the conventional form is "if gpgv ...; then". It happens to work
# while gpgv prints nothing to stdout, but this should be cleaned up.
70 if $(gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
71 # now read it. As it's signed by us we are sure the content is what we expect, no need
72 # to do more here. And we only test -d a directory on it anyway.
73 TSTAMP=$(cat timestamp)
74 # do we have the dir still?
75 if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
# Sanity-check the files before publishing them.
77 if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
78 # Yay, worked, lets copy around
79 for dir in stretch sid; do
80 if [ -d dists/${dir}/ ]; then
81 cd dists/${dir}/main/i18n
82 rsync -aq --delete --delete-after --exclude Translation-en.bz2 --exclude Translation-*.diff/ . ${ftpdir}/dists/${dir}/main/i18n/.
84 cd ${scriptdir}/i18nsync
# Failure branches below (their "else" lines are elided in this listing):
# bad file check, missing timestamp dir, bad signature — each logs and mails.
87 echo "ARRRR, bad guys, wrong files, ARRR"
88 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
91 echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
92 echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
95 echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
96 echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -a "X-Debian: DAK" -s "You can't keep blaming yourself. Just blame yourself once, and move on." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
101 log "Checking for cruft in overrides"
# Drops obsolete source/binary associations (body mostly elided).
105 function dominate() {
106 log "Removing obsolete source and binary associations"
# Dry-runs "dak auto-decruft" for each suite (default: unstable and
# experimental), collects the output in a dated report under $webdir and
# mails it. -n = no-act, per the comment below.
110 function autocruft() {
111 log "Check for obsolete binary packages"
112 suites=${1:-"unstable experimental"}
113 # Initially only run with -n and output into a file, to check.
114 dstamp=$(date -u +"%Y-%m-%d_%H:%M")
115 echo "Report for ${dstamp}" > $webdir/auto-cruft-report_${dstamp}.txt
# ${suites} is intentionally unquoted so it word-splits into suite names.
116 for suite in ${suites}; do
117 dak auto-decruft -n -s ${suite} >> $webdir/auto-cruft-report_${dstamp}.txt
# NOTE(review): "-e" presumably means "don't send if body is empty"
# (heirloom mailx semantics) — confirm against the installed mail(1).
119 cat $webdir/auto-cruft-report_${dstamp}.txt | mail -a "X-Debian: DAK" -e -s "Debian archive AUTOMATIC cruft report for ${dstamp}" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" -c niels@thykier.net ftpmaster@ftp-master.debian.org
# Re-imports the Debian and DM keyrings. If the DM import produced output
# (captured in ${OUTFILE}, set on an elided line), it is mailed to
# debian-project via the sendmail heredoc below. No comments are inserted
# past the "<<EOF" line — everything after it is literal mail body.
122 function fingerprints() {
123 log "Updating fingerprints"
124 dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
127 dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
129 if [ -s "${OUTFILE}" ]; then
130 /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
131 From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
132 To: <debian-project@lists.debian.org>
133 Subject: Debian Maintainers Keyring changes
134 Content-Type: text/plain; charset=utf-8
138 The following changes to the debian-maintainers keyring have just been activated:
142 Debian distribution maintenance software,
143 on behalf of the Keyring maintainers
# Dumps override data to text files (body elided).
150 function overrides() {
151 log "Writing overrides into text files"
# Builds the package -> pool-file mapping per public archive, bzip2'd into
# each archive's indices/ directory.
159 log "Generating package / file mapping"
160 for archive in "${public_archives[@]}"; do
161 archiveroot="$(get_archiveroot "${archive}")"
162 dak make-pkg-file-mapping "${archive}" | bzip2 -9 > "${archiveroot}/indices/package-file.map.bz2"
# Regenerates Packages/Sources and Contents for every public archive.
166 function packages() {
167 log "Generating Packages and Sources files"
168 for archive in "${public_archives[@]}"; do
169 log " Generating Packages/Sources for ${archive}"
170 dak generate-packages-sources2 -a "${archive}"
171 log " Generating Contents for ${archive}"
172 dak contents generate -a "${archive}"
# pdiff (index diff) generation, then Release files per archive.
177 log "Generating pdiff files"
178 dak generate-index-diffs
182 log "Generating Release files"
183 for archive in "${public_archives[@]}"; do
184 dak generate-releases -a "${archive}"
# Expires old suite/queue data via dak's cleaners.
188 function dakcleanup() {
189 log "Cleanup old packages/files"
190 dak clean-suites -m 10000
191 dak clean-queues -i "$unchecked"
# Per-archive housekeeping (enclosing function header elided): remove stray
# core files, check symlinks, and publish a recursive ls listing.
198 for archive in "${public_archives[@]}"; do
199 archiveroot="$(get_archiveroot "${archive}")"
202 log "Removing any core files ..."
203 find -type f -name core -print -delete
205 log "Checking symlinks ..."
208 log "Creating recursive directory listing ... "
# TZ=UTC keeps the listing's timestamps stable; ${FILENAME} set elsewhere.
210 TZ=UTC ls -lR | gzip -9c --rsyncable > ${FILENAME}.gz
# Builds the Maintainers / Uploaders indices per public archive and gzips
# them into each archive's indices/ directory.
214 function mkmaintainers() {
218 log 'Creating Maintainers index ... '
220 for archive in "${public_archives[@]}"; do
221 archiveroot="$(get_archiveroot "${archive}")"
222 indices="${archiveroot}/indices"
223 if ! [ -d "${indices}" ]; then
228 dak make-maintainers -a "${archive}" ${scriptdir}/masterfiles/pseudo-packages.maintainers
229 gzip -9v --rsyncable <Maintainers >Maintainers.gz
230 gzip -9v --rsyncable <Uploaders >Uploaders.gz
# Publishes gzipped copies of the per-release override files; the brace
# expansion enumerates release x (plain|extra) x component combinations.
# ${bname} is presumably derived from ${ofile} on an elided line.
234 function copyoverrides() {
235 log 'Copying override files into public view ...'
237 for ofile in ${overridedir}/override.{squeeze,wheezy,jessie,stretch,sid}.{,extra.}{main,contrib,non-free}*; do
239 gzip -9cv --rsyncable ${ofile} > ${indices}/${bname}.gz
240 chmod g+w ${indices}/${bname}.gz
# Generates the per-arch / per-suite / per-source file lists under
# ftp/indices/files/components. Strategy visible here: dump pool paths plus
# architectures from postgres into $ARCHLIST, then derive source.list.gz,
# arch-*.list.gz, suite-*.list.gz, translation-*.list.gz and the combined
# *.files lists. Multi-line SQL strings below are left untouched (no
# comments inserted inside them).
244 function mkfilesindices() {
247 cd $base/ftp/indices/files/components
251 log "Querying postgres"
253 SELECT './pool/' || c.name || '/' || f.filename AS path, a.arch_string AS arch_string
255 JOIN files_archive_map af ON f.id = af.file_id
256 JOIN component c ON af.component_id = c.id
257 JOIN archive ON af.archive_id = archive.id
260 JOIN architecture a ON b.architecture = a.id)
262 WHERE archive.name = 'ftp-master'
263 ORDER BY path, arch_string
265 psql -At -c "$query" >$ARCHLIST
# Helper: echo every path plus all of its parent directories, each once
# (the %d hash dedupes); used so directory entries appear in the lists.
268 perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
# Helper ("poolfirst"-style ordering, presumably): pool paths stream out
# first, everything else is buffered and printed afterwards.
271 perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
274 log "Generating sources list"
276 sed -n 's/|$//p' $ARCHLIST
278 find ./dists -maxdepth 1 \! -type d
279 find ./dists \! -type d | grep "/source/"
280 ) | sort -u | gzip -9 > source.list.gz
282 log "Generating arch lists"
# "echo amd64" guarantees amd64 is always present; "all" is handled by
# merging |all entries into every arch list below.
284 ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
286 (sed -n "s/|$a$//p" $ARCHLIST
287 sed -n 's/|all$//p' $ARCHLIST
290 find ./dists -maxdepth 1 \! -type d
291 find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
292 ) | sort -u | gzip -9 > arch-$a.list.gz
295 log "Generating suite lists"
# printf %d coerces/validates $1 as an integer suite id before it is
# interpolated into the SQL below.
298 local suite_id="$(printf %d $1)"
301 SELECT DISTINCT './pool/' || c.name || '/' || f.filename
303 (SELECT sa.source AS source
304 FROM src_associations sa
305 WHERE sa.suite = $suite_id
308 FROM extra_src_references esr
309 JOIN bin_associations ba ON esr.bin_id = ba.bin
310 WHERE ba.suite = $suite_id
312 SELECT b.source AS source
313 FROM bin_associations ba
314 JOIN binaries b ON ba.bin = b.id WHERE ba.suite = $suite_id) s
315 JOIN dsc_files df ON s.source = df.source
316 JOIN files f ON df.file = f.id
317 JOIN files_archive_map af ON f.id = af.file_id
318 JOIN component c ON af.component_id = c.id
319 JOIN archive ON af.archive_id = archive.id
320 WHERE archive.name = 'ftp-master'
322 psql -F' ' -A -t -c "$query"
325 SELECT './pool/' || c.name || '/' || f.filename
326 FROM bin_associations ba
327 JOIN binaries b ON ba.bin = b.id
328 JOIN files f ON b.file = f.id
329 JOIN files_archive_map af ON f.id = af.file_id
330 JOIN component c ON af.component_id = c.id
331 JOIN archive ON af.archive_id = archive.id
332 WHERE ba.suite = $suite_id AND archive.name = 'ftp-master'
334 psql -F' ' -A -t -c "$query"
337 psql -F' ' -At -c "SELECT id, suite_name FROM suite" |
338 while read id suite; do
339 [ -e $base/ftp/dists/$suite ] || continue
342 distname=$(cd dists; readlink $suite || echo $suite)
343 find ./dists/$distname \! -type d
344 for distdir in ./dists/*; do
345 [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
349 ) | sort -u | gzip -9 > suite-${suite}.list.gz
352 log "Finding everything on the ftp site to generate sundries"
353 (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
356 zcat *.list.gz | cat - *.list | sort -u |
357 diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
359 log "Generating files list"
362 (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
363 cat - sundries.list dists.list project.list docs.list indices.list |
364 sort -u | poolfirst > ../arch-$a.files
368 for dist in sid jessie stretch; do
369 find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
373 (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-jessie.list.gz ; zcat translation-stretch.list.gz) |
374 sort -u | poolfirst > ../typical.files
# Builds an md5/dsync file index per public archive with dsync-flist, then
# gzips the md5sums list and hardlinks duplicate files.
381 function mkchecksums() {
382 local archiveroot dsynclist md5list
384 for archive in "${public_archives[@]}"; do
385 archiveroot="$(get_archiveroot "${archive}")"
386 dsynclist=$dbdir/dsync.${archive}.list
387 md5list=${archiveroot}/indices/md5sums
389 log -n "Creating md5 / dsync index file for ${archive}... "
392 ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
393 ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
# link-dups may legitimately fail; "|| true" keeps the loop going.
394 ${bindir}/dsync-flist -q link-dups $dsynclist || true
# Mirror regeneration (enclosing function header elided). The archive serial
# written into ${TRACEFILE} is whichever is larger: today's YYYYMMDD01 or the
# previous trace-file serial + 1 (so multiple runs per day keep increasing).
399 local archiveroot mirrordir
401 log "Regenerating \"public\" mirror/ hardlink fun"
402 DATE_SERIAL=$(date +"%Y%m%d01")
403 FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} )
404 if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
405 SERIAL="$DATE_SERIAL"
407 SERIAL="$FILESOAPLUS1"
409 date -u > ${TRACEFILE}
410 echo "Using dak v1" >> ${TRACEFILE}
411 echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
412 echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
414 # Ugly "hack", but hey, it does what we want.
415 cp ${TRACEFILE} ${TRACEFILE_BDO}
# Hardlink-based mirror copy: --link-dest against the archive root means
# unchanged files cost no extra disk space.
417 for archive in "${public_archives[@]}"; do
418 archiveroot="$(get_archiveroot "${archive}")"
419 mirrordir="${archiveroot}/../mirror"
421 rsync -aH --link-dest ${archiveroot} --delete --delete-after --delete-excluded --exclude Packages.*.new --exclude Sources.*.new --ignore-errors ${archiveroot}/. .
# Prunes old database dumps via the external expire_dumps helper.
426 log "Expiring old database dumps..."
428 $scriptsdir/expire_dumps -d . -p -f "dump_*"
# Clears stale entries from the transitions config.
431 function transitionsclean() {
432 log "Removing out of date transitions..."
434 dak transitions -c -a
# Regenerates the DM (Debian Maintainer) per-source permissions export.
438 log "Updating DM permissions page"
439 dak acl export-per-source dm >$exportdir/dm.txt
# Auto-categorizes ftp.debian.org bugs, as an unprivileged user.
443 log "Categorizing uncategorized bugs filed against ftp.debian.org"
444 sudo -u dak-unpriv dak bts-categorize
447 function ddaccess() {
448 # Tell our dd accessible mirror to sync itself up. Including ftp dir.
449 log "Trigger dd accessible parts sync including ftp dir"
450 ${scriptsdir}/sync-dd dd-sync dd-sync1 dd-sync2 sync
# Verifies the public mirror copy against every InRelease file (existence,
# size, SHA1 of each listed file), counting failures in ${broken}; if clean,
# writes the mirrorstart trace and kicks off the mirror push scripts in the
# background as user archvsync.
453 function mirrorpush() {
454 log "Checking the public archive copy"
455 cd ${mirrordir}/dists
458 for release in $(find . -name "InRelease"); do
459 echo "Processing: ${release}"
460 subdir=${release%/InRelease}
# Fed from the process substitution at the "done" below: only the SHA1
# section of the InRelease file (between "SHA1:" and "SHA256:").
461 while read SHASUM SIZE NAME; do
462 if ! [ -f "${subdir}/${NAME}" ]; then
463 bname=$(basename ${NAME})
# Uncompressed Packages/Sources/Translation-* are not kept on disk, so a
# missing file of one of those names is expected, not an error.
464 if [[ "${bname}" =~ ^(Packages|Sources|Translation-[a-zA-Z_]+)$ ]]; then
465 # We don't keep unpacked files, don't check for their existence.
466 # We might want to go and check their unpacked shasum, but right now
467 # I don't care. I believe it should be enough if all the packed shasums
471 broken=$(( broken + 1 ))
472 echo "File ${subdir}/${NAME} is missing"
476 # We do have symlinks in the tree (see the contents files currently).
477 # So we use "readlink -f" to check the size of the target, as that's basically
478 # what gen-releases does
479 fsize=$(stat -c %s $(readlink -f "${subdir}/${NAME}"))
480 if [ ${fsize} -ne ${SIZE} ]; then
481 broken=$(( broken + 1 ))
482 echo "File ${subdir}/${NAME} has size ${fsize}, expected is ${SIZE}"
486 fshasum=$(sha1sum $(readlink -f "${subdir}/${NAME}"))
# sha1sum prints "<hash>  <file>"; strip everything after the first space.
487 fshasum=${fshasum%% *}
488 if [ "${fshasum}" != "${SHASUM}" ]; then
489 broken=$(( broken + 1 ))
490 echo "File ${subdir}/${NAME} has checksum ${fshasum}, expected is ${SHASUM}"
493 done < <(sed '1,/SHA1:/d' "${release}" | sed '/SHA256:/,$d')
496 if [ $broken -gt 0 ]; then
497 log_error "Trouble with the public mirror, found ${broken} errors"
501 log "Starting the mirrorpush"
502 date -u > /srv/ftp.debian.org/web/mirrorstart
503 echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
504 echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/mirrorstart
# Both pushes run in the background; their exit status is not checked here.
505 sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
506 sudo -H -u archvsync /home/archvsync/runmirrors -a backports > ~dak/runmirrorsbpo.log 2>&1 &
# Triggers the backports mirror update as the backports user.
509 function mirrorpush-backports() {
510 log "Syncing backports mirror"
511 sudo -u backports /home/backports/bin/update-archive
# i18n export (enclosing function header elided): dumps per-suite package
# lists into a timestamped directory, signs the timestamp with our key (so
# the sync code at the top of this file can verify it later), repoints the
# i18n symlink, and prunes exports older than 2 days.
515 log "Exporting package data foo for i18n project"
516 STAMP=$(date "+%Y%m%d%H%M")
517 mkdir -p ${scriptdir}/i18n/${STAMP}
518 cd ${scriptdir}/i18n/${STAMP}
519 for suite in stable testing unstable; do
# "dak admin s show" prints "Codename: <name>"; keep only the last word.
520 codename=$(dak admin s show ${suite}|grep '^Codename')
521 codename=${codename##* }
522 echo "Codename is ${codename}"
523 dak control-suite -l ${suite} >${codename}
525 echo "${STAMP}" > timestamp
526 gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 473041FA --detach-sign -o timestamp.gpg timestamp
# -T: never follow an existing symlink as a directory.
530 ln -sfT ${scriptdir}/i18n/${STAMP} i18n
533 find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
# Refreshes ftp traffic stats (data file + R plot) and dak space/count stats.
537 log "Updating stats data"
539 $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
540 R --slave --vanilla < $base/misc/ftpstats.R
541 dak stats arch-space > $webdir/arch-space
542 dak stats pkg-nums > $webdir/pkg-nums
# Deletes txid_* marker files older than ~3 months (90 days).
545 function cleantransactions() {
546 log "Cleanup transaction ids older than 3 months"
548 find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -print0 | xargs -0 --no-run-if-empty rm
# Feeds a log file ($1) to the external stats tool.
551 function logstats() {
552 $masterdir/tools/logs.py "$1"
555 # save timestamp when we start
556 function savetimestamp() {
557 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
558 echo ${NOW} > "${dbdir}/dinstallstart"
# Mails the whole dinstall log to cron@ftp-master; relies on ${NOW}.
561 function maillogfile() {
562 cat "$LOGFILE" | mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
# Renames $LOGFILE to a timestamped name (preferring the saved start stamp),
# runs the stats tool on it, and compresses it. The fallback branch uses the
# current time instead (its "else" line is elided in this listing).
565 function renamelogfile() {
566 if [ -f "${dbdir}/dinstallstart" ]; then
567 NOW=$(cat "${dbdir}/dinstallstart")
569 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
570 logstats "$logdir/dinstall_${NOW}.log"
571 bzip2 -9 "$logdir/dinstall_${NOW}.log"
573 error "Problem, I don't know when dinstall started, unable to do log statistics."
574 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
576 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
577 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# Exports the list of source packages currently in testing.
581 function testingsourcelist() {
582 dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
585 # do a last run of process-unchecked before dinstall is on.
586 function process_unchecked() {
587 log "Processing the unchecked queue"
# Presumably passed to the queue processor on an elided line; -p looks like
# "proceed without the lock" — TODO confirm against dak's queue script.
588 UNCHECKED_WITHOUT_LOCK="-p"
593 # Function to update a "statefile" telling people what we are doing
596 # This should be called with the argument(s)
597 # - Status name we want to show.
# Writes the current state into ${DINSTALLSTATE} via the heredoc below;
# nothing is inserted after "<<EOF" since that text is the file's content.
600 RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
601 cat >"${DINSTALLSTATE}" <<EOF
602 Dinstall start: ${DINSTALLBEGIN}
604 Action start: ${RIGHTNOW}
608 # extract changelogs and stuff
# Under $LOCK_CHANGELOG (lockfile -r3: retry 3 times), extracts changelog
# metadata for ftp-master and backports, rsyncs both exports to their public
# locations, and pokes the static-mirror updater in the background.
609 function changelogs() {
610 if lockfile -r3 $LOCK_CHANGELOG; then
611 log "Extracting changelogs"
612 dak make-changelog -e -a ftp-master
613 [ -f ${exportdir}/changelogs/filelist.yaml ] && xz -f ${exportdir}/changelogs/filelist.yaml
614 mkdir -p ${exportpublic}/changelogs
615 cd ${exportpublic}/changelogs
616 rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
617 sudo -H -u staticsync /usr/local/bin/static-update-component metadata.ftp-master.debian.org >/dev/null 2>&1 &
619 dak make-changelog -e -a backports
620 [ -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml ] && xz -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml
621 mkdir -p /srv/backports-master.debian.org/rsync/export/changelogs
622 cd /srv/backports-master.debian.org/rsync/export/changelogs
623 rsync -aHW --delete --delete-after --ignore-errors /srv/backports-master.debian.org/export/changelogs/. .
# NOTE(review): the lock is removed and THEN a trap is installed to remove
# it again on exit — as shown this ordering looks inverted (the trap should
# normally be set right after acquiring the lock). Lines are elided here, so
# confirm against the full file before changing.
624 remove_changelog_lock
625 trap remove_changelog_lock EXIT TERM HUP INT QUIT
# Commits the current pdiff-relevant index files (Contents/Packages/Sources)
# into a git tree, tagged by timestamp.
629 function gitpdiff() {
630 # Might be that we want to change this to have more than one git repository.
631 # Advantage of one is that we do not need much space in terms of storage in git itself,
632 # git gc is pretty good on our input.
633 # But it might be faster. Well, lets test.
634 log "Adjusting the git tree for pdiffs"
637 # The regex needs the architectures separated with \|
# Joins "dak admin a list" output into a single \|-separated alternation.
638 garchs=$(dak admin a list|sed -e ':q;N;s/\n/\\|/g;t q')
640 # First, get all the files we want to work on. ../dists/ is a symlink to the real dists/ we
642 # Also, we only want contents, packages and sources.
# NOTE(review): the find regex interpolates $archs, but the variable built
# above is named garchs — as shown, $archs is never set here. Either a rename
# on an elided line or a genuine bug; verify against the full file.
643 for file in $(find ../dists/ -regex ".*/\(Contents-\($archs\)\|\(Packages\|Sources\)\).gz"); do
652 # Second, add all there is into git
655 # Maybe we want to make this the same for tag and commit? But well, shouldn't matter
657 TAGD=$(date +%Y-%m-%d-%H-%M)
# NOTE(review): commits with ${COMD}, which is not set in the visible lines
# (only TAGD is) — presumably defined on an elided line; confirm.
658 git commit -m "Commit of ${COMD}"