2 # Timestamp. Used for dinstall stat graphs
# $1 is presumably the stage name being timed — TODO confirm against caller.
4 echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
# Lock-management helpers. NOTE(review): function bodies are partially
# elided in this view; only the visible lines are documented.
8 function remove_daily_lock() {
12 # Remove changelog lock
13 function remove_changelog_lock() {
# Drop every dinstall lock at once (daily + accepted queue).
18 function remove_all_locks() {
# NOTE(review): $LOCK_DAILY/$LOCK_ACCEPTED are unquoted; presumably
# space-free paths set elsewhere in this file — confirm before quoting.
19 rm -f $LOCK_DAILY $LOCK_ACCEPTED
22 # If we error out this one is called, *FOLLOWED* by cleanup above
# Error handler (the enclosing function line is elided in this view):
# mails the failing stage's log to the ftpmaster cron alias. ${error} and
# ${STAGEFILE} are presumably set by the stage driver — TODO confirm.
24 ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
26 subject="ATTENTION ATTENTION!"
# "error=false" apparently means dinstall continues past this stage;
# otherwise the run was interrupted.
27 if [ "${error}" = "false" ]; then
28 subject="${subject} (continued)"
30 subject="${subject} (interrupted)"
32 subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
# The whole if/else output (log contents or a placeholder line) is piped
# into a single mail invocation below.
34 if [ -r "${STAGEFILE}.log" ]; then
35 cat "${STAGEFILE}.log"
37 echo "file ${STAGEFILE}.log does not exist, sorry"
38 fi | mail -s "${subject}" -a "X-Debian: DAK" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
41 ########################################################################
42 # the actual dinstall functions follow #
43 ########################################################################
45 # pushing merkels QA user, part one
# Notify the QA host that dinstall is starting.
47 log "Telling QA user that we start dinstall"
# BatchMode forbids password prompts; both timeouts keep an unreachable QA
# host from stalling dinstall. "sleep 1" is presumably just a harmless
# remote command — the login itself is the signal; TODO confirm.
48 ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1
51 # Updating various files
53 log "Updating Bugs docu, Mirror list and mailing-lists.txt"
# Each helper script refreshes one exported text file; they are run
# sequentially and (in this view) without explicit error checks.
55 $scriptsdir/update-bugdoctxt
56 $scriptsdir/update-mirrorlists
57 $scriptsdir/update-mailingliststxt
58 $scriptsdir/update-pseudopackages.sh
61 # The first i18n one, syncing new descriptions
63 log "Synchronizing i18n package descriptions"
64 # First sync their newest data
65 cd ${scriptdir}/i18nsync
# "|| true": a failed sync must not abort dinstall; we proceed with
# whatever data is already present locally.
66 rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
68 # Now check if we still know about the packages for which they created the files
69 # is the timestamp signed by us?
# NOTE(review): the $(...) wrapper runs gpgv's (empty) stdout as a command;
# plain `if gpgv ...; then` is the usual spelling — confirm before changing.
70 if $(gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
71 # now read it. As its signed by us we are sure the content is what we expect, no need
72 # to do more here. And we only test -d a directory on it anyway.
73 TSTAMP=$(cat timestamp)
74 # do we have the dir still?
75 if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
# Sanity-check the translation files before publishing them.
77 if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
78 # Yay, worked, lets copy around
79 for dir in stretch sid; do
80 if [ -d dists/${dir}/ ]; then
81 cd dists/${dir}/main/i18n
# Publish everything except files the archive generates itself
# (Translation-en.bz2 and the pdiff directories).
82 rsync -aq --delete --delete-after --exclude Translation-en.bz2 --exclude Translation-*.diff/ . ${ftpdir}/dists/${dir}/main/i18n/.
84 cd ${scriptdir}/i18nsync
# Check failed: refuse the data and notify the l10n list.
87 echo "ARRRR, bad guys, wrong files, ARRR"
88 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
# Timestamp directory is gone: nothing to publish; notify the l10n list.
91 echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
92 echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
# Signature verification failed: treat the synced data as untrusted.
95 echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
96 echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -a "X-Debian: DAK" -s "You can't keep blaming yourself. Just blame yourself once, and move on." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
# Report override entries that no longer match any package.
101 log "Checking for cruft in overrides"
# dominate: drop source/binary associations obsoleted by newer versions
# (body partially elided in this view).
105 function dominate() {
106 log "Removing obsolete source and binary associations"
110 function autocruft() {
111 log "Check for obsolete binary packages"
112 # Initially only run with -n and output into a file, to check.
# Dry-run (-n) decruft of unstable and experimental; the result is only
# mailed out, nothing is removed automatically.
113 local dstamp=$(date -u +"%Y-%m-%d_%H:%M")
114 local report=${webdir}/auto-cruft-report_${dstamp}.txt
115 echo "Report for ${dstamp}" > ${report}
116 dak auto-decruft -n -s unstable >> ${report}
# "NVIU" = removal message tag for "Newer Version In Unstable".
117 dak auto-decruft -n -s experimental --if-newer-version-in unstable --if-newer-version-in-rm-msg "NVIU" >> ${report}
# mail -e: presumably "only send when input is non-empty" — TODO confirm
# against the installed mailx variant.
118 cat ${report} | mail -a "X-Debian: DAK" -e -s "Debian archive AUTOMATIC cruft report for ${dstamp}" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" -c niels@thykier.net ftpmaster@ftp-master.debian.org
121 function fingerprints() {
122 log "Updating fingerprints"
123 dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
# Import the DM keyring; the change summary lands in ${OUTFILE}, which is
# presumably created in an elided line above — TODO confirm.
126 dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
# Only mail debian-project when the import actually reported changes
# (-s: file exists and is non-empty). Mail body is the heredoc below;
# do not edit it — it is user-visible text.
128 if [ -s "${OUTFILE}" ]; then
129 /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
130 From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
131 To: <debian-project@lists.debian.org>
132 Subject: Debian Maintainers Keyring changes
133 Content-Type: text/plain; charset=utf-8
137 The following changes to the debian-maintainers keyring have just been activated:
141 Debian distribution maintenance software,
142 on behalf of the Keyring maintainers
149 function overrides() {
150 log "Writing overrides into text files"
# Package -> pool-file mapping, one bzip2-compressed map per public
# archive, published under its indices/ directory.
158 log "Generating package / file mapping"
159 for archive in "${public_archives[@]}"; do
160 archiveroot="$(get_archiveroot "${archive}")"
161 dak make-pkg-file-mapping "${archive}" | bzip2 -9 > "${archiveroot}/indices/package-file.map.bz2"
165 function packages() {
166 log "Generating Packages and Sources files"
# Regenerate Packages/Sources indices and Contents for every public archive.
167 for archive in "${public_archives[@]}"; do
168 log " Generating Packages/Sources for ${archive}"
169 dak generate-packages-sources2 -a "${archive}"
170 log " Generating Contents for ${archive}"
171 dak contents generate -a "${archive}"
# Incremental index diffs (pdiffs) for apt clients.
176 log "Generating pdiff files"
177 dak generate-index-diffs
# Release files are (re)generated after the index files above so the
# checksums they contain are current.
181 log "Generating Release files"
182 for archive in "${public_archives[@]}"; do
183 dak generate-releases -a "${archive}"
187 function dakcleanup() {
188 log "Cleanup old packages/files"
# Expire files no suite references any more, and stale queue entries.
189 dak clean-suites -m 10000
190 dak clean-queues -i "$unchecked"
# Per-archive housekeeping (enclosing function elided in this view):
# drop stray core files and publish a recursive ls-lR listing.
197 for archive in "${public_archives[@]}"; do
198 archiveroot="$(get_archiveroot "${archive}")"
201 log "Removing any core files ..."
202 find -type f -name core -print -delete
204 log "Checking symlinks ..."
207 log "Creating recursive directory listing ... "
# TZ=UTC keeps listings reproducible; --rsyncable keeps mirror deltas small.
209 TZ=UTC ls -lR | gzip -9c --rsyncable > ${FILENAME}.gz
213 function mkmaintainers() {
217 log 'Creating Maintainers index ... '
219 for archive in "${public_archives[@]}"; do
220 archiveroot="$(get_archiveroot "${archive}")"
221 indices="${archiveroot}/indices"
# Skip archives that do not publish an indices/ directory.
222 if ! [ -d "${indices}" ]; then
227 dak make-maintainers -a "${archive}" ${scriptdir}/masterfiles/pseudo-packages.maintainers
# Compress via stdin/stdout so the uncompressed files are kept alongside.
228 gzip -9v --rsyncable <Maintainers >Maintainers.gz
229 gzip -9v --rsyncable <Uploaders >Uploaders.gz
233 function copyoverrides() {
234 log 'Copying override files into public view ...'
# Glob covers plain and ".extra" override files for each listed suite and
# component; ${indices}/${bname} presumably set in elided lines — confirm.
236 for ofile in ${overridedir}/override.{squeeze,wheezy,jessie,stretch,sid}.{,extra.}{main,contrib,non-free}*; do
238 gzip -9cv --rsyncable ${ofile} > ${indices}/${bname}.gz
239 chmod g+w ${indices}/${bname}.gz
# mkfilesindices: build per-arch / per-suite / per-source file lists under
# indices/files/components for partial mirroring. Heavily elided in this
# view; SQL fragments below belong to query strings whose opening/closing
# quotes are on elided lines — keep them untouched.
243 function mkfilesindices() {
246 cd $base/ftp/indices/files/components
# Query 1: every pool path with the architecture(s) it belongs to.
250 log "Querying postgres"
252 SELECT './pool/' || c.name || '/' || f.filename AS path, a.arch_string AS arch_string
254 JOIN files_archive_map af ON f.id = af.file_id
255 JOIN component c ON af.component_id = c.id
256 JOIN archive ON af.archive_id = archive.id
259 JOIN architecture a ON b.architecture = a.id)
261 WHERE archive.name = 'ftp-master'
262 ORDER BY path, arch_string
264 psql -At -c "$query" >$ARCHLIST
267 perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
270 perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
# Source list: pool entries with no arch suffix, plus dists/ source trees.
273 log "Generating sources list"
275 sed -n 's/|$//p' $ARCHLIST
277 find ./dists -maxdepth 1 \! -type d
278 find ./dists \! -type d | grep "/source/"
279 ) | sort -u | gzip -9 > source.list.gz
# Per-architecture lists: arch-specific plus arch-all pool files, plus the
# matching dists/ artifacts (installer, Contents, binary-$a, ...).
281 log "Generating arch lists"
283 ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
285 (sed -n "s/|$a$//p" $ARCHLIST
286 sed -n 's/|all$//p' $ARCHLIST
289 find ./dists -maxdepth 1 \! -type d
290 find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
291 ) | sort -u | gzip -9 > arch-$a.list.gz
# Per-suite lists, driven by two SQL queries: source files (via
# src_associations / extra_src_references) and binary files.
294 log "Generating suite lists"
297 local suite_id="$(printf %d $1)"
300 SELECT DISTINCT './pool/' || c.name || '/' || f.filename
302 (SELECT sa.source AS source
303 FROM src_associations sa
304 WHERE sa.suite = $suite_id
307 FROM extra_src_references esr
308 JOIN bin_associations ba ON esr.bin_id = ba.bin
309 WHERE ba.suite = $suite_id
311 SELECT b.source AS source
312 FROM bin_associations ba
313 JOIN binaries b ON ba.bin = b.id WHERE ba.suite = $suite_id) s
314 JOIN dsc_files df ON s.source = df.source
315 JOIN files f ON df.file = f.id
316 JOIN files_archive_map af ON f.id = af.file_id
317 JOIN component c ON af.component_id = c.id
318 JOIN archive ON af.archive_id = archive.id
319 WHERE archive.name = 'ftp-master'
321 psql -F' ' -A -t -c "$query"
324 SELECT './pool/' || c.name || '/' || f.filename
325 FROM bin_associations ba
326 JOIN binaries b ON ba.bin = b.id
327 JOIN files f ON b.file = f.id
328 JOIN files_archive_map af ON f.id = af.file_id
329 JOIN component c ON af.component_id = c.id
330 JOIN archive ON af.archive_id = archive.id
331 WHERE ba.suite = $suite_id AND archive.name = 'ftp-master'
333 psql -F' ' -A -t -c "$query"
# One list per suite that exists under dists/; follow codename symlinks.
336 psql -F' ' -At -c "SELECT id, suite_name FROM suite" |
337 while read id suite; do
338 [ -e $base/ftp/dists/$suite ] || continue
341 distname=$(cd dists; readlink $suite || echo $suite)
342 find ./dists/$distname \! -type d
343 for distdir in ./dists/*; do
344 [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
348 ) | sort -u | gzip -9 > suite-${suite}.list.gz
# Sundries: everything on the ftp site not covered by any list above.
351 log "Finding everything on the ftp site to generate sundries"
352 (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
355 zcat *.list.gz | cat - *.list | sort -u |
356 diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
# Final combined per-arch file lists; poolfirst is presumably a helper
# defined in an elided line — confirm.
358 log "Generating files list"
361 (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
362 cat - sundries.list dists.list project.list docs.list indices.list |
363 sort -u | poolfirst > ../arch-$a.files
# Translation lists per release, plus a "typical mirror" convenience list.
367 for dist in sid jessie stretch; do
368 find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
372 (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-jessie.list.gz ; zcat translation-stretch.list.gz) |
373 sort -u | poolfirst > ../typical.files
# mkchecksums: per-archive md5sums index plus dsync file list; also tries
# to hardlink duplicate files to save space.
380 function mkchecksums() {
381 local archiveroot dsynclist md5list
383 for archive in "${public_archives[@]}"; do
384 archiveroot="$(get_archiveroot "${archive}")"
385 dsynclist=$dbdir/dsync.${archive}.list
386 md5list=${archiveroot}/indices/md5sums
388 log -n "Creating md5 / dsync index file for ${archive}... "
391 ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
# gzip -9n: omit timestamp/name so output is reproducible.
392 ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
# Hardlinking duplicates is best-effort only; never abort dinstall for it.
393 ${bindir}/dsync-flist -q link-dups $dsynclist || true
398 local archiveroot mirrordir
400 log "Regenerating \"public\" mirror/ hardlink fun"
# Archive serial = max(today's date-based serial, previous serial + 1) so
# it is strictly increasing even with several runs per day.
401 DATE_SERIAL=$(date +"%Y%m%d01")
402 FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} )
403 if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
404 SERIAL="$DATE_SERIAL"
406 SERIAL="$FILESOAPLUS1"
# Rewrite the trace file mirrors use to detect a fresh push.
408 date -u > ${TRACEFILE}
409 echo "Using dak v1" >> ${TRACEFILE}
410 echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
411 echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
413 # Ugly "hack", but hey, it does what we want.
414 cp ${TRACEFILE} ${TRACEFILE_BDO}
416 for archive in "${public_archives[@]}"; do
417 archiveroot="$(get_archiveroot "${archive}")"
418 mirrordir="${archiveroot}/../mirror"
# -H + --link-dest: populate mirror/ as hardlinks into the archive tree
# (no extra disk usage); in-progress *.new index files are excluded.
420 rsync -aH --link-dest ${archiveroot} --delete --delete-after --delete-excluded --exclude Packages.*.new --exclude Sources.*.new --ignore-errors ${archiveroot}/. .
# Expire old database dumps (enclosing function elided in this view).
425 log "Expiring old database dumps..."
427 $scriptsdir/expire_dumps -d . -p -f "dump_*"
430 function transitionsclean() {
431 log "Removing out of date transitions..."
433 dak transitions -c -a
# Export Debian Maintainer upload permissions to the web tree.
437 log "Updating DM permissions page"
438 dak acl export-per-source dm >$exportdir/dm.txt
# Run unprivileged: bts-categorize only needs to talk to the BTS.
442 log "Categorizing uncategorized bugs filed against ftp.debian.org"
443 sudo -u dak-unpriv dak bts-categorize
446 function ddaccess() {
447 # Tell our dd accessible mirror to sync itself up. Including ftp dir.
448 log "Trigger dd accessible parts sync including ftp dir"
449 ${scriptsdir}/sync-dd dd-sync dd-sync1 dd-sync2 sync
# mirrorpush: verify the public mirror copy against each InRelease file
# (existence, size, SHA1), then kick off the mirror push scripts.
452 function mirrorpush() {
453 log "Checking the public archive copy"
454 cd ${mirrordir}/dists
457 for release in $(find . -name "InRelease"); do
458 echo "Processing: ${release}"
459 subdir=${release%/InRelease}
# Reads "checksum size name" triples from the SHA1 section (see the
# process substitution on the done-line below).
460 while read SHASUM SIZE NAME; do
461 if ! [ -f "${subdir}/${NAME}" ]; then
462 bname=$(basename ${NAME})
463 if [[ "${bname}" =~ ^(Packages|Sources|Translation-[a-zA-Z_]+)$ ]]; then
464 # We don't keep unpacked files, don't check for their existance.
465 # We might want to go and check their unpacked shasum, but right now
466 # I don't care. I believe it should be enough if all the packed shasums
470 broken=$(( broken + 1 ))
471 echo "File ${subdir}/${NAME} is missing"
475 # We do have symlinks in the tree (see the contents files currently).
476 # So we use "readlink -f" to check the size of the target, as thats basically
477 # what gen-releases does
478 fsize=$(stat -c %s $(readlink -f "${subdir}/${NAME}"))
479 if [ ${fsize} -ne ${SIZE} ]; then
480 broken=$(( broken + 1 ))
481 echo "File ${subdir}/${NAME} has size ${fsize}, expected is ${SIZE}"
485 fshasum=$(sha1sum $(readlink -f "${subdir}/${NAME}"))
486 fshasum=${fshasum%% *}
487 if [ "${fshasum}" != "${SHASUM}" ]; then
488 broken=$(( broken + 1 ))
489 echo "File ${subdir}/${NAME} has checksum ${fshasum}, expected is ${SHASUM}"
# sed pair keeps only the lines between "SHA1:" and "SHA256:" headers,
# i.e. the SHA1 checksum section of the InRelease file.
492 done < <(sed '1,/SHA1:/d' "${release}" | sed '/SHA256:/,$d')
495 if [ $broken -gt 0 ]; then
496 log_error "Trouble with the public mirror, found ${broken} errors"
# Mirror verified: record push metadata, then launch both runmirrors
# invocations (main + backports) in the background.
500 log "Starting the mirrorpush"
501 date -u > /srv/ftp.debian.org/web/mirrorstart
502 echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
503 echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/mirrorstart
504 sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
505 sudo -H -u archvsync /home/archvsync/runmirrors -a backports > ~dak/runmirrorsbpo.log 2>&1 &
508 function mirrorpush-backports() {
509 log "Syncing backports mirror"
510 sudo -u backports /home/backports/bin/update-archive
# Export suite lists for the i18n project and sign a timestamp file so the
# i18n1 stage (earlier in this file) can later verify the export came
# from us (enclosing function elided in this view).
514 log "Exporting package data foo for i18n project"
515 STAMP=$(date "+%Y%m%d%H%M")
516 mkdir -p ${scriptdir}/i18n/${STAMP}
517 cd ${scriptdir}/i18n/${STAMP}
518 for suite in stable testing unstable; do
519 codename=$(dak admin s show ${suite}|grep '^Codename')
# Keep only the last whitespace-separated word (the codename itself).
520 codename=${codename##* }
521 echo "Codename is ${codename}"
522 dak control-suite -l ${suite} >${codename}
524 echo "${STAMP}" > timestamp
525 gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 473041FA --detach-sign -o timestamp.gpg timestamp
# Atomically repoint the "i18n" symlink at the fresh export.
529 ln -sfT ${scriptdir}/i18n/${STAMP} i18n
# Prune exports older than 2 days, but never the one just created.
532 find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
# Refresh dinstall statistics and graphs (enclosing function elided).
536 log "Updating stats data"
538 $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
# R script presumably renders graphs from ftpstats.data — TODO confirm.
539 R --slave --vanilla < $base/misc/ftpstats.R
540 dak stats arch-space > $webdir/arch-space
541 dak stats pkg-nums > $webdir/pkg-nums
544 function cleantransactions() {
545 log "Cleanup transaction ids older than 3 months"
# NUL-delimited find|xargs handles any filename safely.
547 find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -print0 | xargs -0 --no-run-if-empty rm
# Feed a finished logfile to the stats tool.
550 function logstats() {
551 $masterdir/tools/logs.py "$1"
554 # save timestamp when we start
555 function savetimestamp() {
556 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
557 echo ${NOW} > "${dbdir}/dinstallstart"
# Mail the full dinstall log to the cron alias.
560 function maillogfile() {
561 cat "$LOGFILE" | mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
# Rename the logfile after the run using the start timestamp recorded by
# savetimestamp; fall back to "now" (and skip stats) if that is missing.
564 function renamelogfile() {
565 if [ -f "${dbdir}/dinstallstart" ]; then
566 NOW=$(cat "${dbdir}/dinstallstart")
568 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
569 logstats "$logdir/dinstall_${NOW}.log"
570 bzip2 -9 "$logdir/dinstall_${NOW}.log"
572 error "Problem, I don't know when dinstall started, unable to do log statistics."
573 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
575 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
576 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# Export the list of source packages currently in testing.
580 function testingsourcelist() {
581 dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
584 # do a last run of process-unchecked before dinstall is on.
585 function process_unchecked() {
586 log "Processing the unchecked queue"
# "-p": presumably run without taking the queue lock (dinstall already
# holds it at this point) — TODO confirm against the queue daemon.
587 UNCHECKED_WITHOUT_LOCK="-p"
592 # Function to update a "statefile" telling people what we are doing
595 # This should be called with the argument(s)
596 # - Status name we want to show.
# Heredoc below is the machine-read statefile format; do not edit its text.
599 RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
600 cat >"${DINSTALLSTATE}" <<EOF
601 Dinstall start: ${DINSTALLBEGIN}
603 Action start: ${RIGHTNOW}
607 # extract changelogs and stuff
608 function changelogs() {
# Take the changelog lock (3 retries); skip the whole stage if another
# run still holds it.
609 if lockfile -r3 $LOCK_CHANGELOG; then
610 log "Extracting changelogs"
611 dak make-changelog -e -a ftp-master
612 [ -f ${exportdir}/changelogs/filelist.yaml ] && xz -f ${exportdir}/changelogs/filelist.yaml
613 mkdir -p ${exportpublic}/changelogs
614 cd ${exportpublic}/changelogs
615 rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
# Fire-and-forget push of the public metadata component.
616 sudo -H -u staticsync /usr/local/bin/static-update-component metadata.ftp-master.debian.org >/dev/null 2>&1 &
# Same dance for the backports archive.
618 dak make-changelog -e -a backports
619 [ -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml ] && xz -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml
620 mkdir -p /srv/backports-master.debian.org/rsync/export/changelogs
621 cd /srv/backports-master.debian.org/rsync/export/changelogs
622 rsync -aHW --delete --delete-after --ignore-errors /srv/backports-master.debian.org/export/changelogs/. .
# NOTE(review): trap is installed *after* the explicit unlock; with elided
# lines in between the intended ordering is unclear — confirm upstream.
623 remove_changelog_lock
624 trap remove_changelog_lock EXIT TERM HUP INT QUIT
628 function gitpdiff() {
629 # Might be that we want to change this to have more than one git repository.
630 # Advantage of one is that we do not need much space in terms of storage in git itself,
631 # git gc is pretty good on our input.
632 # But it might be faster. Well, lets test.
633 log "Adjusting the git tree for pdiffs"
636 # The regex needs the architectures seperated with \|
# Build an alternation of all known architectures for the find regex.
# NOTE(review): result is stored in $garchs but the regex below uses
# $archs — looks like a mismatch; confirm against elided lines.
637 garchs=$(dak admin a list|sed -e ':q;N;s/\n/\\|/g;t q')
639 # First, get all the files we want to work on. ../dists/ is a symlink to the real dists/ we
641 # Also, we only want contents, packages and sources.
642 for file in $(find ../dists/ -regex ".*/\(Contents-\($archs\)\|\(Packages\|Sources\)\).gz"); do
651 # Second, add all there is into git
654 # Maybe we want to make this the same for tag and commit? But well, shouldn't matter
# ${COMD} is presumably set in an elided line near ${TAGD} — TODO confirm.
656 TAGD=$(date +%Y-%m-%d-%H-%M)
657 git commit -m "Commit of ${COMD}"