2 # Timestamp. Used for dinstall stat graphs
# Prints the stage name given in $1 together with a HH:MM:SS wall-clock
# timestamp; the dinstall stat graphs are derived from these log lines.
4 echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
# Drop the daily dinstall lock (full body not shown in this excerpt).
8 function remove_daily_lock() {
12 # Remove changelog lock
# Drop the changelog-extraction lock taken by changelogs() below.
13 function remove_changelog_lock() {
# Drop every dinstall lock in one go. The unquoted expansions rely on
# the lock paths containing no whitespace — presumably set elsewhere in
# this file; TODO confirm.
18 function remove_all_locks() {
19 rm -f $LOCK_DAILY $LOCK_ACCEPTED
22 # If we error out this one is called, *FOLLOWED* by cleanup above
# Error handler: mails the failed stage's log (when readable) to the
# ftpmaster cron address, with a subject line recording when and in
# which stage file dinstall died.
24 ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
26 subject="ATTENTION ATTENTION!"
# ${error} == "false" means dinstall carried on past the failure
# ("continued"); any other value means the run was interrupted here.
27 if [ "${error}" = "false" ]; then
28 subject="${subject} (continued)"
30 subject="${subject} (interrupted)"
32 subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
34 if [ -r "${STAGEFILE}.log" ]; then
35 cat "${STAGEFILE}.log"
37 echo "file ${STAGEFILE}.log does not exist, sorry"
# The if/else output above is piped as one stream into mail.
38 fi | mail -s "${subject}" -a "X-Debian: DAK" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
41 ########################################################################
42 # the actual dinstall functions follow #
43 ########################################################################
45 # pushing merkels QA user, part one
# Pings the QA host over ssh (restricted push key, batch mode, 90s
# timeouts) so it knows a dinstall run has started; the remote command
# is just "sleep 1".
47 log "Telling QA user that we start dinstall"
48 ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1
51 # Updating various files
# Regenerates assorted static documentation files by running helper
# scripts out of ${scriptsdir}. No error handling here — presumably the
# surrounding framework (not visible in this excerpt) handles failures.
53 log "Updating Bugs docu, Mirror list and mailing-lists.txt"
55 $scriptsdir/update-bugdoctxt
56 $scriptsdir/update-mirrorlists
57 $scriptsdir/update-mailingliststxt
58 $scriptsdir/update-pseudopackages.sh
61 # The first i18n one, syncing new descriptions
63 log "Synchronizing i18n package descriptions"
64 # First sync their newest data
65 cd ${scriptdir}/i18nsync
# || true: a failed rsync from the ddtp host must not abort dinstall;
# we simply work with whatever data we already have.
66 rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
68 # Now check if we still know about the packages for which they created the files
69 # is the timestamp signed by us?
# NOTE(review): `if $(gpgv ...)` works only because gpgv writes nothing
# to stdout — the empty expansion leaves the substitution's exit status
# for `if` to test. `if gpgv ...; then` would express the same intent
# plainly; confirm before changing.
70 if $(gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
71 # now read it. As it's signed by us we are sure the content is what we expect, no need
72 # to do more here. And we only test -d a directory on it anyway.
73 TSTAMP=$(cat timestamp)
74 # do we have the dir still?
75 if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
# Validate the synced files before publishing anything.
77 if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
78 # Yay, worked, let's copy around
79 for dir in stretch sid; do
80 if [ -d dists/${dir}/ ]; then
81 cd dists/${dir}/main/i18n
# Keep Translation-en.bz2 and the pdiff directories out of the copy.
82 rsync -aq --delete --delete-after --exclude Translation-en.bz2 --exclude Translation-*.diff/ . ${ftpdir}/dists/${dir}/main/i18n/.
84 cd ${scriptdir}/i18nsync
# Validation failed: publish nothing, complain to the l10n list.
87 echo "ARRRR, bad guys, wrong files, ARRR"
88 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
# The timestamp directory they reference no longer exists on our side.
91 echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
92 echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
# Signature verification failed: refuse the whole dataset.
95 echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
96 echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -a "X-Debian: DAK" -s "You can't keep blaming yourself. Just blame yourself once, and move on." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
100 # Syncing AppStream/DEP-11 data
102 log "Synchronizing AppStream metadata"
103 # First sync their newest data
104 mkdir -p ${scriptdir}/dep11
105 cd ${scriptdir}/dep11
# || true: a failed sync from the dep11 host must not abort dinstall.
106 rsync -aq --delete --delete-after dep11-sync:/does/not/matter . || true
# Validate the synced metadata before it is copied into the archive.
109 if ${scriptsdir}/dep11-basic-validate.py . ${scriptdir}/dep11/; then
110 # Yay, worked, let's copy around
111 for dir in stretch sid; do
112 if [ -d ${dir}/ ]; then
113 for comp in main contrib non-free; do
114 cd dists/${dir}/${comp}/dep11
# NOTE(review): the glob in `--exclude *.tmp` is unquoted, so the shell
# may expand it against the cwd before rsync sees it — confirm intent.
115 rsync -aq --delete --delete-after --exclude *.tmp . ${ftpdir}/dists/${dir}/${comp}/dep11/.
116 cd ${scriptdir}/dep11
# Validation failed: publish nothing and notify the maintainer.
121 echo "ARRRR, bad guys, wrong files, ARRR"
122 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" mak@debian.org
127 log "Checking for cruft in overrides"
# Drop source/binary suite associations that a newer version obsoletes.
131 function dominate() {
132 log "Removing obsolete source and binary associations"
# Automatic decrufting: remove obsolete binaries from unstable, and
# from experimental anything superseded by a newer version in unstable
# (tagged "NVIU" in the removal message).
136 function autocruft() {
137 log "Check for obsolete binary packages"
138 dak auto-decruft -s unstable
139 dak auto-decruft -s experimental --if-newer-version-in unstable --if-newer-version-in-rm-msg "NVIU"
# Import uploader fingerprints from the Debian and DM keyrings, then
# announce any DM keyring changes to debian-project.
142 function fingerprints() {
143 log "Updating fingerprints"
144 dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
147 dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
# Only mail if import-keyring actually reported changes (-s: file
# exists and is non-empty). The here-document below is the mail body.
149 if [ -s "${OUTFILE}" ]; then
150 /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
151 From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
152 To: <debian-project@lists.debian.org>
153 Subject: Debian Maintainers Keyring changes
154 Content-Type: text/plain; charset=utf-8
158 The following changes to the debian-maintainers keyring have just been activated:
162 Debian distribution maintenance software,
163 on behalf of the Keyring maintainers
# Dump the override database into text files for the mirrors.
170 function overrides() {
171 log "Writing overrides into text files"
# Build the package -> pool-file mapping index for every public archive.
179 log "Generating package / file mapping"
180 for archive in "${public_archives[@]}"; do
181 archiveroot="$(get_archiveroot "${archive}")"
182 dak make-pkg-file-mapping "${archive}" | bzip2 -9 > "${archiveroot}/indices/package-file.map.bz2"
# Generate Packages, Sources and Contents indices for every public
# archive.
186 function packages() {
187 log "Generating Packages and Sources files"
188 for archive in "${public_archives[@]}"; do
189 log " Generating Packages/Sources for ${archive}"
190 dak generate-packages-sources2 -a "${archive}"
191 log " Generating Contents for ${archive}"
192 dak contents generate -a "${archive}"
# Build pdiff (incremental index diff) files.
197 log "Generating pdiff files"
198 dak generate-index-diffs
# Write (and sign, via dak) the Release files per archive.
202 log "Generating Release files"
203 for archive in "${public_archives[@]}"; do
204 dak generate-releases -a "${archive}"
# Expire old packages/files from suites and clear stale queue entries.
208 function dakcleanup() {
209 log "Cleanup old packages/files"
210 dak clean-suites -m 10000
211 dak clean-queues -i "$unchecked"
# Per-archive housekeeping: drop core files, check symlinks, and build
# the recursive ls-lR listing mirrors expect.
218 for archive in "${public_archives[@]}"; do
219 archiveroot="$(get_archiveroot "${archive}")"
222 log "Removing any core files ..."
223 find -type f -name core -print -delete
225 log "Checking symlinks ..."
228 log "Creating recursive directory listing ... "
# TZ=UTC keeps the listing reproducible across hosts; --rsyncable
# makes the gzip output delta-friendly for mirrors.
230 TZ=UTC ls -lR | gzip -9c --rsyncable > ${FILENAME}.gz
# Build the Maintainers / Uploaders indices for every public archive.
234 function mkmaintainers() {
238 log 'Creating Maintainers index ... '
240 for archive in "${public_archives[@]}"; do
241 archiveroot="$(get_archiveroot "${archive}")"
242 indices="${archiveroot}/indices"
# Skip archives that have no indices directory.
243 if ! [ -d "${indices}" ]; then
248 dak make-maintainers -a "${archive}" ${scriptdir}/masterfiles/pseudo-packages.maintainers
249 gzip -9v --rsyncable <Maintainers >Maintainers.gz
250 gzip -9v --rsyncable <Uploaders >Uploaders.gz
# Publish gzipped copies of the override files for the listed suites
# into the public indices directory.
254 function copyoverrides() {
255 log 'Copying override files into public view ...'
259 for ofile in ${overridedir}/override.{squeeze,wheezy,jessie,stretch,sid}.{,extra.}{main,contrib,non-free}*; do
261 gzip -9cv --rsyncable ${ofile} > ${indices}/${bname}.gz
# Group-writable so other archive tooling can refresh it.
262 chmod g+w ${indices}/${bname}.gz
# Build the per-arch / per-suite file lists under indices/files/ that
# partial mirrors use to decide what to fetch.
267 function mkfilesindices() {
270 cd $base/ftp/indices/files/components
# Pull the full pool file list (path + architecture) from projectb.
274 log "Querying postgres"
276 SELECT './pool/' || c.name || '/' || f.filename AS path, a.arch_string AS arch_string
278 JOIN files_archive_map af ON f.id = af.file_id
279 JOIN component c ON af.component_id = c.id
280 JOIN archive ON af.archive_id = archive.id
283 JOIN architecture a ON b.architecture = a.id)
285 WHERE archive.name = 'ftp-master'
286 ORDER BY path, arch_string
288 psql -At -c "$query" >$ARCHLIST
291 perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
294 perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
# Source list: pool paths with no architecture plus everything under
# dists/ that belongs to source.
297 log "Generating sources list"
299 sed -n 's/|$//p' $ARCHLIST
301 find ./dists -maxdepth 1 \! -type d
302 find ./dists \! -type d | grep "/source/"
303 ) | sort -u | gzip -9 > source.list.gz
# Per-architecture lists: arch-specific pool files, arch:all files and
# the matching dists/ material (Contents, binary-, installer-, ...).
305 log "Generating arch lists"
307 ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
309 (sed -n "s/|$a$//p" $ARCHLIST
310 sed -n 's/|all$//p' $ARCHLIST
313 find ./dists -maxdepth 1 \! -type d
314 find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
315 ) | sort -u | gzip -9 > arch-$a.list.gz
# Per-suite lists: every pool file referenced by the suite's sources
# (including extra source references) and binaries.
318 log "Generating suite lists"
321 local suite_id="$(printf %d $1)"
324 SELECT DISTINCT './pool/' || c.name || '/' || f.filename
326 (SELECT sa.source AS source
327 FROM src_associations sa
328 WHERE sa.suite = $suite_id
331 FROM extra_src_references esr
332 JOIN bin_associations ba ON esr.bin_id = ba.bin
333 WHERE ba.suite = $suite_id
335 SELECT b.source AS source
336 FROM bin_associations ba
337 JOIN binaries b ON ba.bin = b.id WHERE ba.suite = $suite_id) s
338 JOIN dsc_files df ON s.source = df.source
339 JOIN files f ON df.file = f.id
340 JOIN files_archive_map af ON f.id = af.file_id
341 JOIN component c ON af.component_id = c.id
342 JOIN archive ON af.archive_id = archive.id
343 WHERE archive.name = 'ftp-master'
345 psql -F' ' -A -t -c "$query"
348 SELECT './pool/' || c.name || '/' || f.filename
349 FROM bin_associations ba
350 JOIN binaries b ON ba.bin = b.id
351 JOIN files f ON b.file = f.id
352 JOIN files_archive_map af ON f.id = af.file_id
353 JOIN component c ON af.component_id = c.id
354 JOIN archive ON af.archive_id = archive.id
355 WHERE ba.suite = $suite_id AND archive.name = 'ftp-master'
357 psql -F' ' -A -t -c "$query"
# Iterate every suite known to the database, skipping those without a
# dists/ tree; resolve codename symlinks so aliases share one list.
360 psql -F' ' -At -c "SELECT id, suite_name FROM suite" |
361 while read id suite; do
362 [ -e $base/ftp/dists/$suite ] || continue
365 distname=$(cd dists; readlink $suite || echo $suite)
366 find ./dists/$distname \! -type d
367 for distdir in ./dists/*; do
368 [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
372 ) | sort -u | gzip -9 > suite-${suite}.list.gz
# Sundries: everything on the ftp site that no generated list claims.
375 log "Finding everything on the ftp site to generate sundries"
376 (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
379 zcat *.list.gz | cat - *.list | sort -u |
380 diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
# Combine arch + sundries + misc lists into the final per-arch files.
382 log "Generating files list"
385 (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
386 cat - sundries.list dists.list project.list docs.list indices.list |
387 sort -u | poolfirst > ../arch-$a.files
391 for dist in sid jessie stretch; do
392 find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
396 (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-jessie.list.gz ; zcat translation-stretch.list.gz) |
397 sort -u | poolfirst > ../typical.files
# Build the md5sums / dsync index per public archive and hardlink
# duplicate files to save mirror space.
404 function mkchecksums() {
405 local archiveroot dsynclist md5list
407 for archive in "${public_archives[@]}"; do
408 archiveroot="$(get_archiveroot "${archive}")"
409 dsynclist=$dbdir/dsync.${archive}.list
410 md5list=${archiveroot}/indices/md5sums
412 log -n "Creating md5 / dsync index file for ${archive}... "
# -n on gzip: omit the timestamp so the output is reproducible.
415 ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
416 ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
# Best-effort: duplicate-linking failures must not abort dinstall.
417 ${bindir}/dsync-flist -q link-dups $dsynclist || true
422 local archiveroot mirrordir
424 log "Regenerating \"public\" mirror/ hardlink fun"
# The archive serial is max(today's date-based serial, previous
# serial + 1) so it is strictly increasing even with multiple runs per
# day.
425 DATE_SERIAL=$(date +"%Y%m%d01")
426 FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} )
427 if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
428 SERIAL="$DATE_SERIAL"
430 SERIAL="$FILESOAPLUS1"
# Rewrite the mirror trace file with date, host and the new serial.
432 date -u > ${TRACEFILE}
433 echo "Using dak v1" >> ${TRACEFILE}
434 echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
435 echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
437 # Ugly "hack", but hey, it does what we want.
438 cp ${TRACEFILE} ${TRACEFILE_BDO}
# Refresh the public mirror copy as hardlinks into the archive root;
# in-progress .new index files are excluded.
440 for archive in "${public_archives[@]}"; do
441 archiveroot="$(get_archiveroot "${archive}")"
442 mirrordir="${archiveroot}/../mirror"
444 rsync -aH --link-dest ${archiveroot} --delete --delete-after --delete-excluded --exclude Packages.*.new --exclude Sources.*.new --ignore-errors ${archiveroot}/. .
# Prune old database dumps (policy implemented by expire_dumps).
449 log "Expiring old database dumps..."
451 $scriptsdir/expire_dumps -d . -p -f "dump_*"
# Expire transitions that no longer apply (-c clean, -a automatic).
454 function transitionsclean() {
455 log "Removing out of date transitions..."
457 dak transitions -c -a
# Export the per-source DM upload permissions to the public export dir.
461 log "Updating DM permissions page"
462 dak acl export-per-source dm >$exportdir/dm.txt
# Auto-categorize uncategorized ftp.debian.org bugs, as the
# unprivileged dak user.
466 log "Categorizing uncategorized bugs filed against ftp.debian.org"
467 sudo -u dak-unpriv dak bts-categorize
# Trigger the developer-accessible mirror to sync (including ftp dir).
470 function ddaccess() {
471 # Tell our dd accessible mirror to sync itself up. Including ftp dir.
472 log "Trigger dd accessible parts sync including ftp dir"
473 ${scriptsdir}/sync-dd dd-sync dd-sync1 dd-sync2 sync
# Verify the public mirror copy against every InRelease file (size and
# SHA1 of each listed entry), then kick off the mirror push.
476 function mirrorpush() {
477 log "Checking the public archive copy"
478 cd ${mirrordir}/dists
481 for release in $(find . -name "InRelease"); do
482 echo "Processing: ${release}"
483 subdir=${release%/InRelease}
# Fields come from the SHA1 section of the InRelease file (see the sed
# extraction at the bottom of this loop).
484 while read SHASUM SIZE NAME; do
485 if ! [ -f "${subdir}/${NAME}" ]; then
486 bname=$(basename ${NAME})
487 if [[ "${bname}" =~ ^(Packages|Sources|Contents-[a-zA-Z0-9-]+|Translation-[a-zA-Z_]+)$ ]]; then
488 # We don't keep unpacked files, don't check for their existence.
489 # We might want to go and check their unpacked shasum, but right now
490 # I don't care. I believe it should be enough if all the packed shasums
# Any other missing file is a real problem.
494 broken=$(( broken + 1 ))
495 echo "File ${subdir}/${NAME} is missing"
499 # We do have symlinks in the tree (see the contents files currently).
500 # So we use "readlink -f" to check the size of the target, as that's basically
501 # what gen-releases does
502 fsize=$(stat -c %s $(readlink -f "${subdir}/${NAME}"))
503 if [ ${fsize} -ne ${SIZE} ]; then
504 broken=$(( broken + 1 ))
505 echo "File ${subdir}/${NAME} has size ${fsize}, expected is ${SIZE}"
# Size matched; now compare the SHA1 checksum.
509 fshasum=$(sha1sum $(readlink -f "${subdir}/${NAME}"))
510 fshasum=${fshasum%% *}
511 if [ "${fshasum}" != "${SHASUM}" ]; then
512 broken=$(( broken + 1 ))
513 echo "File ${subdir}/${NAME} has checksum ${fshasum}, expected is ${SHASUM}"
# Feed the loop with only the SHA1 block of the InRelease file.
516 done < <(sed '1,/SHA1:/d' "${release}" | sed '/SHA256:/,$d')
# Refuse to push a broken mirror.
519 if [ $broken -gt 0 ]; then
520 log_error "Trouble with the public mirror, found ${broken} errors"
524 log "Starting the mirrorpush"
525 date -u > /srv/ftp.debian.org/web/mirrorstart
526 echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
527 echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/mirrorstart
# Both pushes run in the background; dinstall does not wait for them.
528 sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
529 sudo -H -u archvsync /home/archvsync/runmirrors -a backports > ~dak/runmirrorsbpo.log 2>&1 &
# Let the backports service update its own mirror copy.
532 function mirrorpush-backports() {
533 log "Syncing backports mirror"
534 sudo -u backports /home/backports/bin/update-archive
# Export per-suite package lists for the i18n project into a
# timestamped directory, sign the timestamp, and expose it via the
# "i18n" symlink.
538 log "Exporting package data foo for i18n project"
539 STAMP=$(date "+%Y%m%d%H%M")
540 mkdir -p ${scriptdir}/i18n/${STAMP}
541 cd ${scriptdir}/i18n/${STAMP}
542 for suite in stable testing unstable; do
# Extract the codename from "dak admin s show" output ("Codename: X").
543 codename=$(dak admin s show ${suite}|grep '^Codename')
544 codename=${codename##* }
545 echo "Codename is ${codename}"
546 dak control-suite -l ${suite} >${codename}
# Sign the timestamp so i18n1 above can later verify it with gpgv.
548 echo "${STAMP}" > timestamp
549 gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 473041FA --detach-sign -o timestamp.gpg timestamp
# -T: never follow/descend; atomically repoint the i18n symlink.
553 ln -sfT ${scriptdir}/i18n/${STAMP} i18n
# Drop exports older than two days, keeping the current one.
556 find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
# Refresh ftp traffic stats (data + R plots) and archive-size stats.
560 log "Updating stats data"
562 $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
563 R --slave --vanilla < $base/misc/ftpstats.R
564 dak stats arch-space > $webdir/arch-space
565 dak stats pkg-nums > $webdir/pkg-nums
# Delete transaction-id files older than ~3 months (90 days).
568 function cleantransactions() {
569 log "Cleanup transaction ids older than 3 months"
571 find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -delete
# Feed a finished logfile ($1) to the log statistics tool.
574 function logstats() {
575 $masterdir/tools/logs.py "$1"
578 # save timestamp when we start
# Records the dinstall start time in ${dbdir}/dinstallstart; read back
# later by renamelogfile().
579 function savetimestamp() {
580 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
581 echo ${NOW} > "${dbdir}/dinstallstart"
# Mail the full dinstall logfile to the ftpmaster cron address.
584 function maillogfile() {
585 cat "$LOGFILE" | mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
# Rename the logfile to dinstall_<starttime>.log, run log stats on it,
# and compress it. Falls back to the current time (and skips stats)
# when the start timestamp was never recorded.
588 function renamelogfile() {
589 if [ -f "${dbdir}/dinstallstart" ]; then
590 NOW=$(cat "${dbdir}/dinstallstart")
592 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
593 logstats "$logdir/dinstall_${NOW}.log"
594 bzip2 -9 "$logdir/dinstall_${NOW}.log"
596 error "Problem, I don't know when dinstall started, unable to do log statistics."
597 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
599 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
600 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# Export the list of source packages currently in testing.
604 function testingsourcelist() {
605 dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
608 # do a last run of process-unchecked before dinstall is on.
# -p: run without taking the daily lock (dinstall already holds it).
609 function process_unchecked() {
610 log "Processing the unchecked queue"
611 UNCHECKED_WITHOUT_LOCK="-p"
616 # Function to update a "statefile" telling people what we are doing
619 # This should be called with the argument(s)
620 # - Status name we want to show.
# Rewrites ${DINSTALLSTATE} with the run's start time and the current
# action's start time (UTC plus epoch seconds).
623 RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
624 cat >"${DINSTALLSTATE}" <<EOF
625 Dinstall start: ${DINSTALLBEGIN}
627 Action start: ${RIGHTNOW}
631 # extract changelogs and stuff
# Extracts changelogs for ftp-master and backports, publishes them to
# the public export trees, and triggers the static mirror update.
# Skipped entirely if the changelog lock cannot be taken (3 retries).
632 function changelogs() {
633 if lockfile -r3 $LOCK_CHANGELOG; then
634 log "Extracting changelogs"
635 dak make-changelog -e -a ftp-master
# Recompress the filelist for rsync consumers, if present.
636 [ -f ${exportdir}/changelogs/filelist.yaml ] && xz -f ${exportdir}/changelogs/filelist.yaml
637 mkdir -p ${exportpublic}/changelogs
638 cd ${exportpublic}/changelogs
639 rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
# Background static-component update; dinstall does not wait for it.
640 sudo -H -u staticsync /usr/local/bin/static-update-component metadata.ftp-master.debian.org >/dev/null 2>&1 &
642 dak make-changelog -e -a backports
643 [ -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml ] && xz -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml
644 mkdir -p /srv/backports-master.debian.org/rsync/export/changelogs
645 cd /srv/backports-master.debian.org/rsync/export/changelogs
646 rsync -aHW --delete --delete-after --ignore-errors /srv/backports-master.debian.org/export/changelogs/. .
# Release the lock now, and also on any abnormal exit via trap.
647 remove_changelog_lock
648 trap remove_changelog_lock EXIT TERM HUP INT QUIT
# Commit the current index files (Contents/Packages/Sources) into a git
# tree so pdiff history can be reconstructed from git.
652 function gitpdiff() {
653 # Might be that we want to change this to have more than one git repository.
654 # Advantage of one is that we do not need much space in terms of storage in git itself,
655 # git gc is pretty good on our input.
656 # But it might be faster. Well, lets test.
657 log "Adjusting the git tree for pdiffs"
660 # The regex needs the architectures separated with \|
661 garchs=$(dak admin a list|sed -e ':q;N;s/\n/\\|/g;t q')
663 # First, get all the files we want to work on. ../dists/ is a symlink to the real dists/ we
665 # Also, we only want contents, packages and sources.
# NOTE(review): the find regex below interpolates $archs, but the
# assignment above sets garchs — with $archs unset the Contents-
# alternative matches nothing. Verify which variable is intended.
666 for file in $(find ../dists/ -regex ".*/\(Contents-\($archs\)\|\(Packages\|Sources\)\).gz"); do
675 # Second, add all there is into git
678 # Maybe we want to make this the same for tag and commit? But well, shouldn't matter
680 TAGD=$(date +%Y-%m-%d-%H-%M)
681 git commit -m "Commit of ${COMD}"