2 # Timestamp. Used for dinstall stat graphs
# NOTE(review): fragment — the enclosing function definition (the "ts" helper,
# judging by later calls like `ts "locked part finished"`) is not visible here.
4 echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
# Lock management helpers. NOTE(review): fragment — the bodies of the first two
# functions below (and all closing braces) are not visible in this view.
8 function remove_daily_lock() {
12 # Remove changelog lock
13 function remove_changelog_lock() {
# Remove every lock we may hold; $LOCK_DAILY / $LOCK_ACCEPTED are set elsewhere.
18 function remove_all_locks() {
19 rm -f $LOCK_DAILY $LOCK_ACCEPTED
# Clear the cleanup trap and log that the locked part of the run finished.
22 function remove_locks {
24 trap - EXIT TERM HUP INT QUIT
25 ts "locked part finished"
# Take the "accepted" lock and ensure all locks are removed on any exit path.
28 function lockaccepted {
29 lockfile "$LOCK_ACCEPTED"
30 trap remove_all_locks EXIT TERM HUP INT QUIT
33 # If we error out this one is called, *FOLLOWED* by cleanup above
# NOTE(review): fragment — the `function onerror` header line itself is not
# visible here; ${error} and ${STAGEFILE} are expected to be set by the caller.
35 ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
37 subject="ATTENTION ATTENTION!"
# "${error}" = "false" signals that dinstall continued despite the error.
38 if [ "${error}" = "false" ]; then
39 subject="${subject} (continued)"
41 subject="${subject} (interrupted)"
43 subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
# Mail the stage log (if readable) to the ftpmaster cron alias.
45 if [ -r "${STAGEFILE}.log" ]; then
46 cat "${STAGEFILE}.log"
48 echo "file ${STAGEFILE}.log does not exist, sorry"
49 fi | mail -s "${subject}" -a "X-Debian: DAK" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
52 ########################################################################
53 # the actual dinstall functions follow #
54 ########################################################################
56 # pushing merkels QA user, part one
# NOTE(review): fragment — the enclosing function header is not visible here.
58 log "Telling QA user that we start dinstall"
# Non-interactive ssh trigger; the remote "sleep 1" only serves as a ping.
59 ssh -n -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1
62 # Updating various files
# NOTE(review): fragment — the enclosing function header is not visible here.
64 log "Updating Bugs docu, Mirror list and mailing-lists.txt"
# Each helper script lives in $scriptsdir; failures are not handled here.
66 $scriptsdir/update-bugdoctxt
67 $scriptsdir/update-mirrorlists
68 $scriptsdir/update-mailingliststxt
69 $scriptsdir/update-pseudopackages.sh
72 # The first i18n one, syncing new descriptions
# NOTE(review): fragment — the enclosing function header and several closing
# fi/done lines are not visible in this view.
74 log "Synchronizing i18n package descriptions"
75 # First sync their newest data
76 cd ${scriptdir}/i18nsync
77 rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
79 # Now check if we still know about the packages for which they created the files
80 # is the timestamp signed by us?
# NOTE(review): `if $(gpgv ...)` substitutes gpgv's stdout and runs *that* as
# the test command; the conventional form is plain `if gpgv ...; then`.
# Confirm this is intended before touching it.
81 if $(gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
82 # now read it. As it's signed by us we are sure the content is what we expect, no need
83 # to do more here. And we only test -d a directory on it anyway.
84 TSTAMP=$(cat timestamp)
85 # do we have the dir still?
86 if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
88 if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
89 # Yay, worked, let's copy around
90 for dir in stretch sid; do
91 if [ -d dists/${dir}/ ]; then
92 cd dists/${dir}/main/i18n
93 rsync -aq --delete --delete-after --exclude Translation-en.bz2 --exclude Translation-*.diff/ . ${ftpdir}/dists/${dir}/main/i18n/.
95 cd ${scriptdir}/i18nsync
98 echo "ARRRR, bad guys, wrong files, ARRR"
99 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
102 echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
103 echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
106 echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
107 echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -a "X-Debian: DAK" -s "You can't keep blaming yourself. Just blame yourself once, and move on." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
111 # Syncing AppStream/DEP-11 data
# NOTE(review): fragment — the enclosing function header and several closing
# done/fi lines are not visible in this view.
113 log "Synchronizing AppStream metadata"
114 # First sync their newest data
115 mkdir -p ${scriptdir}/dep11
116 cd ${scriptdir}/dep11
117 rsync -aq --delete --delete-after dep11-sync:/does/not/matter . || true
# Validate before publishing; only copy into the archive if validation passes.
120 if ${scriptsdir}/dep11-basic-validate.py . ${scriptdir}/dep11/; then
121 # Yay, worked, let's copy around
122 for dir in stretch sid; do
123 if [ -d ${dir}/ ]; then
124 for comp in main contrib non-free; do
125 mkdir -p ${ftpdir}/dists/${dir}/${comp}/dep11
# NOTE(review): unquoted `--exclude *.tmp` would be glob-expanded by the shell
# if a matching file exists in the cwd; usually written --exclude '*.tmp'.
127 rsync -aq --delete --delete-after --exclude *.tmp . ${ftpdir}/dists/${dir}/${comp}/dep11/.
128 cd ${scriptdir}/dep11
133 echo "ARRRR, bad guys, wrong files, ARRR"
134 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" mak@debian.org
# NOTE(review): fragment — the function header for the override-cruft check
# below is not visible in this view.
139 log "Checking for cruft in overrides"
# Drop obsolete source/binary associations, then refresh the debug suites.
143 function dominate() {
144 log "Removing obsolete source and binary associations"
146 dak manage-debug-suites unstable-debug experimental-debug
# Flag obsolete binaries; "NVIU" presumably abbreviates the
# --if-newer-version-in condition ("newer version in unstable") — confirm.
149 function autocruft() {
150 log "Check for obsolete binary packages"
151 dak auto-decruft -s unstable
152 dak auto-decruft -s experimental --if-newer-version-in unstable --if-newer-version-in-rm-msg "NVIU"
# Refresh key fingerprints in the dak database from the Debian keyrings and
# announce debian-maintainers keyring changes by mail.
155 function fingerprints() {
156 log "Updating fingerprints"
157 dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
# NOTE(review): fragment — the line initialising ${OUTFILE} is not visible here.
160 dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
# If the import reported changes, mail them to debian-project. Everything from
# the sendmail line onward is heredoc mail content (terminator not visible).
162 if [ -s "${OUTFILE}" ]; then
163 /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
164 From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
165 To: <debian-project@lists.debian.org>
166 Subject: Debian Maintainers Keyring changes
167 Content-Type: text/plain; charset=utf-8
171 The following changes to the debian-maintainers keyring have just been activated:
175 Debian distribution maintenance software,
176 on behalf of the Keyring maintainers
# Export override data to text files (body largely missing in this fragment).
183 function overrides() {
184 log "Writing overrides into text files"
# NOTE(review): fragment — the function header for the package/file mapping
# below is not visible in this view.
192 log "Generating package / file mapping"
193 for archive in "${public_archives[@]}"; do
194 archiveroot="$(get_archiveroot "${archive}")"
195 dak make-pkg-file-mapping "${archive}" | bzip2 -9 > "${archiveroot}/indices/package-file.map.bz2"
# Generate Packages/Sources and Contents indices for every public archive.
199 function packages() {
200 log "Generating Packages and Sources files"
201 for archive in "${public_archives[@]}"; do
202 log " Generating Packages/Sources for ${archive}"
203 dak generate-packages-sources2 -a "${archive}"
204 log " Generating Contents for ${archive}"
205 dak contents generate -a "${archive}"
# NOTE(review): fragment — the function headers for the pdiff and Release
# steps below are not visible in this view.
210 log "Generating pdiff files"
211 dak generate-index-diffs
215 log "Generating Release files"
216 for archive in "${public_archives[@]}"; do
217 dak generate-releases -a "${archive}"
# Expire old packages/files (keep at most 10000 per clean-suites run) and
# clean the unchecked queue.
221 function dakcleanup() {
222 log "Cleanup old packages/files"
223 dak clean-suites -m 10000
224 dak clean-queues -i "$unchecked"
# NOTE(review): fragment — the enclosing function header (recursive directory
# listing, judging by the log messages) is not visible in this view.
231 for archive in "${public_archives[@]}"; do
232 archiveroot="$(get_archiveroot "${archive}")"
235 log "Removing any core files ..."
236 find -type f -name core -print -delete
238 log "Checking symlinks ..."
241 log "Creating recursive directory listing ... "
# ${FILENAME} is set on a line not visible in this fragment.
243 TZ=UTC ls -lR | gzip -9c --rsyncable > ${FILENAME}.gz
# Build the Maintainers / Uploaders indices for each public archive.
247 function mkmaintainers() {
251 log 'Creating Maintainers index ... '
253 for archive in "${public_archives[@]}"; do
254 archiveroot="$(get_archiveroot "${archive}")"
255 indices="${archiveroot}/indices"
# Skip archives without an indices dir (branch body not visible in fragment).
256 if ! [ -d "${indices}" ]; then
261 dak make-maintainers -a "${archive}" ${scriptdir}/masterfiles/pseudo-packages.maintainers
262 gzip -9v --rsyncable <Maintainers >Maintainers.gz
263 gzip -9v --rsyncable <Uploaders >Uploaders.gz
# Publish gzipped override files into the public indices directory.
267 function copyoverrides() {
268 log 'Copying override files into public view ...'
# NOTE(review): ${bname} is derived from ${ofile} on a line not visible here.
272 for ofile in ${overridedir}/override.{squeeze,wheezy,jessie,stretch,sid}.{,extra.}{main,contrib,non-free}*; do
274 gzip -9cv --rsyncable ${ofile} > ${indices}/${bname}.gz
275 chmod g+w ${indices}/${bname}.gz
# Build per-arch / per-suite / per-source file lists under
# ftp/indices/files/components from the projectb database and the on-disk tree.
# NOTE(review): heavy fragment — the SQL below belongs to query-string
# assignments whose opening/closing quote lines are not visible, and several
# helper definitions (the perl one-liners look like list post-processors,
# possibly "poolfirst") are missing their surrounding lines. Do not re-indent
# or reflow anything in here without the full source at hand.
280 function mkfilesindices() {
283 cd $base/ftp/indices/files/components
287 log "Querying postgres"
289 SELECT './pool/' || c.name || '/' || f.filename AS path, a.arch_string AS arch_string
291 JOIN files_archive_map af ON f.id = af.file_id
292 JOIN component c ON af.component_id = c.id
293 JOIN archive ON af.archive_id = archive.id
296 JOIN architecture a ON b.architecture = a.id)
298 WHERE archive.name = 'ftp-master'
299 ORDER BY path, arch_string
301 psql -At -c "$query" >$ARCHLIST
304 perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
307 perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
310 log "Generating sources list"
312 sed -n 's/|$//p' $ARCHLIST
314 find ./dists -maxdepth 1 \! -type d
315 find ./dists \! -type d | grep "/source/"
316 ) | sort -u | gzip -9 > source.list.gz
318 log "Generating arch lists"
320 ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
322 (sed -n "s/|$a$//p" $ARCHLIST
323 sed -n 's/|all$//p' $ARCHLIST
326 find ./dists -maxdepth 1 \! -type d
327 find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
328 ) | sort -u | gzip -9 > arch-$a.list.gz
# Per-suite lists: union of source files, extra source refs and binary files
# for the given suite id, restricted to the ftp-master archive.
331 log "Generating suite lists"
334 local suite_id="$(printf %d $1)"
337 SELECT DISTINCT './pool/' || c.name || '/' || f.filename
339 (SELECT sa.source AS source
340 FROM src_associations sa
341 WHERE sa.suite = $suite_id
344 FROM extra_src_references esr
345 JOIN bin_associations ba ON esr.bin_id = ba.bin
346 WHERE ba.suite = $suite_id
348 SELECT b.source AS source
349 FROM bin_associations ba
350 JOIN binaries b ON ba.bin = b.id WHERE ba.suite = $suite_id) s
351 JOIN dsc_files df ON s.source = df.source
352 JOIN files f ON df.file = f.id
353 JOIN files_archive_map af ON f.id = af.file_id
354 JOIN component c ON af.component_id = c.id
355 JOIN archive ON af.archive_id = archive.id
356 WHERE archive.name = 'ftp-master'
358 psql -F' ' -A -t -c "$query"
361 SELECT './pool/' || c.name || '/' || f.filename
362 FROM bin_associations ba
363 JOIN binaries b ON ba.bin = b.id
364 JOIN files f ON b.file = f.id
365 JOIN files_archive_map af ON f.id = af.file_id
366 JOIN component c ON af.component_id = c.id
367 JOIN archive ON af.archive_id = archive.id
368 WHERE ba.suite = $suite_id AND archive.name = 'ftp-master'
370 psql -F' ' -A -t -c "$query"
373 psql -F' ' -At -c "SELECT id, suite_name FROM suite" |
374 while read id suite; do
375 [ -e $base/ftp/dists/$suite ] || continue
378 distname=$(cd dists; readlink $suite || echo $suite)
379 find ./dists/$distname \! -type d
380 for distdir in ./dists/*; do
381 [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
385 ) | sort -u | gzip -9 > suite-${suite}.list.gz
# "Sundries" = everything on the ftp site not covered by any generated list.
388 log "Finding everything on the ftp site to generate sundries"
389 (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
392 zcat *.list.gz | cat - *.list | sort -u |
393 diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
395 log "Generating files list"
398 (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
399 cat - sundries.list dists.list project.list docs.list indices.list |
400 sort -u | poolfirst > ../arch-$a.files
404 for dist in sid jessie stretch; do
405 find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
409 (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-jessie.list.gz ; zcat translation-stretch.list.gz) |
410 sort -u | poolfirst > ../typical.files
# Build md5sum / dsync file indices for each public archive.
417 function mkchecksums() {
418 local archiveroot dsynclist md5list
420 for archive in "${public_archives[@]}"; do
421 archiveroot="$(get_archiveroot "${archive}")"
422 dsynclist=$dbdir/dsync.${archive}.list
423 md5list=${archiveroot}/indices/md5sums
425 log -n "Creating md5 / dsync index file for ${archive}... "
# link-dups failures are deliberately ignored (best-effort deduplication).
428 ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
429 ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
430 ${bindir}/dsync-flist -q link-dups $dsynclist || true
# NOTE(review): fragment — the enclosing function header (public mirror
# hardlink copy) is not visible, nor are the else/fi lines around the serial
# selection below.
435 local archiveroot targetpath TRACEFILE
437 for archive in "${public_archives[@]}"; do
438 archiveroot="$(get_archiveroot "${archive}")"
439 targetpath="${mirrordir}/${archive}"
440 TRACEFILE="${archiveroot}/project/trace/ftp-master.debian.org"
441 mkdir -p "${archiveroot}/project/trace/"
443 log "Regenerating \"public\" mirror/${archive} hardlink fun"
# New serial is the larger of: today's date-based serial, and the previous
# trace file's serial + 1 (falling back to the date serial if no trace file).
444 DATE_SERIAL=$(date +"%Y%m%d01")
445 FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} || echo ${DATE_SERIAL} )
446 if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
447 SERIAL="$DATE_SERIAL"
449 SERIAL="$FILESOAPLUS1"
# Rewrite the trace file, then hardlink-sync the archive into the public path.
451 date -u > ${TRACEFILE}
452 echo "Using dak v1" >> ${TRACEFILE}
453 echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
454 echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
456 mkdir -p ${targetpath}
458 rsync -aH --link-dest ${archiveroot} --delete --delete-after --delete-excluded --exclude Packages.*.new --exclude Sources.*.new --ignore-errors ${archiveroot}/. .
# NOTE(review): fragment — several function headers in this span are missing.
463 log "Expiring old database dumps..."
465 $scriptsdir/expire_dumps -d . -p -f "dump_*"
# Drop finished/out-of-date transitions from the transition checker.
468 function transitionsclean() {
469 log "Removing out of date transitions..."
471 dak transitions -c -a
# Export Debian Maintainer per-source upload permissions to the export dir.
475 log "Updating DM permissions page"
476 dak acl export-per-source dm >$exportdir/dm.txt
# Triage ftp.debian.org bugs as an unprivileged dak user.
480 log "Categorizing uncategorized bugs filed against ftp.debian.org"
481 sudo -u dak-unpriv dak bts-categorize
# Trigger the dd-accessible mirror to sync itself (includes the ftp dir).
484 function ddaccess() {
485 # Tell our dd accessible mirror to sync itself up. Including ftp dir.
486 log "Trigger dd accessible parts sync including ftp dir"
487 ${scriptsdir}/sync-dd dd-sync dd-sync1 dd-sync2 sync
# Verify the public archive copies against their InRelease checksums, then
# kick off the mirror push. NOTE(review): fragment — loop closers, the
# ${broken} initialisation and the default ${pusharg} branch are not visible.
490 function mirrorpush() {
491 log "Checking the public archive copies..."
493 local archiveroot targetpath
495 for archive in "${public_archives[@]}"; do
496 log "... archive: ${archive}"
497 archiveroot="$(get_archiveroot "${archive}")"
498 targetpath="${mirrordir}/${archive}"
499 cd ${archiveroot}/dists
# For every InRelease, check each listed file for presence, size and SHA1.
502 for release in $(find . -name "InRelease"); do
503 echo "Processing: ${release}"
504 subdir=${release%/InRelease}
505 while read SHASUM SIZE NAME; do
506 if ! [ -f "${subdir}/${NAME}" ]; then
507 bname=$(basename ${NAME})
508 if [[ "${bname}" =~ ^(Packages|Sources|Contents-[a-zA-Z0-9-]+|Translation-[a-zA-Z_]+|Components-(amd64|i386)\.yml|icons-(128x128|64x64)\.tar)$ ]]; then
510 # We don't keep unpacked files, don't check for their existence.
511 # We might want to go and check their unpacked shasum, but right now
512 # I don't care. I believe it should be enough if all the packed shasums
516 broken=$(( broken + 1 ))
517 echo "File ${subdir}/${NAME} is missing"
521 # We do have symlinks in the tree (see the contents files currently).
522 # So we use "readlink -f" to check the size of the target, as that's basically
523 # what gen-releases does
524 fsize=$(stat -c %s $(readlink -f "${subdir}/${NAME}"))
525 if [ ${fsize} -ne ${SIZE} ]; then
526 broken=$(( broken + 1 ))
527 echo "File ${subdir}/${NAME} has size ${fsize}, expected is ${SIZE}"
531 fshasum=$(sha1sum $(readlink -f "${subdir}/${NAME}"))
532 fshasum=${fshasum%% *}
533 if [ "${fshasum}" != "${SHASUM}" ]; then
534 broken=$(( broken + 1 ))
535 echo "File ${subdir}/${NAME} has checksum ${fshasum}, expected is ${SHASUM}"
# Feed the loop only the SHA1 section of the InRelease file.
538 done < <(sed '1,/SHA1:/d' "${release}" | sed '/SHA256:/,$d')
541 if [ $broken -gt 0 ]; then
542 log_error "Trouble with the public mirror for ${archive}, found ${broken} errors"
545 log "Starting the mirrorpush for ${archive}"
# Backports gets its own runmirrors argument; write a mirrorstart stamp file,
# then launch runmirrors in the background as the archvsync user.
555 pusharg="-a backports"
558 fname="mirrorstart.${archive}"
561 date -u > /srv/ftp.debian.org/web/${fname}
562 echo "Using dak v1" >> /srv/ftp.debian.org/web/${fname}
563 echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/${fname}
564 sudo -H -u archvsync /home/archvsync/runmirrors ${pusharg} > ~dak/runmirrors-${archive}.log 2>&1 &
# Sync the backports mirror via its own update script.
569 function mirrorpush-backports() {
570 log "Syncing backports mirror"
571 sudo -u backports /home/backports/bin/update-archive
# NOTE(review): fragment — the enclosing function header (i18n export) and the
# loop-closing done line are not visible in this view.
575 log "Exporting package data foo for i18n project"
576 STAMP=$(date "+%Y%m%d%H%M")
577 mkdir -p ${scriptdir}/i18n/${STAMP}
578 cd ${scriptdir}/i18n/${STAMP}
# Dump control-suite data per suite, named after each suite's codename.
579 for suite in stable testing unstable; do
580 codename=$(dak admin s show ${suite}|grep '^Codename')
581 codename=${codename##* }
582 echo "Codename is ${codename}"
583 dak control-suite -l ${suite} >${codename}
# Sign the timestamp so the i18n sync step (gpgv check elsewhere in this file)
# can verify the export came from us.
585 echo "${STAMP}" > timestamp
586 gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 473041FA --detach-sign -o timestamp.gpg timestamp
# Point the "i18n" symlink at the fresh export; expire exports older than
# two days (keeping the current one).
590 ln -sfT ${scriptdir}/i18n/${STAMP} i18n
593 find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
# NOTE(review): fragment — the enclosing function header (stats update) is not
# visible in this view.
597 log "Updating stats data"
599 $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
# Render graphs/tables from the collected data via the R script.
600 R --slave --vanilla < $base/misc/ftpstats.R
601 dak stats arch-space > $webdir/arch-space
602 dak stats pkg-nums > $webdir/pkg-nums
# Delete transaction id files older than ~3 months (mtime +90 days).
605 function cleantransactions() {
606 log "Cleanup transaction ids older than 3 months"
608 find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -delete
# Run the external log-statistics tool on the given log file ($1).
611 function logstats() {
612 $masterdir/tools/logs.py "$1"
615 # save timestamp when we start
616 function savetimestamp() {
# NOTE(review): NOW is intentionally not local — maillogfile below reuses it.
617 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
618 echo ${NOW} > "${dbdir}/dinstallstart"
# Mail the full dinstall log to the ftpmaster cron alias.
621 function maillogfile() {
622 cat "$LOGFILE" | mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
# Export the list of source packages currently in testing.
625 function testingsourcelist() {
626 dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
629 # Function to update a "statefile" telling people what we are doing
632 # This should be called with the argument(s)
633 # - Status name we want to show.
# NOTE(review): fragment — the function header and the heredoc terminator are
# not visible; the lines after `cat` are heredoc content, do not edit them.
636 RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
637 cat >"${DINSTALLSTATE}" <<EOF
638 Dinstall start: ${DINSTALLBEGIN}
640 Action start: ${RIGHTNOW}
644 # extract changelogs and stuff
# Extract changelogs for ftp-master and backports and publish them; guarded by
# $LOCK_CHANGELOG (lockfile -r3: limited number of acquisition retries, per
# lockfile(1)).
645 function changelogs() {
646 if lockfile -r3 $LOCK_CHANGELOG; then
647 log "Extracting changelogs"
648 dak make-changelog -e -a ftp-master
649 [ -f ${exportdir}/changelogs/filelist.yaml ] && xz -f ${exportdir}/changelogs/filelist.yaml
650 mkdir -p ${exportpublic}/changelogs
651 cd ${exportpublic}/changelogs
652 rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
# Fire-and-forget static mirror update; all output is discarded.
653 sudo -H -u staticsync /usr/local/bin/static-update-component metadata.ftp-master.debian.org >/dev/null 2>&1 &
655 dak make-changelog -e -a backports
656 [ -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml ] && xz -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml
657 mkdir -p /srv/backports-master.debian.org/rsync/export/changelogs
658 cd /srv/backports-master.debian.org/rsync/export/changelogs
659 rsync -aHW --delete --delete-after --ignore-errors /srv/backports-master.debian.org/export/changelogs/. .
# NOTE(review): the lock is removed and then a trap to remove it again is set —
# order looks odd; confirm against the full (non-fragment) source.
660 remove_changelog_lock
661 trap remove_changelog_lock EXIT TERM HUP INT QUIT
# Maintain a git tree of dists index files (contents/packages/sources) so
# pdiffs can be derived from it.
665 function gitpdiff() {
666 # Might be that we want to change this to have more than one git repository.
667 # Advantage of one is that we do not need much space in terms of storage in git itself,
668 # git gc is pretty good on our input.
669 # But it might be faster. Well, let's test.
670 log "Adjusting the git tree for pdiffs"
673 # The regex needs the architectures separated with \|
674 garchs=$(dak admin a list|sed -e ':q;N;s/\n/\\|/g;t q')
676 # First, get all the files we want to work on. ../dists/ is a symlink to the real dists/ we
678 # Also, we only want contents, packages and sources.
# NOTE(review): the find regex interpolates $archs, but the variable built
# above is named $garchs — looks like a mismatch; confirm against full source.
679 for file in $(find ../dists/ -regex ".*/\(Contents-\($archs\)\|\(Packages\|Sources\)\).gz"); do
688 # Second, add all there is into git
691 # Maybe we want to make this the same for tag and commit? But well, shouldn't matter
# NOTE(review): ${COMD} is set on a line not visible in this fragment.
693 TAGD=$(date +%Y-%m-%d-%H-%M)
694 git commit -m "Commit of ${COMD}"