2 # Timestamp. Used for dinstall stat graphs
4 echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
# Remove the daily dinstall lock (body and closing brace elided in this view).
function remove_daily_lock() {
# Remove changelog lock
# (body and closing brace elided in this view).
function remove_changelog_lock() {
# Drop every lock we may hold; used as the final cleanup step.
# NOTE(review): $LOCK_DAILY / $LOCK_ACCEPTED are unquoted — acceptable only
# because lock paths contain no whitespace; confirm against their definitions.
function remove_all_locks() {
rm -f $LOCK_DAILY $LOCK_ACCEPTED
# If we error out this one is called, *FOLLOWED* by cleanup above
# NOTE(review): the function definition line and the else/fi of the first
# conditional are elided in this view.
ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
subject="ATTENTION ATTENTION!"
# ${error} == "false" means dinstall continued past the failing stage;
# otherwise the run was interrupted.
if [ "${error}" = "false" ]; then
subject="${subject} (continued)"
subject="${subject} (interrupted)"
subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
# Mail the stage log (if readable) to the ftpmaster cron alias; the whole
# if/else body is piped into mail via "fi |".
if [ -r "${STAGEFILE}.log" ]; then
cat "${STAGEFILE}.log"
echo "file ${STAGEFILE}.log does not exist, sorry"
fi | mail -s "${subject}" -a "X-Debian: DAK" cron@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
41 ########################################################################
42 # the actual dinstall functions follow #
43 ########################################################################
45 # pushing merkels QA user, part one
47 log "Telling QA user that we start dinstall"
48 ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1
51 # Updating various files
53 log "Updating Bugs docu, Mirror list and mailing-lists.txt"
55 $scriptsdir/update-bugdoctxt
56 $scriptsdir/update-mirrorlists
57 $scriptsdir/update-mailingliststxt
58 $scriptsdir/update-pseudopackages.sh
# The first i18n one, syncing new descriptions
# (enclosing function lines plus several else/fi/done closers are elided
# from this view)
log "Synchronizing i18n package descriptions"
# First sync their newest data
cd ${scriptdir}/i18nsync
rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
# Now check if we still know about the packages for which they created the files
# is the timestamp signed by us?
# NOTE(review): `if $(gpgv ...)` only works because gpgv writes nothing to
# stdout; the idiomatic form is `if gpgv ...; then`. Left as-is (doc-only pass).
if $(gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
# now read it. As it's signed by us we are sure the content is what we expect, no need
# to do more here. And we only test -d a directory on it anyway.
TSTAMP=$(cat timestamp)
# do we have the dir still?
if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
# Run the sanity checker over their files before publishing anything.
if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
# Yay, worked, lets copy around
for dir in jessie sid; do
if [ -d dists/${dir}/ ]; then
cd dists/${dir}/main/i18n
rsync -aq --delete --delete-after --exclude Translation-en.bz2 --exclude Translation-*.diff/ . ${ftpdir}/dists/${dir}/main/i18n/.
cd ${scriptdir}/i18nsync
# Failure branches below: check failed, timestamp dir missing, or bad
# signature — each mails the l10n list and publishes nothing.
echo "ARRRR, bad guys, wrong files, ARRR"
echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -a "X-Debian: DAK" -s "You can't keep blaming yourself. Just blame yourself once, and move on." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
# Report override entries for packages no longer in the archive
# (enclosing function lines elided in this view).
log "Checking for cruft in overrides"
# Remove obsolete source/binary associations (body elided in this view).
function dominate() {
log "Removing obsolete source and binary associations"
# Generate the file lists consumed by apt-ftparchive.
function filelist() {
log "Generating file lists for apt-ftparchive"
dak generate-filelist
# Import the Debian and DM keyrings and announce DM keyring changes.
function fingerprints() {
log "Updating fingerprints"
dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
# Import the DM keyring; any change summary is captured in ${OUTFILE}
# (its assignment is elided in this view).
dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
# Only announce when the keyring actually changed (-s: non-empty file).
# The heredoc below is the mail body — do not add comments inside it.
if [ -s "${OUTFILE}" ]; then
/usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
To: <debian-project@lists.debian.org>
Subject: Debian Maintainers Keyring changes
Content-Type: text/plain; charset=utf-8
The following changes to the debian-maintainers keyring have just been activated:
Debian distribution maintenance software,
on behalf of the Keyring maintainers
# Export override data to text files for the public indices.
function overrides() {
log "Writing overrides into text files"
# Rebuild the combined sid override file from the per-component ones.
# (a cd into the override directory is presumably elided before this)
rm -f override.sid.all3
for i in main contrib non-free main.debian-installer; do cat override.sid.$i >> override.sid.all3; done
# Generate the package -> pool-file mapping for each public archive
# (enclosing function lines elided in this view).
log "Generating package / file mapping"
for archive in "${public_archives[@]}"; do
archiveroot="$(get_archiveroot "${archive}")"
dak make-pkg-file-mapping "${archive}" | bzip2 -9 > "${archiveroot}/indices/package-file.map.bz2"
# Generate Packages, Sources and Contents indices per public archive.
function packages() {
log "Generating Packages and Sources files"
for archive in "${public_archives[@]}"; do
log " Generating Packages/Sources for ${archive}"
dak generate-packages-sources2 -a "${archive}"
log " Generating Contents for ${archive}"
dak contents generate -a "${archive}"
# pdiff generation (enclosing function lines elided in this view).
log "Generating pdiff files"
dak generate-index-diffs
# Release file generation per public archive (function lines elided).
log "Generating Release files"
for archive in "${public_archives[@]}"; do
dak generate-releases -a "${archive}"
# Expire old packages/files from suites and the unchecked queue.
function dakcleanup() {
log "Cleanup old packages/files"
# -m 10000: limit per run; see `dak clean-suites` help for semantics.
dak clean-suites -m 10000
dak clean-queues -i "$unchecked"
# Per-archive housekeeping: drop stray core files and publish a recursive
# directory listing (enclosing function lines elided in this view).
for archive in "${public_archives[@]}"; do
archiveroot="$(get_archiveroot "${archive}")"
# NOTE(review): a cd into ${archiveroot} and the ${FILENAME} assignment are
# presumably on lines elided from this view — confirm against the full file.
log "Removing any core files ..."
find -type f -name core -print -delete
log "Checking symlinks ..."
log "Creating recursive directory listing ... "
# TZ=UTC keeps the listing stable across host timezone changes.
TZ=UTC ls -lR | gzip -9c --rsyncable > ${FILENAME}.gz
# Build and publish the Maintainers / Uploaders indices per public archive.
function mkmaintainers() {
log 'Creating Maintainers index ... '
for archive in "${public_archives[@]}"; do
archiveroot="$(get_archiveroot "${archive}")"
indices="${archiveroot}/indices"
if ! [ -d "${indices}" ]; then
# NOTE(review): the mkdir/fi and a cd into ${indices} are elided from this
# view — the gzip redirections below write into the current directory.
dak make-maintainers -a "${archive}" ${scriptdir}/masterfiles/pseudo-packages.maintainers
gzip -9v --rsyncable <Maintainers >Maintainers.gz
gzip -9v --rsyncable <Uploaders >Uploaders.gz
# Publish gzipped copies of the override files into the indices directory.
function copyoverrides() {
log 'Copying override files into public view ...'
for ofile in ${overridedir}/override.{squeeze,wheezy,jessie,sid}.{,extra.}{main,contrib,non-free}*; do
# NOTE(review): ${bname} is assigned on an elided line (presumably
# basename of ${ofile}) — confirm against the full file.
gzip -9cv --rsyncable ${ofile} > ${indices}/${bname}.gz
chmod g+w ${indices}/${bname}.gz
# Build the per-arch / per-suite / "typical" file lists used by partial
# mirrors. Heavily elided in this view: several query=" openers, helper
# definition lines, and loop/subshell closers are missing; the multi-line
# SQL strings below are left byte-identical (no comments inside strings).
function mkfilesindices() {
cd $base/ftp/indices/files/components
log "Querying postgres"
SELECT './pool/' || c.name || '/' || f.filename AS path, a.arch_string AS arch_string
JOIN files_archive_map af ON f.id = af.file_id
JOIN component c ON af.component_id = c.id
JOIN archive ON af.archive_id = archive.id
JOIN architecture a ON b.architecture = a.id)
WHERE archive.name = 'ftp-master'
ORDER BY path, arch_string
psql -At -c "$query" >$ARCHLIST
# Helper (definition line elided): echoes each path plus every parent
# directory, deduplicated via %d.
perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
# Helper (definition line elided, presumably "poolfirst"): reorders stdin so
# ./pool/ paths come first, buffering everything else until EOF.
perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
log "Generating sources list"
sed -n 's/|$//p' $ARCHLIST
find ./dists -maxdepth 1 \! -type d
find ./dists \! -type d | grep "/source/"
) | sort -u | gzip -9 > source.list.gz
# One list per architecture; amd64 is force-included, "all" is excluded.
log "Generating arch lists"
ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
(sed -n "s/|$a$//p" $ARCHLIST
sed -n 's/|all$//p' $ARCHLIST
find ./dists -maxdepth 1 \! -type d
find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
) | sort -u | gzip -9 > arch-$a.list.gz
log "Generating suite lists"
# Helper taking a suite id; the surrounding definition line is elided.
local suite_id="$(printf %d $1)"
SELECT DISTINCT './pool/' || c.name || '/' || f.filename
(SELECT sa.source AS source
FROM src_associations sa
WHERE sa.suite = $suite_id
FROM extra_src_references esr
JOIN bin_associations ba ON esr.bin_id = ba.bin
WHERE ba.suite = $suite_id
SELECT b.source AS source
FROM bin_associations ba
JOIN binaries b ON ba.bin = b.id WHERE ba.suite = $suite_id) s
JOIN dsc_files df ON s.source = df.source
JOIN files f ON df.file = f.id
JOIN files_archive_map af ON f.id = af.file_id
JOIN component c ON af.component_id = c.id
JOIN archive ON af.archive_id = archive.id
WHERE archive.name = 'ftp-master'
psql -F' ' -A -t -c "$query"
SELECT './pool/' || c.name || '/' || f.filename
FROM bin_associations ba
JOIN binaries b ON ba.bin = b.id
JOIN files f ON b.file = f.id
JOIN files_archive_map af ON f.id = af.file_id
JOIN component c ON af.component_id = c.id
JOIN archive ON af.archive_id = archive.id
WHERE ba.suite = $suite_id AND archive.name = 'ftp-master'
psql -F' ' -A -t -c "$query"
# Iterate over every suite that has a dists/ directory on disk.
psql -F' ' -At -c "SELECT id, suite_name FROM suite" |
while read id suite; do
[ -e $base/ftp/dists/$suite ] || continue
distname=$(cd dists; readlink $suite || echo $suite)
find ./dists/$distname \! -type d
for distdir in ./dists/*; do
[ "$(readlink $distdir)" != "$distname" ] || echo $distdir
) | sort -u | gzip -9 > suite-${suite}.list.gz
log "Finding everything on the ftp site to generate sundries"
(cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
zcat *.list.gz | cat - *.list | sort -u |
diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
log "Generating files list"
(echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
cat - sundries.list dists.list project.list docs.list indices.list |
sort -u | poolfirst > ../arch-$a.files
# Translation lists per release, then the combined "typical" mirror list.
for dist in sid jessie; do
find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
(cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-jessie.list.gz) |
sort -u | poolfirst > ../typical.files
# Build md5/dsync index files per public archive and hardlink duplicates.
function mkchecksums() {
local archiveroot dsynclist md5list
for archive in "${public_archives[@]}"; do
archiveroot="$(get_archiveroot "${archive}")"
dsynclist=$dbdir/dsync.${archive}.list
md5list=${archiveroot}/indices/md5sums
log -n "Creating md5 / dsync index file for ${archive}... "
# NOTE(review): a cd into ${archiveroot} is presumably elided before this.
${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
# Best-effort: duplicate hardlinking is allowed to fail.
${bindir}/dsync-flist -q link-dups $dsynclist || true
# Refresh the mirror trace file and update the hardlinked public mirror copy
# (enclosing function lines elided in this view).
log "Regenerating \"public\" mirror/ hardlink fun"
DATE_SERIAL=$(date +"%Y%m%d01")
FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} )
# Serial is max(today's date-based serial, previous serial + 1) so it is
# strictly increasing even across multiple runs per day.
# (the else/fi lines are elided in this view)
if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
SERIAL="$DATE_SERIAL"
SERIAL="$FILESOAPLUS1"
date -u > ${TRACEFILE}
echo "Using dak v1" >> ${TRACEFILE}
echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
# Ugly "hack", but hey, it does what we want.
cp ${TRACEFILE} ${TRACEFILE_BDO}
for archive in "${public_archives[@]}"; do
archiveroot="$(get_archiveroot "${archive}")"
mirrordir="${archiveroot}/../mirror"
# NOTE(review): a cd into ${mirrordir} is presumably elided before the rsync
# (its destination is "."). --link-dest hardlinks unchanged files.
rsync -aH --link-dest ${archiveroot} --delete --delete-after --delete-excluded --exclude Packages.*.new --exclude Sources.*.new --ignore-errors ${archiveroot}/. .
423 log "Expiring old database dumps..."
425 $scriptsdir/expire_dumps -d . -p -f "dump_*"
# Drop transitions that no longer apply.
function transitionsclean() {
log "Removing out of date transitions..."
dak transitions -c -a
# Export the Debian Maintainer upload-permissions page
# (enclosing function lines elided in this view).
log "Updating DM permissions page"
dak acl export-per-source dm >$exportdir/dm.txt
# BTS categorization step (enclosing function lines elided in this view).
log "Categorizing uncategorized bugs filed against ftp.debian.org"
function ddaccess() {
# Tell our dd accessible mirror to sync itself up. Including ftp dir.
log "Trigger dd accessible parts sync including ftp dir"
${scriptsdir}/sync-dd ries-sync ries-sync1 ries-sync2 pool
# Verify the public mirror copy against each InRelease file (SHA1 section),
# then kick off the mirror pushes. Several else/fi/done closers are elided
# from this view.
function mirrorpush() {
log "Checking the public archive copy"
cd ${mirrordir}/dists
# NOTE(review): ${broken} is initialized on a line elided from this view.
for release in $(find . -name "InRelease"); do
echo "Processing: ${release}"
subdir=${release%/InRelease}
# Each SHA1 line gives: checksum, size, relative filename.
while read SHASUM SIZE NAME; do
if ! [ -f "${subdir}/${NAME}" ]; then
bname=$(basename ${NAME})
if [[ "${bname}" =~ ^(Packages|Sources|Translation-[a-zA-Z_]+)$ ]]; then
# We don't keep unpacked files, don't check for their existence.
# We might want to go and check their unpacked shasum, but right now
# I don't care. I believe it should be enough if all the packed shasums
broken=$(( broken + 1 ))
echo "File ${subdir}/${NAME} is missing"
# We do have symlinks in the tree (see the contents files currently).
# So we use "readlink -f" to check the size of the target, as that's basically
# what gen-releases does
fsize=$(stat -c %s $(readlink -f "${subdir}/${NAME}"))
if [ ${fsize} -ne ${SIZE} ]; then
broken=$(( broken + 1 ))
echo "File ${subdir}/${NAME} has size ${fsize}, expected is ${SIZE}"
fshasum=$(sha1sum $(readlink -f "${subdir}/${NAME}"))
fshasum=${fshasum%% *}
if [ "${fshasum}" != "${SHASUM}" ]; then
broken=$(( broken + 1 ))
echo "File ${subdir}/${NAME} has checksum ${fshasum}, expected is ${SHASUM}"
# Feed the loop only the SHA1 block of the InRelease file.
done < <(sed '1,/SHA1:/d' "${release}" | sed '/SHA256:/,$d')
if [ $broken -gt 0 ]; then
log_error "Trouble with the public mirror, found ${broken} errors"
log "Starting the mirrorpush"
date -u > /srv/ftp.debian.org/web/mirrorstart
echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/mirrorstart
# Kick off the main and backports mirror pushes in the background.
sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
sudo -H -u archvsync /home/archvsync/runmirrors -a backports > ~dak/runmirrorsbpo.log 2>&1 &
# Sync the backports mirror (runs as the backports user).
function mirrorpush-backports() {
log "Syncing backports mirror"
sudo -u backports /home/backports/bin/update-archive
# Export control-suite data per suite plus a signed timestamp for the i18n
# people; counterpart of the verification in the first i18n step
# (enclosing function lines and some closers elided in this view).
log "Exporting package data foo for i18n project"
STAMP=$(date "+%Y%m%d%H%M")
mkdir -p ${scriptdir}/i18n/${STAMP}
cd ${scriptdir}/i18n/${STAMP}
for suite in stable testing unstable; do
codename=$(dak admin s show ${suite}|grep '^Codename')
codename=${codename##* }
echo "Codename is ${codename}"
dak control-suite -l ${suite} >${codename}
# Sign the timestamp so the receiving side can verify it came from us.
echo "${STAMP}" > timestamp
gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 473041FA --detach-sign -o timestamp.gpg timestamp
# NOTE(review): a cd is presumably elided before these two lines — the
# symlink target directory depends on the current working directory.
ln -sfT ${scriptdir}/i18n/${STAMP} i18n
# Drop exports older than two days (keep the one just created).
find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
534 log "Updating stats data"
536 $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
537 R --slave --vanilla < $base/misc/ftpstats.R
538 dak stats arch-space > $webdir/arch-space
539 dak stats pkg-nums > $webdir/pkg-nums
# Remove transaction-id files older than ~3 months.
function cleantransactions() {
log "Cleanup transaction ids older than 3 months"
# NOTE(review): a cd into the txid directory is presumably elided — the
# find below starts at ".".
find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -print0 | xargs -0 --no-run-if-empty rm
# Feed the given logfile to the log-statistics tool.
function logstats() {
$masterdir/tools/logs.py "$1"
# save timestamp when we start
# NOTE(review): legacy backticks — $(date ...) is the modern form; left
# as-is (doc-only pass).
function savetimestamp() {
NOW=`date "+%Y.%m.%d-%H:%M:%S"`
echo ${NOW} > "${dbdir}/dinstallstart"
# Mail the full dinstall log to the cron alias; ${NOW} is the run start time.
function maillogfile() {
cat "$LOGFILE" | mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" cron@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
# Rename the current logfile to include the dinstall start timestamp, run
# log statistics on it, then compress it. Falls back to the current time
# (and skips statistics) when the start stamp is missing.
function renamelogfile() {
if [ -f "${dbdir}/dinstallstart" ]; then
NOW=$(cat "${dbdir}/dinstallstart")
mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
logstats "$logdir/dinstall_${NOW}.log"
bzip2 -9 "$logdir/dinstall_${NOW}.log"
# (the else line is elided in this view)
error "Problem, I don't know when dinstall started, unable to do log statistics."
NOW=`date "+%Y.%m.%d-%H:%M:%S"`
mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
bzip2 -9 "$logdir/dinstall_${NOW}.log"
# Publish the list of source packages currently in testing.
function testingsourcelist() {
dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
# do a last run of process-unchecked before dinstall is on.
function process_unchecked() {
log "Processing the unchecked queue"
# -p: run without taking the daily lock (we already hold it at this point
# of dinstall) — NOTE(review): consumer of this flag is elided; confirm.
UNCHECKED_WITHOUT_LOCK="-p"
590 # Function to update a "statefile" telling people what we are doing
593 # This should be called with the argument(s)
594 # - Status name we want to show.
597 RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
598 cat >"${DINSTALLSTATE}" <<EOF
599 Dinstall start: ${DINSTALLBEGIN}
601 Action start: ${RIGHTNOW}
# extract changelogs and stuff
# Extract changelogs for ftp-master and backports and publish them via the
# static-update machinery. Runs only if the changelog lock can be taken
# (up to 3 retries).
function changelogs() {
if lockfile -r3 $LOCK_CHANGELOG; then
log "Extracting changelogs"
dak make-changelog -e -a ftp-master
[ -f ${exportdir}/changelogs/filelist.yaml ] && xz -f ${exportdir}/changelogs/filelist.yaml
mkdir -p ${exportpublic}/changelogs
cd ${exportpublic}/changelogs
rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
sudo -H -u staticsync /usr/local/bin/static-update-component metadata.ftp-master.debian.org >/dev/null 2>&1 &
dak make-changelog -e -a backports
[ -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml ] && xz -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml
mkdir -p /srv/backports-master.debian.org/rsync/export/changelogs
cd /srv/backports-master.debian.org/rsync/export/changelogs
rsync -aHW --delete --delete-after --ignore-errors /srv/backports-master.debian.org/export/changelogs/. .
remove_changelog_lock
# NOTE(review): the trap is installed AFTER the lock has already been
# removed — belt-and-braces cleanup, but confirm the ordering is intended.
trap remove_changelog_lock EXIT TERM HUP INT QUIT
# Check the pdiff-relevant index files into a git tree. Interior lines of
# this function (cd, git add/tag, loop closers) are elided in this view.
function gitpdiff() {
# Might be that we want to change this to have more than one git repository.
# Advantage of one is that we do not need much space in terms of storage in git itself,
# git gc is pretty good on our input.
# But it might be faster. Well, lets test.
log "Adjusting the git tree for pdiffs"
# The regex needs the architectures separated with \|
garchs=$(dak admin a list|sed -e ':q;N;s/\n/\\|/g;t q')
# First, get all the files we want to work on. ../dists/ is a symlink to the real dists/ we
# Also, we only want contents, packages and sources.
# NOTE(review): the find regex interpolates ${archs}, but the variable built
# above is ${garchs} — one of the two names looks wrong; confirm against the
# full file before changing (doc-only pass).
for file in $(find ../dists/ -regex ".*/\(Contents-\($archs\)\|\(Packages\|Sources\)\).gz"); do
# Second, add all there is into git
# Maybe we want to make this the same for tag and commit? But well, shouldn't matter
TAGD=$(date +%Y-%m-%d-%H-%M)
# NOTE(review): ${COMD} is assigned on a line elided from this view.
git commit -m "Commit of ${COMD}"