2 # Timestamp. Used for dinstall stat graphs
4 echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
# Remove the daily dinstall lock (body elided in this view).
function remove_daily_lock() {
# Remove changelog lock
# Remove the changelog-extraction lock (body elided in this view).
function remove_changelog_lock() {
18 function remove_all_locks() {
19 rm -f $LOCK_DAILY $LOCK_ACCEPTED
# If we error out this one is called, *FOLLOWED* by cleanup above
# NOTE(review): fragment — the function header and the else/fi lines of both
# conditionals are elided here. Builds a subject line describing where dinstall
# failed and mails the failing stage's log (or a placeholder) to the cron alias.
ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
subject="ATTENTION ATTENTION!"
if [ "${error}" = "false" ]; then
subject="${subject} (continued)"
subject="${subject} (interrupted)"
subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
if [ -r "${STAGEFILE}.log" ]; then
cat "${STAGEFILE}.log"
echo "file ${STAGEFILE}.log does not exist, sorry"
fi | mail -s "${subject}" -a "X-Debian: DAK" cron@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
41 ########################################################################
42 # the actual dinstall functions follow #
43 ########################################################################
45 # pushing merkels QA user, part one
47 log "Telling QA user that we start dinstall"
48 ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1
51 # Updating various files
53 log "Updating Bugs docu, Mirror list and mailing-lists.txt"
55 $scriptsdir/update-bugdoctxt
56 $scriptsdir/update-mirrorlists
57 $scriptsdir/update-mailingliststxt
58 $scriptsdir/update-pseudopackages.sh
61 # The first i18n one, syncing new descriptions
63 log "Synchronizing i18n package descriptions"
64 # First sync their newest data
65 cd ${scriptdir}/i18nsync
66 rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
68 # Now check if we still know about the packages for which they created the files
69 # is the timestamp signed by us?
70 if $(gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
# now read it. As it's signed by us we are sure the content is what we expect, no need
# to do more here. And we only test -d a directory on it anyway.
# NOTE(review): fragment — the matching else/fi/done lines of the nested
# conditionals are elided in this view. On success the verified translation
# files are rsynced into the public dists trees; each failure branch mails
# debian-l10n-devel instead.
TSTAMP=$(cat timestamp)
# do we have the dir still?
if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
# Yay, worked, lets copy around
for dir in jessie sid; do
if [ -d dists/${dir}/ ]; then
cd dists/${dir}/main/i18n
rsync -aq --delete --delete-after --exclude Translation-en.bz2 --exclude Translation-*.diff/ . ${ftpdir}/dists/${dir}/main/i18n/.
cd ${scriptdir}/i18nsync
echo "ARRRR, bad guys, wrong files, ARRR"
echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -a "X-Debian: DAK" -s "You can't keep blaming yourself. Just blame yourself once, and move on." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
# NOTE(review): fragments — bodies/closers of these helpers are partly elided.
log "Checking for cruft in overrides"
# Drop source/binary associations that newer versions have made obsolete.
function dominate() {
log "Removing obsolete source and binary associations"
# Regenerate the per-suite file lists consumed by apt-ftparchive.
function filelist() {
log "Generating file lists for apt-ftparchive"
dak generate-filelist
# Import the debian and debian-maintainers keyrings; if the DM import produced
# output (key changes), mail the diff to debian-project. NOTE(review): fragment —
# intermediate lines and the heredoc terminator are elided in this view; the
# heredoc body below is runtime mail text and must not be altered.
function fingerprints() {
log "Updating fingerprints"
dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
# -s: only mail when the change report is non-empty.
if [ -s "${OUTFILE}" ]; then
/usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
To: <debian-project@lists.debian.org>
Subject: Debian Maintainers Keyring changes
Content-Type: text/plain; charset=utf-8
The following changes to the debian-maintainers keyring have just been activated:
Debian distribution maintenance software,
on behalf of the Keyring maintainers
# Dump override data to text files and build the combined override.sid.all3.
# NOTE(review): fragment — intermediate lines elided; the mpfm-style loop below
# belongs to a separate helper whose header is not visible here.
function overrides() {
log "Writing overrides into text files"
rm -f override.sid.all3
for i in main contrib non-free main.debian-installer; do cat override.sid.$i >> override.sid.all3; done
log "Generating package / file mapping"
for archive in "${public_archives[@]}"; do
archiveroot="$(get_archiveroot "${archive}")"
dak make-pkg-file-mapping "${archive}" | bzip2 -9 > "${archiveroot}/indices/package-file.map.bz2"
# Generate Packages/Sources/Contents, pdiffs, i18n/Index and Release files for
# every public archive. NOTE(review): fragments — loop closers and some function
# headers are elided in this view.
function packages() {
log "Generating Packages and Sources files"
for archive in "${public_archives[@]}"; do
dak generate-packages-sources2 -a "${archive}"
dak contents generate -a "${archive}"
log "Generating pdiff files"
dak generate-index-diffs
# XXX: disable once we can remove i18n/Index (#649314)
log "Generating i18n/Index"
for dist in testing unstable experimental proposed-updates testing-proposed-updates; do
$scriptsdir/generate-i18n-Index $dist;
log "Generating Release files"
for archive in "${public_archives[@]}"; do
dak generate-releases -a "${archive}"
# Expire old suite/queue data, then (per archive) tidy stray core files, check
# symlinks and publish a recursive ls-lR listing. NOTE(review): fragment —
# several intermediate lines are elided in this view.
function dakcleanup() {
log "Cleanup old packages/files"
dak clean-suites -m 10000
dak clean-queues -i "$unchecked"
for archive in "${public_archives[@]}"; do
archiveroot="$(get_archiveroot "${archive}")"
log "Removing any core files ..."
find -type f -name core -print -delete
log "Checking symlinks ..."
log "Creating recursive directory listing ... "
# TZ=UTC keeps the listing reproducible; --rsyncable eases mirror deltas.
TZ=UTC ls -lR | gzip -9c --rsyncable > ${FILENAME}.gz
# Build the Maintainers/Uploaders indices per public archive and compress them.
# NOTE(review): fragment — the else/fi of the indices check and the loop/function
# closers are elided in this view.
function mkmaintainers() {
log 'Creating Maintainers index ... '
for archive in "${public_archives[@]}"; do
archiveroot="$(get_archiveroot "${archive}")"
indices="${archiveroot}/indices"
if ! [ -d "${indices}" ]; then
dak make-maintainers -a "${archive}" ${scriptdir}/masterfiles/pseudo-packages.maintainers
gzip -9v --rsyncable <Maintainers >Maintainers.gz
gzip -9v --rsyncable <Uploaders >Uploaders.gz
# Publish gzip'd copies of the override files into the indices directory.
# NOTE(review): fragment — the bname assignment and loop/function closers are
# elided in this view.
function copyoverrides() {
log 'Copying override files into public view ...'
for ofile in ${overridedir}/override.{squeeze,wheezy,jessie,sid}.{,extra.}{main,contrib,non-free}*; do
gzip -9cv --rsyncable ${ofile} > ${indices}/${bname}.gz
chmod g+w ${indices}/${bname}.gz
# Build per-architecture and source file lists under indices/files/components.
# NOTE(review): fragment — the SQL below is the body of a quoted $query string
# whose opening/closing quote lines are elided, so no commentary is interleaved
# with it; several loop bodies are likewise elided.
function mkfilesindices() {
cd $base/ftp/indices/files/components
log "Querying postgres"
SELECT './pool/' || c.name || '/' || f.filename AS path, a.arch_string AS arch_string
JOIN files_archive_map af ON f.id = af.file_id
JOIN component c ON af.component_id = c.id
JOIN archive ON af.archive_id = archive.id
JOIN architecture a ON b.architecture = a.id)
WHERE archive.name = 'ftp-master'
ORDER BY path, arch_string
psql -At -c "$query" >$ARCHLIST
perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
log "Generating sources list"
sed -n 's/|$//p' $ARCHLIST
find ./dists -maxdepth 1 \! -type d
find ./dists \! -type d | grep "/source/"
) | sort -u | gzip -9 > source.list.gz
log "Generating arch lists"
ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
(sed -n "s/|$a$//p" $ARCHLIST
sed -n 's/|all$//p' $ARCHLIST
find ./dists -maxdepth 1 \! -type d
find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
) | sort -u | gzip -9 > arch-$a.list.gz
# Per-suite file lists: for every suite known to the DB, collect its pool files
# (sources incl. extra source refs, plus binaries) and its dists/ tree into
# suite-<name>.list.gz. NOTE(review): fragment — the two SQL blocks below are
# bodies of quoted $query strings whose assignment/closing lines are elided, so
# no commentary is interleaved with them.
log "Generating suite lists"
local suite_id="$(printf %d $1)"
SELECT DISTINCT './pool/' || c.name || '/' || f.filename
(SELECT sa.source AS source
FROM src_associations sa
WHERE sa.suite = $suite_id
FROM extra_src_references esr
JOIN bin_associations ba ON esr.bin_id = ba.bin
WHERE ba.suite = $suite_id
SELECT b.source AS source
FROM bin_associations ba
JOIN binaries b ON ba.bin = b.id WHERE ba.suite = $suite_id) s
JOIN dsc_files df ON s.source = df.source
JOIN files f ON df.file = f.id
JOIN files_archive_map af ON f.id = af.file_id
JOIN component c ON af.component_id = c.id
JOIN archive ON af.archive_id = archive.id
WHERE archive.name = 'ftp-master'
psql -F' ' -A -t -c "$query"
SELECT './pool/' || c.name || '/' || f.filename
FROM bin_associations ba
JOIN binaries b ON ba.bin = b.id
JOIN files f ON b.file = f.id
JOIN files_archive_map af ON f.id = af.file_id
JOIN component c ON af.component_id = c.id
JOIN archive ON af.archive_id = archive.id
WHERE ba.suite = $suite_id AND archive.name = 'ftp-master'
psql -F' ' -A -t -c "$query"
psql -F' ' -At -c "SELECT id, suite_name FROM suite" |
while read id suite; do
[ -e $base/ftp/dists/$suite ] || continue
distname=$(cd dists; readlink $suite || echo $suite)
find ./dists/$distname \! -type d
for distdir in ./dists/*; do
[ "$(readlink $distdir)" != "$distname" ] || echo $distdir
) | sort -u | gzip -9 > suite-${suite}.list.gz
# Derive "sundries" (everything on the ftp site not covered by any list), then
# combine the pieces into per-arch file lists and translation lists.
# NOTE(review): fragment — several loop headers/closers are elided in this view.
log "Finding everything on the ftp site to generate sundries"
(cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
zcat *.list.gz | cat - *.list | sort -u |
# diff against the full site listing; lines only in ARCHLIST are the sundries.
diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
log "Generating files list"
(echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
cat - sundries.list dists.list project.list docs.list indices.list |
sort -u | poolfirst > ../arch-$a.files
for dist in sid jessie; do
find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
(cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-jessie.list.gz) |
sort -u | poolfirst > ../typical.files
# Create md5 / dsync index files per public archive and hardlink duplicates.
# NOTE(review): fragment — loop/function closers are elided in this view.
function mkchecksums() {
local archiveroot dsynclist md5list
for archive in "${public_archives[@]}"; do
archiveroot="$(get_archiveroot "${archive}")"
dsynclist=$dbdir/dsync.${archive}.list
md5list=${archiveroot}/indices/md5sums
log -n "Creating md5 / dsync index file for ${archive}... "
${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
# link-dups may legitimately find nothing; tolerate a non-zero exit.
${bindir}/dsync-flist -q link-dups $dsynclist || true
# Refresh the hardlinked public mirror/ copy and bump the archive trace serial:
# use today's date-based serial unless the previous serial + 1 is already higher.
# NOTE(review): fragment — the else/fi around the serial choice and the loop
# closer are elided in this view.
log "Regenerating \"public\" mirror/ hardlink fun"
DATE_SERIAL=$(date +"%Y%m%d01")
FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} )
if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
SERIAL="$DATE_SERIAL"
SERIAL="$FILESOAPLUS1"
date -u > ${TRACEFILE}
echo "Using dak v1" >> ${TRACEFILE}
echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
# Ugly "hack", but hey, it does what we want.
cp ${TRACEFILE} ${TRACEFILE_BDO}
for archive in "${public_archives[@]}"; do
archiveroot="$(get_archiveroot "${archive}")"
mirrordir="${archiveroot}/../mirror"
# --link-dest hardlinks unchanged files against the archive root to save space.
rsync -aH --link-dest ${archiveroot} --delete --delete-after --delete-excluded --exclude Packages.*.new --exclude Sources.*.new --ignore-errors ${archiveroot}/. .
# Assorted maintenance helpers: expire DB dumps, clean stale transitions,
# refresh the DM permissions export, and trigger the dd-accessible mirror sync.
# NOTE(review): fragments — several function headers/closers are elided here.
log "Expiring old database dumps..."
$scriptsdir/expire_dumps -d . -p -f "dump_*"
function transitionsclean() {
log "Removing out of date transitions..."
dak transitions -c -a
log "Updating DM permissions page"
dak acl export-per-source dm >$exportdir/dm.txt
log "Categorizing uncategorized bugs filed against ftp.debian.org"
function ddaccess() {
# Tell our dd accessible mirror to sync itself up. Including ftp dir.
log "Trigger dd accessible parts sync including ftp dir"
${scriptsdir}/sync-dd ries-sync ries-sync1 ries-sync2 pool
# Verify the public mirror copy against every InRelease file (existence, size,
# SHA1 of each listed entry), abort on any discrepancy, then kick off the
# mirror push (main archive and backports, both in the background).
# NOTE(review): fragment — several else/fi/done lines are elided in this view.
function mirrorpush() {
log "Checking the public archive copy"
cd ${mirrordir}/dists
for release in $(find . -name "InRelease"); do
echo "Processing: ${release}"
subdir=${release%/InRelease}
while read SHASUM SIZE NAME; do
if ! [ -f "${subdir}/${NAME}" ]; then
bname=$(basename ${NAME})
if [[ "${bname}" =~ ^(Packages|Sources|Translation-[a-zA-Z_]+)$ ]]; then
# We don't keep unpacked files, don't check for their existence.
# We might want to go and check their unpacked shasum, but right now
# I don't care. I believe it should be enough if all the packed shasums
broken=$(( broken + 1 ))
echo "File ${subdir}/${NAME} is missing"
# We do have symlinks in the tree (see the contents files currently).
# So we use "readlink -f" to check the size of the target, as that's basically
# what gen-releases does
fsize=$(stat -c %s $(readlink -f "${subdir}/${NAME}"))
if [ ${fsize} -ne ${SIZE} ]; then
broken=$(( broken + 1 ))
echo "File ${subdir}/${NAME} has size ${fsize}, expected is ${SIZE}"
fshasum=$(sha1sum $(readlink -f "${subdir}/${NAME}"))
fshasum=${fshasum%% *}
if [ "${fshasum}" != "${SHASUM}" ]; then
broken=$(( broken + 1 ))
echo "File ${subdir}/${NAME} has checksum ${fshasum}, expected is ${SHASUM}"
# Feed the loop only the SHA1 section of the InRelease file.
done < <(sed '1,/SHA1:/d' "${release}" | sed '/SHA256:/,$d')
if [ $broken -gt 0 ]; then
log_error "Trouble with the public mirror, found ${broken} errors"
log "Starting the mirrorpush"
date -u > /srv/ftp.debian.org/web/mirrorstart
echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/mirrorstart
sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
sudo -H -u archvsync /home/archvsync/runmirrors -a backports > ~dak/runmirrorsbpo.log 2>&1 &
# Export per-suite package lists for the i18n project into a timestamped
# directory, sign the timestamp (the i18n1 step above verifies this signature),
# repoint the i18n symlink, and prune exports older than two days.
# NOTE(review): fragment — function header and the for-loop closer are elided.
log "Exporting package data foo for i18n project"
STAMP=$(date "+%Y%m%d%H%M")
mkdir -p ${scriptdir}/i18n/${STAMP}
cd ${scriptdir}/i18n/${STAMP}
for suite in stable testing unstable; do
codename=$(dak admin s show ${suite}|grep '^Codename')
# Keep only the word after the last space, i.e. the codename itself.
codename=${codename##* }
echo "Codename is ${codename}"
dak control-suite -l ${suite} >${codename}
echo "${STAMP}" > timestamp
gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 473041FA --detach-sign -o timestamp.gpg timestamp
ln -sfT ${scriptdir}/i18n/${STAMP} i18n
find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
# Refresh ftp traffic statistics (data + R plots + dak stat exports), expire
# old transaction-id files, and wrap the log-stats tool.
# NOTE(review): fragments — some function headers/closers are elided here.
log "Updating stats data"
$scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
R --slave --vanilla < $base/misc/ftpstats.R
dak stats arch-space > $webdir/arch-space
dak stats pkg-nums > $webdir/pkg-nums
function cleantransactions() {
log "Cleanup transaction ids older than 3 months"
# txid files older than ~3 months (90 days) are removed, NUL-safe via -print0.
find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -print0 | xargs -0 --no-run-if-empty rm
function logstats() {
$masterdir/tools/logs.py "$1"
# save timestamp when we start
# Records dinstall's start time so renamelogfile() can name the log after it.
function savetimestamp() {
# $(...) replaces the legacy backticks: it nests cleanly and is easier to read.
NOW=$(date "+%Y.%m.%d-%H:%M:%S")
echo ${NOW} > "${dbdir}/dinstallstart"
# Mail the complete dinstall log to the ftp-master cron alias.
function maillogfile() {
cat "$LOGFILE" | mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" cron@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
564 function renamelogfile() {
565 if [ -f "${dbdir}/dinstallstart" ]; then
566 NOW=$(cat "${dbdir}/dinstallstart")
568 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
569 logstats "$logdir/dinstall_${NOW}.log"
570 bzip2 -9 "$logdir/dinstall_${NOW}.log"
572 error "Problem, I don't know when dinstall started, unable to do log statistics."
573 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
575 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
576 bzip2 -9 "$logdir/dinstall_${NOW}.log"
580 function testingsourcelist() {
581 dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
# do a last run of process-unchecked before dinstall is on.
function process_unchecked() {
log "Processing the unchecked queue"
# -p: run without taking the regular unchecked lock (dinstall already owns it).
# NOTE(review): the command consuming this flag is elided in this view.
UNCHECKED_WITHOUT_LOCK="-p"
592 # Function to update a "statefile" telling people what we are doing
595 # This should be called with the argument(s)
596 # - Status name we want to show.
599 RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
600 cat >"${DINSTALLSTATE}" <<EOF
601 Dinstall start: ${DINSTALLBEGIN}
603 Action start: ${RIGHTNOW}
# extract changelogs and stuff
# Extract changelog exports for ftp-master and backports into their public
# rsync locations, then push the static component. Guarded by LOCK_CHANGELOG
# (lockfile -r3: three acquisition attempts). NOTE(review): fragment — the
# else/fi and closing brace are elided; also note the trap is installed on the
# line *after* remove_changelog_lock runs, which looks inverted — the elided
# context may explain it, verify against the full file.
function changelogs() {
if lockfile -r3 $LOCK_CHANGELOG; then
log "Extracting changelogs"
dak make-changelog -e -a ftp-master
[ -f ${exportdir}/changelogs/filelist.yaml ] && xz -f ${exportdir}/changelogs/filelist.yaml
mkdir -p ${exportpublic}/changelogs
cd ${exportpublic}/changelogs
rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
sudo -H -u staticsync /usr/local/bin/static-update-component ftp-master.metadata.debian.org >/dev/null 2>&1 &
dak make-changelog -e -a backports
[ -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml ] && xz -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml
mkdir -p /srv/backports-master.debian.org/rsync/export/changelogs
cd /srv/backports-master.debian.org/rsync/export/changelogs
rsync -aHW --delete --delete-after --ignore-errors /srv/backports-master.debian.org/export/changelogs/. .
remove_changelog_lock
trap remove_changelog_lock EXIT TERM HUP INT QUIT
# Maintain a git tree of the pdiff-relevant index files (Contents/Packages/
# Sources) so their history can be inspected. NOTE(review): fragment — the loop
# body and several lines are elided; also the regex below uses $archs while the
# visible assignment sets garchs — confirm against the full file which variable
# is intended.
function gitpdiff() {
# Might be that we want to change this to have more than one git repository.
# Advantage of one is that we do not need much space in terms of storage in git itself,
# git gc is pretty good on our input.
# But it might be faster. Well, lets test.
log "Adjusting the git tree for pdiffs"
# The regex needs the architectures separated with \|
garchs=$(dak admin a list|sed -e ':q;N;s/\n/\\|/g;t q')
# First, get all the files we want to work on. ../dists/ is a symlink to the real dists/ we
# Also, we only want contents, packages and sources.
for file in $(find ../dists/ -regex ".*/\(Contents-\($archs\)\|\(Packages\|Sources\)\).gz"); do
# Second, add all there is into git
# Maybe we want to make this the same for tag and commit? But well, shouldn't matter
TAGD=$(date +%Y-%m-%d-%H-%M)
git commit -m "Commit of ${COMD}"