# NOTE(review): fragment — the enclosing function definition (presumably the
# "ts" timestamp helper) is elided from this excerpt; $1 is a stage label.
2 # Timestamp. Used for dinstall stat graphs
4 echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
# Remove the daily (dinstall) lock file. Body and closing brace are elided in
# this excerpt — presumably "rm -f $LOCK_DAILY"; TODO confirm in full source.
8 function remove_daily_lock() {
12 # Remove changelog lock
# Body and closing brace elided — presumably "rm -f $LOCK_CHANGELOG"
# (this lock is taken by changelogs() below via lockfile). TODO confirm.
13 function remove_changelog_lock() {
# Drop both the daily and the accepted lock in one go (used on exit paths).
# NOTE(review): $LOCK_DAILY / $LOCK_ACCEPTED are unquoted — safe only while
# the lock paths contain no whitespace/globs; consider quoting in full source.
18 function remove_all_locks() {
19 rm -f $LOCK_DAILY $LOCK_ACCEPTED
22 # If we error out this one is called, *FOLLOWED* by cleanup above
# Error handler (function header elided): mails the failing stage's log file
# to cron@ftp-master with a subject indicating whether dinstall can continue.
# Expects ${error} ("false" => continued) and ${STAGEFILE} to be set by caller.
24 ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
26 subject="ATTENTION ATTENTION!"
27 if [ "${error}" = "false" ]; then
28 subject="${subject} (continued)"
# (else-branch marker elided between these two lines)
30 subject="${subject} (interrupted)"
32 subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
# Mail the stage log if readable, otherwise a placeholder note; the whole
# if/else/fi is piped into mail(1) as the message body.
34 if [ -r "${STAGEFILE}.log" ]; then
35 cat "${STAGEFILE}.log"
# (else marker elided)
37 echo "file ${STAGEFILE}.log does not exist, sorry"
38 fi | mail -s "${subject}" -a "X-Debian: DAK" cron@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
41 ########################################################################
42 # the actual dinstall functions follow #
43 ########################################################################
45 # pushing merkels QA user, part one
# Fragment (function def elided, likely "merkel1" or similar): pings the QA
# host over ssh with a dedicated key so QA knows dinstall has started.
47 log "Telling QA user that we start dinstall"
48 ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1
51 # Updating various files
# Fragment (function def elided): regenerates static documentation files by
# running the per-topic helper scripts from $scriptsdir.
53 log "Updating Bugs docu, Mirror list and mailing-lists.txt"
# (a cd into the working dir is likely elided here — TODO confirm)
55 $scriptsdir/update-bugdoctxt
56 $scriptsdir/update-mirrorlists
57 $scriptsdir/update-mailingliststxt
58 $scriptsdir/update-pseudopackages.sh
61 # The first i18n one, syncing new descriptions
# Fragment (function def elided, likely "i18n1"): pulls new DDTP translation
# data, verifies the shipped timestamp against our own keyring, sanity-checks
# the files, and if all is well rsyncs the Translation files into the archive.
# Each failure path mails debian-l10n-devel instead of touching the archive.
63 log "Synchronizing i18n package descriptions"
64 # First sync their newest data
65 cd ${scriptdir}/i18nsync
66 rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
68 # Now check if we still know about the packages for which they created the files
69 # is the timestamp signed by us?
# NOTE(review): $(...) around gpgv is redundant command substitution of the
# exit status — "if gpgv ..." would be the plain form; left as-is (fragment).
70 if $(gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
71 # now read it. As its signed by us we are sure the content is what we expect, no need
72 # to do more here. And we only test -d a directory on it anyway.
73 TSTAMP=$(cat timestamp)
74 # do we have the dir still?
75 if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
# Run the consistency checker over the synced data before publishing.
77 if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
78 # Yay, worked, lets copy around
79 for dir in wheezy sid; do
80 if [ -d dists/${dir}/ ]; then
81 cd dists/${dir}/main/i18n
82 rsync -aq --delete --delete-after --exclude Translation-en.bz2 --exclude Translation-*.diff/ . ${ftpdir}/dists/${dir}/main/i18n/.
# (fi / done closers elided between these lines)
84 cd ${scriptdir}/i18nsync
# else-branch: checker rejected the files — complain loudly by mail.
87 echo "ARRRR, bad guys, wrong files, ARRR"
88 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
# else-branch: timestamp directory vanished — skip update, notify by mail.
91 echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
92 echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
# else-branch: signature verification failed — refuse the data, notify by mail.
95 echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
96 echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -a "X-Debian: DAK" -s "You can't keep blaming yourself. Just blame yourself once, and move on." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
# Fragment (function def and body elided, likely "cruft"): checks override
# files for stale entries — presumably via "dak check-overrides". TODO confirm.
101 log "Checking for cruft in overrides"
# Drop obsolete source/binary associations; body line ("dak dominate") and
# closing brace are elided in this excerpt.
105 function dominate() {
106 log "Removing obsolete source and binary associations"
# Generate the file lists apt-ftparchive consumes (closing brace elided).
110 function filelist() {
111 log "Generating file lists for apt-ftparchive"
112 dak generate-filelist
# Re-import the Debian and DM keyrings; if the DM import reported changes
# (non-empty ${OUTFILE}), announce them to debian-project by mail.
115 function fingerprints() {
116 log "Updating fingerprints"
117 dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
# (OUTFILE=$(mktemp ...) presumably elided here — TODO confirm)
120 dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
122 if [ -s "${OUTFILE}" ]; then
# Heredoc mail body; the ${OUTFILE} interpolation and EOF terminator, plus the
# rm of the temp file and the fi/}, are elided from this excerpt.
123 /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
124 From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
125 To: <debian-project@lists.debian.org>
126 Subject: Debian Maintainers Keyring changes
127 Content-Type: text/plain; charset=utf-8
131 The following changes to the debian-maintainers keyring have just been activated:
135 Debian distribution maintenance software,
136 on behalf of the Keyring maintainers
# Export override data to text files and build the combined override.sid.all3.
143 function overrides() {
144 log "Writing overrides into text files"
# (cd into $overridedir and the "dak make-overrides" call presumably elided)
149 rm -f override.sid.all3
150 for i in main contrib non-free main.debian-installer; do cat override.sid.$i >> override.sid.all3; done
# Fragment (function def elided, likely "mpfm"): per public archive, write a
# bzip2-compressed package→file mapping into that archive's indices directory.
156 log "Generating package / file mapping"
157 for archive in "${public_archives[@]}"; do
158 archiveroot="$(get_archiveroot "${archive}")"
159 dak make-pkg-file-mapping "${archive}" | bzip2 -9 > "${archiveroot}/indices/package-file.map.bz2"
# Generate Packages/Sources and Contents indices for every public archive
# (loop "done" and closing brace elided in this excerpt).
163 function packages() {
164 log "Generating Packages and Sources files"
165 for archive in "${public_archives[@]}"; do
166 dak generate-packages-sources2 -a "${archive}"
167 dak contents generate -a "${archive}"
# Fragment (function def elided, likely "pdiff"): build pdiff (index diff)
# files for incremental mirror updates.
172 log "Generating pdiff files"
173 dak generate-index-diffs
177 # XXX: disable once we can remove i18n/Index (#649314)
# Fragment (function def elided): regenerate the legacy i18n/Index file for
# each affected suite until bug #649314 lets us drop it.
178 log "Generating i18n/Index"
181 for dist in testing unstable experimental proposed-updates testing-proposed-updates; do
182 $scriptsdir/generate-i18n-Index $dist;
# Fragment (function def elided, likely "release"): write (In)Release files
# for every public archive.
186 log "Generating Release files"
187 for archive in "${public_archives[@]}"; do
188 dak generate-releases -a "${archive}"
# Expire old packages/files from suites and clean the unchecked queue
# (closing brace elided). -m 10000 caps work per run; -i names the queue dir.
192 function dakcleanup() {
193 log "Cleanup old packages/files"
194 dak clean-suites -m 10000
195 dak clean-queues -i "$unchecked"
198 function buildd_dir() {
199 # Rebuilt the buildd dir to avoid long times of 403
# Rest of the body (the actual regeneration using $STAMP) is elided.
200 log "Regenerating the buildd incoming dir"
201 STAMP=$(date "+%Y%m%d%H%M")
# Fragment (function def elided, likely "mirrordirs"/"mklslar"): per public
# archive, tidy stray core files, check symlinks, and publish a gzipped
# recursive ls listing (presumably ls-lR; FILENAME is set in elided lines).
209 for archive in "${public_archives[@]}"; do
210 archiveroot="$(get_archiveroot "${archive}")"
# (cd "$archiveroot" presumably elided here — TODO confirm)
213 log "Removing any core files ..."
214 find -type f -name core -print -delete
216 log "Checking symlinks ..."
# (symlinks -r or similar elided)
219 log "Creating recursive directory listing ... "
# TZ=UTC keeps timestamps in the listing reproducible across hosts.
221 TZ=UTC ls -lR | gzip -9c --rsyncable > ${FILENAME}.gz
# Build the Maintainers/Uploaders indices per public archive and compress
# them into that archive's indices directory (several lines elided).
225 function mkmaintainers() {
229 log 'Creating Maintainers index ... '
231 for archive in "${public_archives[@]}"; do
232 archiveroot="$(get_archiveroot "${archive}")"
233 indices="${archiveroot}/indices"
234 if ! [ -d "${indices}" ]; then
# (mkdir of ${indices} and fi elided; a cd into ${indices} is likely too)
239 dak make-maintainers -a "${archive}" ${scriptdir}/masterfiles/pseudo-packages.maintainers
# --rsyncable keeps gzip output delta-friendly for mirrors.
240 gzip -9v --rsyncable <Maintainers >Maintainers.gz
241 gzip -9v --rsyncable <Uploaders >Uploaders.gz
# Publish gzipped copies of the per-suite override files into ${indices};
# bname assignment, loop "done" and closing brace are elided.
245 function copyoverrides() {
246 log 'Copying override files into public view ...'
248 for ofile in ${overridedir}/override.{squeeze,wheezy,jessie,sid}.{,extra.}{main,contrib,non-free}*; do
# (bname=$(basename ${ofile}) presumably elided — TODO confirm)
250 gzip -9cv --rsyncable ${ofile} > ${indices}/${bname}.gz
251 chmod g+w ${indices}/${bname}.gz
# Build the per-arch / per-suite / per-source file lists under
# ftp/indices/files/components used by partial mirrors. Heavily elided here:
# SQL heredoc delimiters, helper definitions (e.g. poolfirst), umask/set
# calls and several closers are missing from this excerpt.
255 function mkfilesindices() {
258 cd $base/ftp/indices/files/components
# Query the projectb DB for every pool path plus the architecture of the
# binary it belongs to; result is cached in $ARCHLIST (set in elided lines).
262 log "Querying postgres"
264 SELECT './pool/' || c.name || '/' || f.filename AS path, a.arch_string AS arch_string
266 JOIN files_archive_map af ON f.id = af.file_id
267 JOIN component c ON af.component_id = c.id
268 JOIN archive ON af.archive_id = archive.id
271 JOIN architecture a ON b.architecture = a.id)
273 WHERE archive.name = 'ftp-master'
274 ORDER BY path, arch_string
276 psql -At -c "$query" >$ARCHLIST
# Helper pipelines (names elided): the first emits every parent directory of
# each path exactly once; the second reorders output so ./pool/ paths come
# before everything else.
279 perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
282 perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
# Source list: pool paths without an arch (trailing "|") plus dists files
# living under a /source/ directory.
285 log "Generating sources list"
287 sed -n 's/|$//p' $ARCHLIST
289 find ./dists -maxdepth 1 \! -type d
290 find ./dists \! -type d | grep "/source/"
291 ) | sort -u | gzip -9 > source.list.gz
# Arch lists: one list per architecture (plus "all" entries), combining pool
# paths and the dists files matching that arch's well-known locations.
293 log "Generating arch lists"
295 ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
297 (sed -n "s/|$a$//p" $ARCHLIST
298 sed -n 's/|all$//p' $ARCHLIST
301 find ./dists -maxdepth 1 \! -type d
302 find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
303 ) | sort -u | gzip -9 > arch-$a.list.gz
# Suite lists: for each suite id, all pool files referenced by its sources
# (including extra source references) and binaries, plus its dists tree.
306 log "Generating suite lists"
# (helper function definition line elided; $1 is the suite id)
309 local suite_id="$(printf %d $1)"
312 SELECT DISTINCT './pool/' || c.name || '/' || f.filename
314 (SELECT sa.source AS source
315 FROM src_associations sa
316 WHERE sa.suite = $suite_id
# (UNION line elided)
319 FROM extra_src_references esr
320 JOIN bin_associations ba ON esr.bin_id = ba.bin
321 WHERE ba.suite = $suite_id
# (UNION line elided)
323 SELECT b.source AS source
324 FROM bin_associations ba
325 JOIN binaries b ON ba.bin = b.id WHERE ba.suite = $suite_id) s
326 JOIN dsc_files df ON s.source = df.source
327 JOIN files f ON df.file = f.id
328 JOIN files_archive_map af ON f.id = af.file_id
329 JOIN component c ON af.component_id = c.id
330 JOIN archive ON af.archive_id = archive.id
331 WHERE archive.name = 'ftp-master'
333 psql -F' ' -A -t -c "$query"
# Second query: pool files of the suite's binaries themselves.
336 SELECT './pool/' || c.name || '/' || f.filename
337 FROM bin_associations ba
338 JOIN binaries b ON ba.bin = b.id
339 JOIN files f ON b.file = f.id
340 JOIN files_archive_map af ON f.id = af.file_id
341 JOIN component c ON af.component_id = c.id
342 JOIN archive ON af.archive_id = archive.id
343 WHERE ba.suite = $suite_id AND archive.name = 'ftp-master'
345 psql -F' ' -A -t -c "$query"
# Iterate all suites known to the DB; skip those without a dists directory.
348 psql -F' ' -At -c "SELECT id, suite_name FROM suite" |
349 while read id suite; do
350 [ -e $base/ftp/dists/$suite ] || continue
# Resolve codename symlinks so suite and codename lists agree.
353 distname=$(cd dists; readlink $suite || echo $suite)
354 find ./dists/$distname \! -type d
355 for distdir in ./dists/*; do
356 [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
360 ) | sort -u | gzip -9 > suite-${suite}.list.gz
# Sundries: everything on the ftp site not already covered by a list.
363 log "Finding everything on the ftp site to generate sundries"
364 (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
367 zcat *.list.gz | cat - *.list | sort -u |
368 diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
# Final per-arch files: union of arch list, sundries, and static lists,
# pool paths first (poolfirst helper defined in elided lines).
370 log "Generating files list"
373 (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
374 cat - sundries.list dists.list project.list docs.list indices.list |
375 sort -u | poolfirst > ../arch-$a.files
# Translation lists per release, then a "typical" mirror profile file.
379 for dist in sid wheezy; do
380 find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
384 (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-wheezy.list.gz) |
385 sort -u | poolfirst > ../typical.files
# Per public archive: regenerate the dsync file list, publish gzipped md5sums
# into the archive's indices, and hardlink duplicate files (loop closers and
# the cd into the archive root are elided).
392 function mkchecksums() {
393 local archiveroot dsynclist md5list
395 for archive in "${public_archives[@]}"; do
396 archiveroot="$(get_archiveroot "${archive}")"
397 dsynclist=$dbdir/dsync.${archive}.list
398 md5list=${archiveroot}/indices/md5sums
400 log -n "Creating md5 / dsync index file for ${archive}... "
# (cd "$archiveroot" presumably elided here — TODO confirm)
403 ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
# -9n: max compression without embedding the timestamp (reproducible output).
404 ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
# link-dups may legitimately fail; best-effort by design (|| true).
405 ${bindir}/dsync-flist -q link-dups $dsynclist || true
# Fragment (function def elided, likely "mirror"): bump the archive serial in
# the trace file (max of date-based serial and previous+1), then hardlink-copy
# each public archive into its ../mirror staging tree for the push.
412 log "Regenerating \"public\" mirror/ hardlink fun"
413 DATE_SERIAL=$(date +"%Y%m%d01")
414 FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} )
415 if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
416 SERIAL="$DATE_SERIAL"
# (else marker elided)
418 SERIAL="$FILESOAPLUS1"
420 date -u > ${TRACEFILE}
421 echo "Using dak v1" >> ${TRACEFILE}
422 echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
423 echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
425 # Ugly "hack", but hey, it does what we want.
426 cp ${TRACEFILE} ${TRACEFILE_BDO}
428 for archive in "${public_archives[@]}"; do
429 archiveroot="$(get_archiveroot "${archive}")"
430 mirrordir="${archiveroot}/../mirror"
# (cd ${mirrordir} presumably elided; --link-dest makes this a cheap
# hardlink snapshot of the archive rather than a full copy)
432 rsync -aH --link-dest ${archiveroot} --delete --delete-after --delete-excluded --exclude Packages.*.new --exclude Sources.*.new --ignore-errors ${archiveroot}/. .
# Fragment (function def elided, likely "expire"): prune old database dumps
# in the current directory (a cd into the dump dir is presumably elided).
437 log "Expiring old database dumps..."
439 $scriptsdir/expire_dumps -d . -p -f "dump_*"
# Clean finished entries from the transitions file (closing brace elided).
442 function transitionsclean() {
443 log "Removing out of date transitions..."
# (cd ${base} presumably elided between these lines — TODO confirm)
445 dak transitions -c -a
# Fragment (function def elided, likely "dm"): export per-source DM upload
# permissions to the public web export directory.
449 log "Updating DM permissions page"
450 dak acl export-per-source dm >$exportdir/dm.txt
# Fragment (function def and body elided): auto-categorize ftp.debian.org
# bugs — presumably via "dak bts-categorize". TODO confirm in full source.
454 log "Categorizing uncategorized bugs filed against ftp.debian.org"
458 function ddaccess() {
459 # Tell our dd accessible mirror to sync itself up. Including ftp dir.
460 log "Trigger dd accessible parts sync including ftp dir"
461 ${scriptsdir}/sync-dd ries-sync ries-sync1 ries-sync2 pool
# Verify the staged public mirror against every InRelease's SHA1 section
# (existence, size, checksum of each listed file), abort on any breakage,
# then kick off the mirror push via archvsync. Several closers and the
# broken=0 initialisation are elided from this excerpt.
464 function mirrorpush() {
465 log "Checking the public archive copy"
466 cd ${mirrordir}/dists
469 for release in $(find . -name "InRelease"); do
470 echo "Processing: ${release}"
471 subdir=${release%/InRelease}
# Each line of the SHA1 section is "checksum size name".
472 while read SHASUM SIZE NAME; do
473 if ! [ -f "${subdir}/${NAME}" ]; then
474 bname=$(basename ${NAME})
475 if [[ "${bname}" =~ ^(Packages|Sources|Translation-[a-zA-Z_]+)$ ]]; then
476 # We don't keep unpacked files, don't check for their existance.
477 # We might want to go and check their unpacked shasum, but right now
478 # I don't care. I believe it should be enough if all the packed shasums
# (continue and fi elided here)
482 broken=$(( broken + 1 ))
483 echo "File ${subdir}/${NAME} is missing"
# (continue/fi elided here)
487 # We do have symlinks in the tree (see the contents files currently).
488 # So we use "readlink -f" to check the size of the target, as thats basically
489 # what gen-releases does
490 fsize=$(stat -c %s $(readlink -f "${subdir}/${NAME}"))
491 if [ ${fsize} -ne ${SIZE} ]; then
492 broken=$(( broken + 1 ))
493 echo "File ${subdir}/${NAME} has size ${fsize}, expected is ${SIZE}"
# (continue/fi elided here)
497 fshasum=$(sha1sum $(readlink -f "${subdir}/${NAME}"))
498 fshasum=${fshasum%% *}
499 if [ "${fshasum}" != "${SHASUM}" ]; then
500 broken=$(( broken + 1 ))
501 echo "File ${subdir}/${NAME} has checksum ${fshasum}, expected is ${SHASUM}"
# Feed the loop only the SHA1 block of the InRelease file:
# everything between the "SHA1:" and "SHA256:" headers.
504 done < <(sed '1,/SHA1:/d' "${release}" | sed '/SHA256:/,$d')
507 if [ $broken -gt 0 ]; then
508 log_error "Trouble with the public mirror, found ${broken} errors"
# (return/exit and fi elided here)
512 log "Starting the mirrorpush"
513 date -u > /srv/ftp.debian.org/web/mirrorstart
514 echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
515 echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/mirrorstart
# Fire-and-forget: pushes run in the background, logs captured per run.
516 sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
517 sudo -H -u archvsync /home/archvsync/runmirrors -a backports > ~dak/runmirrorsbpo.log 2>&1 &
# Fragment (function def elided, likely "i18n2"): export per-suite package
# lists for the i18n project into a timestamped directory, sign the timestamp
# with our archive key, point the i18n symlink at it, and expire old exports.
521 log "Exporting package data foo for i18n project"
522 STAMP=$(date "+%Y%m%d%H%M")
523 mkdir -p ${scriptdir}/i18n/${STAMP}
524 cd ${scriptdir}/i18n/${STAMP}
525 for suite in stable testing unstable; do
# "dak admin s show" prints key/value lines; keep only the codename value.
526 codename=$(dak admin s show ${suite}|grep '^Codename')
527 codename=${codename##* }
528 echo "Codename is ${codename}"
529 dak control-suite -l ${suite} >${codename}
# (done elided here)
531 echo "${STAMP}" > timestamp
# This signed timestamp is what i18n1 above verifies with gpgv.
532 gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 473041FA --detach-sign -o timestamp.gpg timestamp
# (cd back up one level presumably elided; -T treats an existing symlink as
# a file so the link itself is replaced, not a link inside the target dir)
536 ln -sfT ${scriptdir}/i18n/${STAMP} i18n
# Expire exports older than 2 days, keeping the one we just made.
539 find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
# Fragment (function def elided, likely "stats"): refresh ftpstats data,
# re-render graphs via R, and export archive-size statistics to the web dir.
543 log "Updating stats data"
# (cd "${configdir}" or similar presumably elided — TODO confirm)
545 $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
546 R --slave --vanilla < $base/misc/ftpstats.R
547 dak stats arch-space > $webdir/arch-space
548 dak stats pkg-nums > $webdir/pkg-nums
# Delete txid_* WAL/transaction files older than ~3 months (90 days); the cd
# into the backup directory and closing brace are elided.
551 function cleantransactions() {
552 log "Cleanup transaction ids older than 3 months"
554 find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -print0 | xargs -0 --no-run-if-empty rm
# Run the log statistics tool over the given dinstall logfile ($1).
557 function logstats() {
558 $masterdir/tools/logs.py "$1"
561 # save timestamp when we start
# Records the dinstall start time in ${dbdir}/dinstallstart; renamelogfile()
# reads it back to name the archived logfile. NOW is intentionally global.
562 function savetimestamp() {
563 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
564 echo ${NOW} > "${dbdir}/dinstallstart"
# Mail the full dinstall logfile to cron@ftp-master; relies on the global
# NOW set by savetimestamp()/renamelogfile() (closing brace elided).
567 function maillogfile() {
568 cat "$LOGFILE" | mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" cron@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
# Archive the current logfile under the dinstall start timestamp (from
# dinstallstart if available, otherwise "now"), run log statistics on it,
# and compress it. else/fi and closing brace are elided from this excerpt.
571 function renamelogfile() {
572 if [ -f "${dbdir}/dinstallstart" ]; then
573 NOW=$(cat "${dbdir}/dinstallstart")
# (maillogfile call possibly elided here — TODO confirm)
575 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
576 logstats "$logdir/dinstall_${NOW}.log"
577 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# Fallback: no recorded start time — still archive the log, but no stats.
579 error "Problem, I don't know when dinstall started, unable to do log statistics."
580 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
582 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
583 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# Export the list of source packages in testing (heidi format) for the web
# view (closing brace elided).
587 function testingsourcelist() {
588 dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
591 # do a last run of process-unchecked before dinstall is on.
592 function process_unchecked() {
593 log "Processing the unchecked queue"
# -p: we already hold the dinstall lock, so process-upload skips its own
# daily-lock check. The actual "dak process-upload" invocation is elided.
594 UNCHECKED_WITHOUT_LOCK="-p"
599 # Function to update a "statefile" telling people what we are doing
# (the function definition line itself is elided from this excerpt)
602 # This should be called with the argument(s)
603 # - Status name we want to show.
# Writes the current action and timestamps into ${DINSTALLSTATE}; heredoc
# terminator and remaining body lines are elided.
606 RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
607 cat >"${DINSTALLSTATE}" <<EOF
608 Dinstall start: ${DINSTALLBEGIN}
610 Action start: ${RIGHTNOW}
614 # extract changelogs and stuff
# Under the changelog lock: extract changelogs for ftp-master, compress the
# filelist, publish to the public export tree, and trigger the static mirror
# update in the background. If the lock is busy the whole body is skipped.
615 function changelogs() {
616 if lockfile -r3 $LOCK_CHANGELOG; then
617 log "Extracting changelogs"
618 dak make-changelog -e -a ftp-master
619 [ -f ${exportdir}/changelogs/filelist.yaml ] && xz -f ${exportdir}/changelogs/filelist.yaml
620 mkdir -p ${exportpublic}/changelogs
621 cd ${exportpublic}/changelogs
622 rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
623 sudo -H -u staticsync /usr/local/bin/static-update-component ftp-master.metadata.debian.org >/dev/null 2>&1 &
# backports variant kept for reference, currently disabled:
625 #dak make-changelog -e -a backports
626 #[ -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml ] && xz -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml
627 #mkdir -p /srv/backports-master.debian.org/rsync/export/changelogs
628 #cd /srv/backports-master.debian.org/rsync/export/changelogs
629 #rsync -aHW --delete --delete-after --ignore-errors /srv/backports-master.debian.org/export/changelogs/. .
# Ensure the changelog lock is released on any exit path from here on.
630 trap remove_changelog_lock EXIT TERM HUP INT QUIT
# Experimental: track uncompressed index files (Contents/Packages/Sources)
# in a git repository to evaluate git-based pdiffs. Several body lines
# (per-file processing, git add, tag) are elided from this excerpt.
634 function gitpdiff() {
635 # Might be that we want to change this to have more than one git repository.
636 # Advantage of one is that we do not need much space in terms of storage in git itself,
637 # git gc is pretty good on our input.
638 # But it might be faster. Well, lets test.
639 log "Adjusting the git tree for pdiffs"
# (cd into the git working tree presumably elided — TODO confirm)
642 # The regex needs the architectures seperated with \|
643 garchs=$(dak admin a list|sed -e ':q;N;s/\n/\\|/g;t q')
# NOTE(review): the find below uses $archs but the variable built above is
# $garchs — looks like a naming mismatch; verify against full source.
645 # First, get all the files we want to work on. ../dists/ is a symlink to the real dists/ we
647 # Also, we only want contents, packages and sources.
648 for file in $(find ../dists/ -regex ".*/\(Contents-\($archs\)\|\(Packages\|Sources\)\).gz"); do
657 # Second, add all there is into git
660 # Maybe we want to make this the same for tag and commit? But well, shouldn't matter
# (COMD=$(date ...) presumably elided just above — TODO confirm)
662 TAGD=$(date +%Y-%m-%d-%H-%M)
663 git commit -m "Commit of ${COMD}"