# NOTE(review): this copy of the file is sparsely sampled — the embedded
# original line numbers jump (2, 4, 8, ...), so most function bodies below
# are missing intermediate lines (closing braces, else/fi, heredoc ends).
# Only comments are added here; no code has been changed.
# Fragment of a timestamp helper (presumably ts(); header not visible):
# logs a wall-clock stamp for the dinstall stat graphs. $1 is a stage label.
2 # Timestamp. Used for dinstall stat graphs
4 echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
# Lock-file helpers (bodies partially missing in this copy).
8 function remove_daily_lock() {
12 # Remove changelog lock
13 function remove_changelog_lock() {
# Remove every dinstall lock in one go.
18 function remove_all_locks() {
# NOTE(review): $LOCK_DAILY / $LOCK_ACCEPTED are unquoted — safe only if the
# lock paths never contain whitespace or glob characters; confirm.
19 rm -f $LOCK_DAILY $LOCK_ACCEPTED
22 # If we error out this one is called, *FOLLOWED* by cleanup above
# Error handler fragment (function header missing in this copy): mails the
# failing stage's log to cron@ftp-master.debian.org with a subject that says
# where and when dinstall died.
24 ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
26 subject="ATTENTION ATTENTION!"
# ${error} == "false" marks a non-fatal continuation; anything else means the
# run was interrupted (the else/fi lines are missing in this copy).
27 if [ "${error}" = "false" ]; then
28 subject="${subject} (continued)"
30 subject="${subject} (interrupted)"
32 subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
# Mail the stage log if readable, otherwise a placeholder line; the whole
# if/fi group's output is piped into mail(1).
34 if [ -r "${STAGEFILE}.log" ]; then
35 cat "${STAGEFILE}.log"
37 echo "file ${STAGEFILE}.log does not exist, sorry"
38 fi | mail -s "${subject}" -a "X-Debian: DAK" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
41 ########################################################################
42 # the actual dinstall functions follow #
43 ########################################################################
45 # pushing merkels QA user, part one
# Notify the QA host that dinstall has started. BatchMode=yes prevents any
# interactive prompt; the remote 'sleep 1' is effectively a no-op ping.
# NOTE(review): "SetupTimeOut" is not a standard ssh_config keyword
# (ConnectTimeout is also passed) — confirm it is intentional / still needed.
47 log "Telling QA user that we start dinstall"
48 ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1
51 # Updating various files
# Refresh assorted web-visible text files via helper scripts in $scriptsdir.
53 log "Updating Bugs docu, Mirror list and mailing-lists.txt"
55 $scriptsdir/update-bugdoctxt
56 $scriptsdir/update-mirrorlists
57 $scriptsdir/update-mailingliststxt
58 $scriptsdir/update-pseudopackages.sh
61 # The first i18n one, syncing new descriptions
# i18n sync fragment: pull new package descriptions from the DDTP host,
# verify our own signed timestamp, sanity-check, then publish per-dist.
63 log "Synchronizing i18n package descriptions"
64 # First sync their newest data
65 cd ${scriptdir}/i18nsync
# rsync failure is tolerated (|| true): a missed sync only means stale data.
66 rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
68 # Now check if we still know about the packages for which they created the files
69 # is the timestamp signed by us?
# NOTE(review): `if $(gpgv ...)` executes gpgv's *stdout* as a command; the
# usual idiom is plain `if gpgv ...`. It happens to behave when gpgv prints
# nothing to stdout, but this should be confirmed/cleaned up in the full file.
70 if $(gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
71 # now read it. As its signed by us we are sure the content is what we expect, no need
72 # to do more here. And we only test -d a directory on it anyway.
73 TSTAMP=$(cat timestamp)
74 # do we have the dir still?
75 if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
# Run the consistency checker before copying anything into the archive.
77 if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
78 # Yay, worked, lets copy around
79 for dir in stretch sid; do
80 if [ -d dists/${dir}/ ]; then
81 cd dists/${dir}/main/i18n
82 rsync -aq --delete --delete-after --exclude Translation-en.bz2 --exclude Translation-*.diff/ . ${ftpdir}/dists/${dir}/main/i18n/.
84 cd ${scriptdir}/i18nsync
# Failure branches: each mails debian-l10n-devel (the intervening else/fi
# lines are missing in this copy).
87 echo "ARRRR, bad guys, wrong files, ARRR"
88 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
91 echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
92 echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
95 echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
96 echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -a "X-Debian: DAK" -s "You can't keep blaming yourself. Just blame yourself once, and move on." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
# Override-cruft check fragment (function header missing in this copy).
101 log "Checking for cruft in overrides"
# Drop source/binary suite associations that are superseded.
105 function dominate() {
106 log "Removing obsolete source and binary associations"
# Regenerate the file lists consumed by apt-ftparchive.
110 function filelist() {
111 log "Generating file lists for apt-ftparchive"
112 dak generate-filelist
# Import the Debian and DM keyrings; if the DM import produced user changes
# (captured in $OUTFILE), announce them to debian-project via sendmail.
115 function fingerprints() {
116 log "Updating fingerprints"
117 dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
120 dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
# -s: only mail when the change file is non-empty. Everything after the
# heredoc start is literal mail content — do not edit casually. (The heredoc
# terminator and surrounding lines are missing in this copy.)
122 if [ -s "${OUTFILE}" ]; then
123 /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
124 From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
125 To: <debian-project@lists.debian.org>
126 Subject: Debian Maintainers Keyring changes
127 Content-Type: text/plain; charset=utf-8
131 The following changes to the debian-maintainers keyring have just been activated:
135 Debian distribution maintenance software,
136 on behalf of the Keyring maintainers
# Dump override data to text files (body missing in this copy).
143 function overrides() {
144 log "Writing overrides into text files"
# Package->file mapping fragment (its function header is missing): one
# bzip2'd map per public archive.
152 log "Generating package / file mapping"
153 for archive in "${public_archives[@]}"; do
154 archiveroot="$(get_archiveroot "${archive}")"
155 dak make-pkg-file-mapping "${archive}" | bzip2 -9 > "${archiveroot}/indices/package-file.map.bz2"
# Generate Packages/Sources and Contents indices for every public archive.
159 function packages() {
160 log "Generating Packages and Sources files"
161 for archive in "${public_archives[@]}"; do
162 log " Generating Packages/Sources for ${archive}"
163 dak generate-packages-sources2 -a "${archive}"
164 log " Generating Contents for ${archive}"
165 dak contents generate -a "${archive}"
# pdiff generation fragment (function header missing in this copy).
170 log "Generating pdiff files"
171 dak generate-index-diffs
# Release file generation fragment, again per public archive.
175 log "Generating Release files"
176 for archive in "${public_archives[@]}"; do
177 dak generate-releases -a "${archive}"
# Expire old packages/files from suites and queues.
181 function dakcleanup() {
182 log "Cleanup old packages/files"
# NOTE(review): the meaning of "-m 10000" / "-i $unchecked" comes from the
# dak subcommands themselves — see their --help for the exact semantics.
183 dak clean-suites -m 10000
184 dak clean-queues -i "$unchecked"
# Recursive-listing fragment (mklslar-style; header missing in this copy):
# per archive, drop stray core files and publish an ls-lR.gz.
191 for archive in "${public_archives[@]}"; do
192 archiveroot="$(get_archiveroot "${archive}")"
195 log "Removing any core files ..."
196 find -type f -name core -print -delete
198 log "Checking symlinks ..."
201 log "Creating recursive directory listing ... "
# TZ=UTC keeps listing timestamps host-independent; --rsyncable helps mirrors
# transfer small deltas of the compressed listing.
203 TZ=UTC ls -lR | gzip -9c --rsyncable > ${FILENAME}.gz
# Build the Maintainers/Uploaders indices for each public archive.
207 function mkmaintainers() {
211 log 'Creating Maintainers index ... '
213 for archive in "${public_archives[@]}"; do
214 archiveroot="$(get_archiveroot "${archive}")"
215 indices="${archiveroot}/indices"
# Skip archives without an indices directory (the continue/fi lines are
# missing in this copy).
216 if ! [ -d "${indices}" ]; then
221 dak make-maintainers -a "${archive}" ${scriptdir}/masterfiles/pseudo-packages.maintainers
222 gzip -9v --rsyncable <Maintainers >Maintainers.gz
223 gzip -9v --rsyncable <Uploaders >Uploaders.gz
# Publish gzip'd override files into the public indices directory; the brace
# expansion enumerates suite x (extra.) x component combinations.
227 function copyoverrides() {
228 log 'Copying override files into public view ...'
230 for ofile in ${overridedir}/override.{squeeze,wheezy,jessie,stretch,sid}.{,extra.}{main,contrib,non-free}*; do
232 gzip -9cv --rsyncable ${ofile} > ${indices}/${bname}.gz
233 chmod g+w ${indices}/${bname}.gz
# Build per-arch / per-suite file lists under indices/files/components, fed
# by SQL queries against the projectb database. Many lines (helper function
# headers, SQL string delimiters, done/fi) are missing in this copy, so the
# SQL fragments below are interior lines of quoted $query strings — comments
# are deliberately NOT inserted between them.
237 function mkfilesindices() {
240 cd $base/ftp/indices/files/components
# Full pool listing with per-file architecture, written to $ARCHLIST.
244 log "Querying postgres"
246 SELECT './pool/' || c.name || '/' || f.filename AS path, a.arch_string AS arch_string
248 JOIN files_archive_map af ON f.id = af.file_id
249 JOIN component c ON af.component_id = c.id
250 JOIN archive ON af.archive_id = archive.id
253 JOIN architecture a ON b.architecture = a.id)
255 WHERE archive.name = 'ftp-master'
256 ORDER BY path, arch_string
258 psql -At -c "$query" >$ARCHLIST
# Helper bodies: the first perl emits every parent directory of each path
# (deduplicated via %d); the second reorders input so ./pool/ lines come first.
261 perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
264 perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
# Source list: arch-less pool entries plus dists/ source index files.
267 log "Generating sources list"
269 sed -n 's/|$//p' $ARCHLIST
271 find ./dists -maxdepth 1 \! -type d
272 find ./dists \! -type d | grep "/source/"
273 ) | sort -u | gzip -9 > source.list.gz
# Per-architecture lists: real arches from $ARCHLIST (amd64 forced in,
# "all" excluded), each combining arch-specific and arch:all pool entries
# with matching dists/ files.
275 log "Generating arch lists"
277 ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
279 (sed -n "s/|$a$//p" $ARCHLIST
280 sed -n 's/|all$//p' $ARCHLIST
283 find ./dists -maxdepth 1 \! -type d
284 find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
285 ) | sort -u | gzip -9 > arch-$a.list.gz
# Per-suite lists: pool files reachable from a suite's sources and binaries.
288 log "Generating suite lists"
# printf %d coerces/validates the suite id argument as an integer.
291 local suite_id="$(printf %d $1)"
294 SELECT DISTINCT './pool/' || c.name || '/' || f.filename
296 (SELECT sa.source AS source
297 FROM src_associations sa
298 WHERE sa.suite = $suite_id
301 FROM extra_src_references esr
302 JOIN bin_associations ba ON esr.bin_id = ba.bin
303 WHERE ba.suite = $suite_id
305 SELECT b.source AS source
306 FROM bin_associations ba
307 JOIN binaries b ON ba.bin = b.id WHERE ba.suite = $suite_id) s
308 JOIN dsc_files df ON s.source = df.source
309 JOIN files f ON df.file = f.id
310 JOIN files_archive_map af ON f.id = af.file_id
311 JOIN component c ON af.component_id = c.id
312 JOIN archive ON af.archive_id = archive.id
313 WHERE archive.name = 'ftp-master'
315 psql -F' ' -A -t -c "$query"
318 SELECT './pool/' || c.name || '/' || f.filename
319 FROM bin_associations ba
320 JOIN binaries b ON ba.bin = b.id
321 JOIN files f ON b.file = f.id
322 JOIN files_archive_map af ON f.id = af.file_id
323 JOIN component c ON af.component_id = c.id
324 JOIN archive ON af.archive_id = archive.id
325 WHERE ba.suite = $suite_id AND archive.name = 'ftp-master'
327 psql -F' ' -A -t -c "$query"
# Iterate all suites known to the DB; skip those without a dists/ directory.
# Suites that are symlinks are resolved so the real dist dir is listed too.
330 psql -F' ' -At -c "SELECT id, suite_name FROM suite" |
331 while read id suite; do
332 [ -e $base/ftp/dists/$suite ] || continue
335 distname=$(cd dists; readlink $suite || echo $suite)
336 find ./dists/$distname \! -type d
337 for distdir in ./dists/*; do
338 [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
342 ) | sort -u | gzip -9 > suite-${suite}.list.gz
# Sundries: everything on the ftp site not already covered by a list above.
345 log "Finding everything on the ftp site to generate sundries"
346 (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
# diff - sed keeps only lines present in $ARCHLIST but in no *.list(.gz).
349 zcat *.list.gz | cat - *.list | sort -u |
350 diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
# Combine per-arch, source, sundries and misc lists into arch-$a.files,
# pool entries first (poolfirst helper defined above).
352 log "Generating files list"
355 (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
356 cat - sundries.list dists.list project.list docs.list indices.list |
357 sort -u | poolfirst > ../arch-$a.files
# Translation lists per dist, then a "typical" combined mirror profile.
361 for dist in sid jessie stretch; do
362 find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
366 (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-jessie.list.gz ; zcat translation-stretch.list.gz) |
367 sort -u | poolfirst > ../typical.files
# Build md5sum / dsync index files for each public archive.
374 function mkchecksums() {
375 local archiveroot dsynclist md5list
377 for archive in "${public_archives[@]}"; do
378 archiveroot="$(get_archiveroot "${archive}")"
379 dsynclist=$dbdir/dsync.${archive}.list
380 md5list=${archiveroot}/indices/md5sums
382 log -n "Creating md5 / dsync index file for ${archive}... "
# generate builds the dsync file list (excluding itself), md5sums publishes
# a gzip'd md5 index, link-dups is best-effort deduplication (|| true).
385 ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
386 ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
387 ${bindir}/dsync-flist -q link-dups $dsynclist || true
# mirror() fragment (function header missing in this copy): refresh the
# hardlinked "public" mirror tree and (re)write archive trace files.
392 local archiveroot mirrordir
394 log "Regenerating \"public\" mirror/ hardlink fun"
# Serial = today's date + "01", unless the previous trace serial + 1 is
# larger (i.e. multiple runs on the same day keep incrementing).
395 DATE_SERIAL=$(date +"%Y%m%d01")
396 FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} )
397 if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
398 SERIAL="$DATE_SERIAL"
400 SERIAL="$FILESOAPLUS1"
402 date -u > ${TRACEFILE}
403 echo "Using dak v1" >> ${TRACEFILE}
404 echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
405 echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
407 # Ugly "hack", but hey, it does what we want.
408 cp ${TRACEFILE} ${TRACEFILE_BDO}
410 for archive in "${public_archives[@]}"; do
411 archiveroot="$(get_archiveroot "${archive}")"
412 mirrordir="${archiveroot}/../mirror"
# --link-dest hardlinks unchanged files against the live archive, so the
# mirror copy costs almost no extra disk space.
414 rsync -aH --link-dest ${archiveroot} --delete --delete-after --delete-excluded --exclude Packages.*.new --exclude Sources.*.new --ignore-errors ${archiveroot}/. .
# expire fragment: prune old database dumps via the expire_dumps helper.
419 log "Expiring old database dumps..."
421 $scriptsdir/expire_dumps -d . -p -f "dump_*"
# Drop transitions that are no longer applicable.
424 function transitionsclean() {
425 log "Removing out of date transitions..."
427 dak transitions -c -a
# DM permissions export fragment (function header missing in this copy).
431 log "Updating DM permissions page"
432 dak acl export-per-source dm >$exportdir/dm.txt
# BTS categorization fragment; deliberately runs as the unprivileged user.
436 log "Categorizing uncategorized bugs filed against ftp.debian.org"
437 sudo -u dak-unpriv dak bts-categorize
440 function ddaccess() {
441 # Tell our dd accessible mirror to sync itself up. Including ftp dir.
442 log "Trigger dd accessible parts sync including ftp dir"
443 ${scriptsdir}/sync-dd dd-sync dd-sync1 dd-sync2 sync
# Verify the public mirror copy against each InRelease file (presence, size,
# SHA1), then kick off the mirror push. Several fi/done/else lines are
# missing in this copy.
446 function mirrorpush() {
447 log "Checking the public archive copy"
448 cd ${mirrordir}/dists
# For every InRelease, read its SHA1 section line-by-line below.
451 for release in $(find . -name "InRelease"); do
452 echo "Processing: ${release}"
453 subdir=${release%/InRelease}
454 while read SHASUM SIZE NAME; do
# Missing file: tolerated only for the uncompressed index names, since those
# are never kept on disk.
455 if ! [ -f "${subdir}/${NAME}" ]; then
456 bname=$(basename ${NAME})
457 if [[ "${bname}" =~ ^(Packages|Sources|Translation-[a-zA-Z_]+)$ ]]; then
458 # We don't keep unpacked files, don't check for their existance.
459 # We might want to go and check their unpacked shasum, but right now
460 # I don't care. I believe it should be enough if all the packed shasums
# Otherwise count it as broken and report.
464 broken=$(( broken + 1 ))
465 echo "File ${subdir}/${NAME} is missing"
469 # We do have symlinks in the tree (see the contents files currently).
470 # So we use "readlink -f" to check the size of the target, as thats basically
471 # what gen-releases does
472 fsize=$(stat -c %s $(readlink -f "${subdir}/${NAME}"))
473 if [ ${fsize} -ne ${SIZE} ]; then
474 broken=$(( broken + 1 ))
475 echo "File ${subdir}/${NAME} has size ${fsize}, expected is ${SIZE}"
# Size matched: also verify the SHA1 checksum of the link target.
479 fshasum=$(sha1sum $(readlink -f "${subdir}/${NAME}"))
480 fshasum=${fshasum%% *}
481 if [ "${fshasum}" != "${SHASUM}" ]; then
482 broken=$(( broken + 1 ))
483 echo "File ${subdir}/${NAME} has checksum ${fshasum}, expected is ${SHASUM}"
# Feed the loop only the SHA1 block of the InRelease file (strip everything
# up to "SHA1:" and everything from "SHA256:" on).
486 done < <(sed '1,/SHA1:/d' "${release}" | sed '/SHA256:/,$d')
# Abort the push when any file was missing/mismatched.
489 if [ $broken -gt 0 ]; then
490 log_error "Trouble with the public mirror, found ${broken} errors"
494 log "Starting the mirrorpush"
495 date -u > /srv/ftp.debian.org/web/mirrorstart
496 echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
497 echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/mirrorstart
# Fire both mirror runs in the background; logs go to dak's home.
498 sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
499 sudo -H -u archvsync /home/archvsync/runmirrors -a backports > ~dak/runmirrorsbpo.log 2>&1 &
# Separately sync the backports mirror (runs as the backports user).
502 function mirrorpush-backports() {
503 log "Syncing backports mirror"
504 sudo -u backports /home/backports/bin/update-archive
# i18n export fragment (function header missing in this copy): dump
# control-suite lists per suite into a timestamped directory, sign the
# timestamp, and publish via a symlink. The signed timestamp is what the
# earlier i18n sync step verifies with gpgv.
508 log "Exporting package data foo for i18n project"
509 STAMP=$(date "+%Y%m%d%H%M")
510 mkdir -p ${scriptdir}/i18n/${STAMP}
511 cd ${scriptdir}/i18n/${STAMP}
512 for suite in stable testing unstable; do
# "dak admin s show" prints a "Codename: <name>" line; keep the last word.
513 codename=$(dak admin s show ${suite}|grep '^Codename')
514 codename=${codename##* }
515 echo "Codename is ${codename}"
516 dak control-suite -l ${suite} >${codename}
518 echo "${STAMP}" > timestamp
519 gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 473041FA --detach-sign -o timestamp.gpg timestamp
# Atomically repoint the i18n symlink at the fresh export (-T: treat link
# name as a file, never descend into it).
523 ln -sfT ${scriptdir}/i18n/${STAMP} i18n
# Remove exports older than 2 days, but never the one just created.
526 find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
# Stats fragment (function header missing in this copy): refresh ftpstats
# data, render graphs via R, and export dak space/package-count stats.
530 log "Updating stats data"
532 $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
533 R --slave --vanilla < $base/misc/ftpstats.R
534 dak stats arch-space > $webdir/arch-space
535 dak stats pkg-nums > $webdir/pkg-nums
# Remove transaction-id files older than ~3 months (-mtime +90);
# NUL-delimited so odd filenames survive.
538 function cleantransactions() {
539 log "Cleanup transaction ids older than 3 months"
541 find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -print0 | xargs -0 --no-run-if-empty rm
# Feed a finished logfile ($1) to the log-statistics tool.
544 function logstats() {
545 $masterdir/tools/logs.py "$1"
548 # save timestamp when we start
# Record the dinstall start time; renamelogfile reads it back later.
549 function savetimestamp() {
550 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
551 echo ${NOW} > "${dbdir}/dinstallstart"
# Mail the full run log to the cron address.
554 function maillogfile() {
555 cat "$LOGFILE" | mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
# Archive the live logfile under the run's start timestamp, run log
# statistics, and compress. The else/fi lines are missing in this copy.
558 function renamelogfile() {
559 if [ -f "${dbdir}/dinstallstart" ]; then
560 NOW=$(cat "${dbdir}/dinstallstart")
562 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
563 logstats "$logdir/dinstall_${NOW}.log"
564 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# Fallback: start stamp missing — complain, use "now", skip logstats.
566 error "Problem, I don't know when dinstall started, unable to do log statistics."
567 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
569 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
570 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# Export the list of source packages currently in testing.
574 function testingsourcelist() {
575 dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
578 # do a last run of process-unchecked before dinstall is on.
579 function process_unchecked() {
580 log "Processing the unchecked queue"
# -p presumably tells the queue run to skip the daily lock — TODO confirm
# against the full file / dak documentation.
581 UNCHECKED_WITHOUT_LOCK="-p"
586 # Function to update a "statefile" telling people what we are doing
589 # This should be called with the argument(s)
590 # - Status name we want to show.
# state() fragment: write a small status file with start/now timestamps.
# Its function header, some heredoc lines and the EOF terminator are missing
# in this copy — the heredoc body below is literal file content.
593 RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
594 cat >"${DINSTALLSTATE}" <<EOF
595 Dinstall start: ${DINSTALLBEGIN}
597 Action start: ${RIGHTNOW}
601 # extract changelogs and stuff
602 function changelogs() {
# -r3: retry the changelog lock a few times; skip extraction if unavailable.
603 if lockfile -r3 $LOCK_CHANGELOG; then
604 log "Extracting changelogs"
605 dak make-changelog -e -a ftp-master
606 [ -f ${exportdir}/changelogs/filelist.yaml ] && xz -f ${exportdir}/changelogs/filelist.yaml
607 mkdir -p ${exportpublic}/changelogs
608 cd ${exportpublic}/changelogs
609 rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
# Push to the static-mirror component asynchronously.
610 sudo -H -u staticsync /usr/local/bin/static-update-component metadata.ftp-master.debian.org >/dev/null 2>&1 &
# Same dance for the backports archive.
612 dak make-changelog -e -a backports
613 [ -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml ] && xz -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml
614 mkdir -p /srv/backports-master.debian.org/rsync/export/changelogs
615 cd /srv/backports-master.debian.org/rsync/export/changelogs
616 rsync -aHW --delete --delete-after --ignore-errors /srv/backports-master.debian.org/export/changelogs/. .
# NOTE(review): the lock is removed *before* the EXIT trap referencing the
# same function is installed — ordering looks odd; confirm against the full
# file (intervening lines are missing in this copy).
617 remove_changelog_lock
618 trap remove_changelog_lock EXIT TERM HUP INT QUIT
# Snapshot dists/ index files into a git tree so pdiff history is browsable.
622 function gitpdiff() {
623 # Might be that we want to change this to have more than one git repository.
624 # Advantage of one is that we do not need much space in terms of storage in git itself,
625 # git gc is pretty good on our input.
626 # But it might be faster. Well, lets test.
627 log "Adjusting the git tree for pdiffs"
630 # The regex needs the architectures seperated with \|
# Build an "arch1\|arch2\|..." alternation for the find regex below.
631 garchs=$(dak admin a list|sed -e ':q;N;s/\n/\\|/g;t q')
633 # First, get all the files we want to work on. ../dists/ is a symlink to the real dists/ we
635 # Also, we only want contents, packages and sources.
# NOTE(review): the regex interpolates $archs, but the variable assigned
# above is $garchs — looks like a mismatch; confirm against the full file.
636 for file in $(find ../dists/ -regex ".*/\(Contents-\($archs\)\|\(Packages\|Sources\)\).gz"); do
645 # Second, add all there is into git
648 # Maybe we want to make this the same for tag and commit? But well, shouldn't matter
650 TAGD=$(date +%Y-%m-%d-%H-%M)
# NOTE(review): ${COMD} is not assigned in this copy (only TAGD is) — its
# assignment line is likely among the missing lines; confirm.
651 git commit -m "Commit of ${COMD}"