2 # Timestamp. Used for dinstall stat graphs
# Emits a wall-clock marker for the named dinstall stage ($1) so stage
# durations can be graphed later.
4 echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
# Remove the daily dinstall lockfile (body elided in this view).
8 function remove_daily_lock() {
12 # Remove changelog lock
# NOTE(review): body elided here; presumably removes $LOCK_CHANGELOG — confirm.
13 function remove_changelog_lock() {
# Remove every lock dinstall may hold.
18 function remove_all_locks() {
# NOTE(review): unquoted expansions; assumes lock paths contain no whitespace.
19 rm -f $LOCK_DAILY $LOCK_ACCEPTED
22 # If we error out this one is called, *FOLLOWED* by cleanup above
# Error handler: mails the failed stage's log to the ftpmaster cron address.
# ${error} selects whether the subject flags the run as continued or
# interrupted (the else branch is elided between lines 28 and 30).
24 ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
26 subject="ATTENTION ATTENTION!"
27 if [ "${error}" = "false" ]; then
28 subject="${subject} (continued)"
30 subject="${subject} (interrupted)"
32 subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
# Mail the stage log if readable, otherwise a placeholder message; the whole
# if/else output is piped into mail(1).
34 if [ -r "${STAGEFILE}.log" ]; then
35 cat "${STAGEFILE}.log"
37 echo "file ${STAGEFILE}.log does not exist, sorry"
38 fi | mail -s "${subject}" -a "X-Debian: DAK" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
41 ########################################################################
42 # the actual dinstall functions follow                                 #
43 ########################################################################
45 # pushing merkels QA user, part one
# Ping the QA host so it knows dinstall is starting. BatchMode avoids
# hanging on a password prompt; the timeouts bound the connection attempt.
47 log "Telling QA user that we start dinstall"
48 ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1
51 # Updating various files
# Refresh miscellaneous web/docs artifacts via helper scripts in $scriptsdir.
53 log "Updating Bugs docu, Mirror list and mailing-lists.txt"
55 $scriptsdir/update-bugdoctxt
56 $scriptsdir/update-mirrorlists
57 $scriptsdir/update-mailingliststxt
58 $scriptsdir/update-pseudopackages.sh
61 # The first i18n one, syncing new descriptions
63 log "Synchronizing i18n package descriptions"
64 # First sync their newest data
65 cd ${scriptdir}/i18nsync
# || true: a failed sync is non-fatal; we just work with what we have.
66 rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
68 # Now check if we still know about the packages for which they created the files
69 # is the timestamp signed by us?
# Only trust the synced data if the timestamp file carries our own signature.
70 if $(gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
71 # now read it. As its signed by us we are sure the content is what we expect, no need
72 # to do more here. And we only test -d a directory on it anyway.
73 TSTAMP=$(cat timestamp)
74 # do we have the dir still?
75 if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
# Sanity-check the translation files before publishing them.
77 if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
78 # Yay, worked, lets copy around
79 for dir in stretch sid; do
80 if [ -d dists/${dir}/ ]; then
81 cd dists/${dir}/main/i18n
82 rsync -aq --delete --delete-after --exclude Translation-en.bz2 --exclude Translation-*.diff/ . ${ftpdir}/dists/${dir}/main/i18n/.
84 cd ${scriptdir}/i18nsync
# Failure branches below mail the l10n list instead of publishing anything.
87 echo "ARRRR, bad guys, wrong files, ARRR"
88 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
91 echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
92 echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
95 echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
96 echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -a "X-Debian: DAK" -s "You can't keep blaming yourself. Just blame yourself once, and move on." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
101 log "Checking for cruft in overrides"
# Drop source/binary associations that are superseded.
105 function dominate() {
106 log "Removing obsolete source and binary associations"
# NOTE(review): body elided; presumably invokes "dak dominate" — confirm.
# Import keyrings into the dak database. Changes resulting from the
# debian-maintainers keyring import are captured in ${OUTFILE}; if any
# occurred ( -s : non-empty) they are announced to debian-project by mail.
110 function fingerprints() {
111 log "Updating fingerprints"
112 dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
115 dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
117 if [ -s "${OUTFILE}" ]; then
118 /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
119 From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
120 To: <debian-project@lists.debian.org>
121 Subject: Debian Maintainers Keyring changes
122 Content-Type: text/plain; charset=utf-8
126 The following changes to the debian-maintainers keyring have just been activated:
130 Debian distribution maintenance software,
131 on behalf of the Keyring maintainers
# Dump override data to text files for public consumption (body elided).
138 function overrides() {
139 log "Writing overrides into text files"
# For each public archive, publish a bzip2-compressed package-to-pool-file
# mapping under <archiveroot>/indices.
147 log "Generating package / file mapping"
148 for archive in "${public_archives[@]}"; do
149 archiveroot="$(get_archiveroot "${archive}")"
150 dak make-pkg-file-mapping "${archive}" | bzip2 -9 > "${archiveroot}/indices/package-file.map.bz2"
# Regenerate the Packages/Sources and Contents indices for every public
# archive.
154 function packages() {
155 log "Generating Packages and Sources files"
156 for archive in "${public_archives[@]}"; do
157 log " Generating Packages/Sources for ${archive}"
158 dak generate-packages-sources2 -a "${archive}"
159 log " Generating Contents for ${archive}"
160 dak contents generate -a "${archive}"
# Build pdiff (incremental index diff) files.
165 log "Generating pdiff files"
166 dak generate-index-diffs
# Sign off the new indices with fresh Release files per archive.
170 log "Generating Release files"
171 for archive in "${public_archives[@]}"; do
172 dak generate-releases -a "${archive}"
# Expire old packages/files from suites and the unchecked queue.
176 function dakcleanup() {
177 log "Cleanup old packages/files"
178 dak clean-suites -m 10000
179 dak clean-queues -i "$unchecked"
# Per-archive housekeeping: drop stray core files, check symlinks, and
# publish a recursive ls listing (elided context presumably cd's into
# the archive root first — confirm).
186 for archive in "${public_archives[@]}"; do
187 archiveroot="$(get_archiveroot "${archive}")"
190 log "Removing any core files ..."
# NOTE(review): GNU find with no path argument defaults to ".".
191 find -type f -name core -print -delete
193 log "Checking symlinks ..."
196 log "Creating recursive directory listing ... "
198 TZ=UTC ls -lR | gzip -9c --rsyncable > ${FILENAME}.gz
# Build the Maintainers/Uploaders indices for each public archive and
# compress them into the archive's indices directory.
202 function mkmaintainers() {
206 log 'Creating Maintainers index ... '
208 for archive in "${public_archives[@]}"; do
209 archiveroot="$(get_archiveroot "${archive}")"
210 indices="${archiveroot}/indices"
# Skip archives without an indices directory (loop body elided after this).
211 if ! [ -d "${indices}" ]; then
216 dak make-maintainers -a "${archive}" ${scriptdir}/masterfiles/pseudo-packages.maintainers
217 gzip -9v --rsyncable <Maintainers >Maintainers.gz
218 gzip -9v --rsyncable <Uploaders >Uploaders.gz
# Publish gzipped copies of the per-suite override files into the public
# indices directory, group-writable.
222 function copyoverrides() {
223 log 'Copying override files into public view ...'
225 for ofile in ${overridedir}/override.{squeeze,wheezy,jessie,stretch,sid}.{,extra.}{main,contrib,non-free}*; do
# NOTE(review): ${bname} is set on an elided line, presumably basename of
# ${ofile} — confirm.
227 gzip -9cv --rsyncable ${ofile} > ${indices}/${bname}.gz
228 chmod g+w ${indices}/${bname}.gz
# Build the per-arch / per-suite / typical file lists under
# indices/files/components, used by partial mirrors. Heavily elided in this
# view: SQL query strings start and end on elided lines, so no commentary is
# inserted inside them.
232 function mkfilesindices() {
235 cd $base/ftp/indices/files/components
# Fetch "path|arch" rows for every pool file in the ftp-master archive.
239 log "Querying postgres"
241 SELECT './pool/' || c.name || '/' || f.filename AS path, a.arch_string AS arch_string
243 JOIN files_archive_map af ON f.id = af.file_id
244 JOIN component c ON af.component_id = c.id
245 JOIN archive ON af.archive_id = archive.id
248 JOIN architecture a ON b.architecture = a.id)
250 WHERE archive.name = 'ftp-master'
251 ORDER BY path, arch_string
253 psql -At -c "$query" >$ARCHLIST
# Helper: emit each path plus all of its parent directories, deduplicated.
256 perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
# Helper: reorder input so ./pool/ paths come first, everything else after.
259 perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
# Source list: arch-less pool rows plus everything under dists/*/source.
262 log "Generating sources list"
264 sed -n 's/|$//p' $ARCHLIST
266 find ./dists -maxdepth 1 \! -type d
267 find ./dists \! -type d | grep "/source/"
268 ) | sort -u | gzip -9 > source.list.gz
# Per-arch lists: arch-specific plus arch-all pool files plus matching
# dists/ material (installer, Contents, binary-$a, ...).
270 log "Generating arch lists"
272 ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
274 (sed -n "s/|$a$//p" $ARCHLIST
275 sed -n 's/|all$//p' $ARCHLIST
278 find ./dists -maxdepth 1 \! -type d
279 find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
280 ) | sort -u | gzip -9 > arch-$a.list.gz
# Per-suite lists: all pool files referenced by a suite's sources (incl.
# extra source references) and binaries, plus its dists/ tree.
283 log "Generating suite lists"
286 local suite_id="$(printf %d $1)"
289 SELECT DISTINCT './pool/' || c.name || '/' || f.filename
291 (SELECT sa.source AS source
292 FROM src_associations sa
293 WHERE sa.suite = $suite_id
296 FROM extra_src_references esr
297 JOIN bin_associations ba ON esr.bin_id = ba.bin
298 WHERE ba.suite = $suite_id
300 SELECT b.source AS source
301 FROM bin_associations ba
302 JOIN binaries b ON ba.bin = b.id WHERE ba.suite = $suite_id) s
303 JOIN dsc_files df ON s.source = df.source
304 JOIN files f ON df.file = f.id
305 JOIN files_archive_map af ON f.id = af.file_id
306 JOIN component c ON af.component_id = c.id
307 JOIN archive ON af.archive_id = archive.id
308 WHERE archive.name = 'ftp-master'
310 psql -F' ' -A -t -c "$query"
313 SELECT './pool/' || c.name || '/' || f.filename
314 FROM bin_associations ba
315 JOIN binaries b ON ba.bin = b.id
316 JOIN files f ON b.file = f.id
317 JOIN files_archive_map af ON f.id = af.file_id
318 JOIN component c ON af.component_id = c.id
319 JOIN archive ON af.archive_id = archive.id
320 WHERE ba.suite = $suite_id AND archive.name = 'ftp-master'
322 psql -F' ' -A -t -c "$query"
# Iterate over every suite known to the database that exists on disk.
325 psql -F' ' -At -c "SELECT id, suite_name FROM suite" |
326 while read id suite; do
327 [ -e $base/ftp/dists/$suite ] || continue
# Resolve suite symlinks (e.g. stable -> codename) so both names appear.
330 distname=$(cd dists; readlink $suite || echo $suite)
331 find ./dists/$distname \! -type d
332 for distdir in ./dists/*; do
333 [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
337 ) | sort -u | gzip -9 > suite-${suite}.list.gz
# Sundries: everything on the ftp site not claimed by any generated list.
340 log "Finding everything on the ftp site to generate sundries"
341 (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
344 zcat *.list.gz | cat - *.list | sort -u |
345 diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
# Combine per-arch data with the shared lists, pool paths first.
347 log "Generating files list"
350 (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
351 cat - sundries.list dists.list project.list docs.list indices.list |
352 sort -u | poolfirst > ../arch-$a.files
# Translation file lists per distribution.
356 for dist in sid jessie stretch; do
357 find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
# "typical" mirror profile: i386+amd64 plus p-u and translations.
361 (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-jessie.list.gz ; zcat translation-stretch.list.gz) |
362 sort -u | poolfirst > ../typical.files
# Build the md5sums index and dsync file list for each public archive, and
# hardlink duplicate files to save space.
369 function mkchecksums() {
370 local archiveroot dsynclist md5list
372 for archive in "${public_archives[@]}"; do
373 archiveroot="$(get_archiveroot "${archive}")"
374 dsynclist=$dbdir/dsync.${archive}.list
375 md5list=${archiveroot}/indices/md5sums
377 log -n "Creating md5 / dsync index file for ${archive}... "
# Generate the file list (excluding the list itself), publish gzipped
# md5sums, then deduplicate identical files via hardlinks (best-effort).
380 ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
381 ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
382 ${bindir}/dsync-flist -q link-dups $dsynclist || true
387 local archiveroot mirrordir
389 log "Regenerating \"public\" mirror/ hardlink fun"
# Archive serial: today's date-based serial, or the previous trace serial
# plus one, whichever is larger — keeps the serial strictly increasing even
# with multiple runs per day.
390 DATE_SERIAL=$(date +"%Y%m%d01")
391 FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} )
392 if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
393 SERIAL="$DATE_SERIAL"
395 SERIAL="$FILESOAPLUS1"
# Rewrite the mirror trace file with timestamp, software, host and serial.
397 date -u > ${TRACEFILE}
398 echo "Using dak v1" >> ${TRACEFILE}
399 echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
400 echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
402 # Ugly "hack", but hey, it does what we want.
403 cp ${TRACEFILE} ${TRACEFILE_BDO}
# Refresh the hardlinked "mirror" copy of each public archive; --link-dest
# makes unchanged files hardlinks rather than copies.
405 for archive in "${public_archives[@]}"; do
406 archiveroot="$(get_archiveroot "${archive}")"
407 mirrordir="${archiveroot}/../mirror"
409 rsync -aH --link-dest ${archiveroot} --delete --delete-after --delete-excluded --exclude Packages.*.new --exclude Sources.*.new --ignore-errors ${archiveroot}/. .
# Prune old database dump files (dump_*) in the current directory.
414 log "Expiring old database dumps..."
416 $scriptsdir/expire_dumps -d . -p -f "dump_*"
# Clean up finished/stale package transitions.
419 function transitionsclean() {
420 log "Removing out of date transitions..."
422 dak transitions -c -a
# Export the Debian Maintainer upload permissions to the web export dir.
426 log "Updating DM permissions page"
427 dak acl export-per-source dm >$exportdir/dm.txt
# Auto-categorize new bugs against ftp.debian.org, as an unprivileged user.
431 log "Categorizing uncategorized bugs filed against ftp.debian.org"
432 sudo -u dak-unpriv dak bts-categorize
# Kick off the sync of the dd-accessible mirror.
435 function ddaccess() {
436 # Tell our dd accessible mirror to sync itself up. Including ftp dir.
437 log "Trigger dd accessible parts sync including ftp dir"
438 ${scriptsdir}/sync-dd dd-sync dd-sync1 dd-sync2 sync
# Verify the public mirror copy against each suite's InRelease file (size
# and SHA1 of every listed entry), then trigger the mirror push. Any
# discrepancy increments ${broken} and aborts the push.
441 function mirrorpush() {
442 log "Checking the public archive copy"
443 cd ${mirrordir}/dists
446 for release in $(find . -name "InRelease"); do
447 echo "Processing: ${release}"
448 subdir=${release%/InRelease}
# Read "checksum size name" triplets from the SHA1 section of InRelease.
449 while read SHASUM SIZE NAME; do
450 if ! [ -f "${subdir}/${NAME}" ]; then
451 bname=$(basename ${NAME})
452 if [[ "${bname}" =~ ^(Packages|Sources|Translation-[a-zA-Z_]+)$ ]]; then
453 # We don't keep unpacked files, don't check for their existence.
454 # We might want to go and check their unpacked shasum, but right now
455 # I don't care. I believe it should be enough if all the packed shasums
459 broken=$(( broken + 1 ))
460 echo "File ${subdir}/${NAME} is missing"
464 # We do have symlinks in the tree (see the contents files currently).
465 # So we use "readlink -f" to check the size of the target, as that's basically
466 # what gen-releases does
467 fsize=$(stat -c %s $(readlink -f "${subdir}/${NAME}"))
468 if [ ${fsize} -ne ${SIZE} ]; then
469 broken=$(( broken + 1 ))
470 echo "File ${subdir}/${NAME} has size ${fsize}, expected is ${SIZE}"
# Compare the SHA1 of the (symlink-resolved) file with the Release entry.
474 fshasum=$(sha1sum $(readlink -f "${subdir}/${NAME}"))
475 fshasum=${fshasum%% *}
476 if [ "${fshasum}" != "${SHASUM}" ]; then
477 broken=$(( broken + 1 ))
478 echo "File ${subdir}/${NAME} has checksum ${fshasum}, expected is ${SHASUM}"
# Feed only the SHA1 stanza of the InRelease file into the loop.
481 done < <(sed '1,/SHA1:/d' "${release}" | sed '/SHA256:/,$d')
484 if [ $broken -gt 0 ]; then
485 log_error "Trouble with the public mirror, found ${broken} errors"
# All good: record the push start and fire off both mirror runs in the
# background (main archive and backports).
489 log "Starting the mirrorpush"
490 date -u > /srv/ftp.debian.org/web/mirrorstart
491 echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
492 echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/mirrorstart
493 sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
494 sudo -H -u archvsync /home/archvsync/runmirrors -a backports > ~dak/runmirrorsbpo.log 2>&1 &
# Sync the backports mirror via its own update script.
497 function mirrorpush-backports() {
498 log "Syncing backports mirror"
499 sudo -u backports /home/backports/bin/update-archive
# Export per-suite package lists for the i18n (DDTP) project into a
# timestamped directory, sign the timestamp so the remote side can verify
# it came from us (see the i18n sync check earlier in this file), and point
# the "i18n" symlink at the fresh export.
503 log "Exporting package data foo for i18n project"
504 STAMP=$(date "+%Y%m%d%H%M")
505 mkdir -p ${scriptdir}/i18n/${STAMP}
506 cd ${scriptdir}/i18n/${STAMP}
507 for suite in stable testing unstable; do
508 codename=$(dak admin s show ${suite}|grep '^Codename')
# Strip everything up to the last space, leaving just the codename value.
509 codename=${codename##* }
510 echo "Codename is ${codename}"
511 dak control-suite -l ${suite} >${codename}
513 echo "${STAMP}" > timestamp
514 gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 473041FA --detach-sign -o timestamp.gpg timestamp
518 ln -sfT ${scriptdir}/i18n/${STAMP} i18n
# Expire exports older than two days, keeping the current one.
521 find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
# Refresh dinstall statistics data and plots, plus archive space/package
# counts for the website.
525 log "Updating stats data"
527 $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
528 R --slave --vanilla < $base/misc/ftpstats.R
529 dak stats arch-space > $webdir/arch-space
530 dak stats pkg-nums > $webdir/pkg-nums
# Delete transaction-id files older than ~3 months (current directory;
# an elided cd presumably selects the right dir first — confirm).
533 function cleantransactions() {
534 log "Cleanup transaction ids older than 3 months"
536 find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -print0 | xargs -0 --no-run-if-empty rm
# Feed a finished logfile to the stats tool.
539 function logstats() {
540 $masterdir/tools/logs.py "$1"
543 # save timestamp when we start
544 function savetimestamp() {
545 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
546 echo ${NOW} > "${dbdir}/dinstallstart"
# Mail the complete dinstall log to the cron address.
549 function maillogfile() {
550 cat "$LOGFILE" | mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
# Archive the logfile under the run's start timestamp; falls back to the
# current time (skipping stats) if the start marker is missing.
553 function renamelogfile() {
554 if [ -f "${dbdir}/dinstallstart" ]; then
555 NOW=$(cat "${dbdir}/dinstallstart")
557 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
558 logstats "$logdir/dinstall_${NOW}.log"
559 bzip2 -9 "$logdir/dinstall_${NOW}.log"
561 error "Problem, I don't know when dinstall started, unable to do log statistics."
562 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
564 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
565 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# Export the list of source packages currently in testing for the website.
569 function testingsourcelist() {
570 dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
573 # do a last run of process-unchecked before dinstall is on.
574 function process_unchecked() {
575 log "Processing the unchecked queue"
# -p: run without taking the usual lock (dinstall already holds it).
576 UNCHECKED_WITHOUT_LOCK="-p"
581 # Function to update a "statefile" telling people what we are doing
584 # This should be called with the argument(s)
585 #   - Status name we want to show.
# Write the dinstall state file; no comments may go below: line 589 opens
# a heredoc whose terminator is elided from this view.
588 RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
589 cat >"${DINSTALLSTATE}" <<EOF
590 Dinstall start: ${DINSTALLBEGIN}
592 Action start: ${RIGHTNOW}
596 # extract changelogs and stuff
# Extract changelogs for ftp-master and backports, rsync them into the
# public export trees, and trigger the static mirror component update.
# Guarded by $LOCK_CHANGELOG (lockfile retries 3 times).
597 function changelogs() {
598 if lockfile -r3 $LOCK_CHANGELOG; then
599 log "Extracting changelogs"
600 dak make-changelog -e -a ftp-master
601 [ -f ${exportdir}/changelogs/filelist.yaml ] && xz -f ${exportdir}/changelogs/filelist.yaml
602 mkdir -p ${exportpublic}/changelogs
603 cd ${exportpublic}/changelogs
604 rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
605 sudo -H -u staticsync /usr/local/bin/static-update-component metadata.ftp-master.debian.org >/dev/null 2>&1 &
607 dak make-changelog -e -a backports
608 [ -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml ] && xz -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml
609 mkdir -p /srv/backports-master.debian.org/rsync/export/changelogs
610 cd /srv/backports-master.debian.org/rsync/export/changelogs
611 rsync -aHW --delete --delete-after --ignore-errors /srv/backports-master.debian.org/export/changelogs/. .
# NOTE(review): the lock is removed *before* the trap is installed; if the
# function is interrupted before line 612 the lock is never released, and
# the trap installed afterwards re-runs removal on exit — confirm intent.
612 remove_changelog_lock
613 trap remove_changelog_lock EXIT TERM HUP INT QUIT
# Keep a git history of the index files (Contents/Packages/Sources) so
# pdiffs can be derived from it.
617 function gitpdiff() {
618 # Might be that we want to change this to have more than one git repository.
619 # Advantage of one is that we do not need much space in terms of storage in git itself,
620 # git gc is pretty good on our input.
621 # But it might be faster. Well, lets test.
622 log "Adjusting the git tree for pdiffs"
625 # The regex needs the architectures separated with \|
626 garchs=$(dak admin a list|sed -e ':q;N;s/\n/\\|/g;t q')
628 # First, get all the files we want to work on. ../dists/ is a symlink to the real dists/ we
630 # Also, we only want contents, packages and sources.
# NOTE(review): the regex interpolates $archs but line 626 assigns $garchs —
# looks like a variable-name mismatch; confirm $archs is set on an elided
# line before changing anything.
631 for file in $(find ../dists/ -regex ".*/\(Contents-\($archs\)\|\(Packages\|Sources\)\).gz"); do
640 # Second, add all there is into git
643 # Maybe we want to make this the same for tag and commit? But well, shouldn't matter
645 TAGD=$(date +%Y-%m-%d-%H-%M)
646 git commit -m "Commit of ${COMD}"