2 # Timestamp. Used for dinstall stat graphs
# $1 is the stage name being logged; output time format is HH:MM:SS.
4 echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
# Lock helpers. NOTE(review): the bodies of the first two functions are not
# visible in this excerpt; only the headers appear here.
8 function remove_daily_lock() {
12 # Remove changelog lock
13 function remove_changelog_lock() {
# Drop every dinstall lock in one go (used by cleanup/error paths).
18 function remove_all_locks() {
# NOTE(review): $LOCK_DAILY / $LOCK_ACCEPTED are unquoted — safe only while
# the lock paths contain no whitespace or glob characters.
19 rm -f $LOCK_DAILY $LOCK_ACCEPTED
22 # If we error out this one is called, *FOLLOWED* by cleanup above
# Error hook: mails the failing stage's log to cron@ftp-master.
# Reads ${error} (whether dinstall keeps going) and ${STAGEFILE}.
24 ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
26 subject="ATTENTION ATTENTION!"
# "false" means the run continues despite the error; otherwise it aborted.
27 if [ "${error}" = "false" ]; then
28 subject="${subject} (continued)"
30 subject="${subject} (interrupted)"
32 subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
# Mail the stage log if readable, otherwise a placeholder note; the whole
# if/fi group is piped into mail as the message body.
34 if [ -r "${STAGEFILE}.log" ]; then
35 cat "${STAGEFILE}.log"
37 echo "file ${STAGEFILE}.log does not exist, sorry"
38 fi | mail -s "${subject}" -a "X-Debian: DAK" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
41 ########################################################################
42 # the actual dinstall functions follow #
43 ########################################################################
45 # pushing merkels QA user, part one
# Wake the qa.debian.org account so it knows dinstall has begun; the remote
# command is just "sleep 1" — the connection itself is the signal.
47 log "Telling QA user that we start dinstall"
48 ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1
51 # Updating various files
# Regenerate assorted static documentation via helper scripts in $scriptsdir.
53 log "Updating Bugs docu, Mirror list and mailing-lists.txt"
55 $scriptsdir/update-bugdoctxt
56 $scriptsdir/update-mirrorlists
57 $scriptsdir/update-mailingliststxt
58 $scriptsdir/update-pseudopackages.sh
61 # The first i18n one, syncing new descriptions
# Pull the latest translated package descriptions (DDTP) and, when the
# accompanying timestamp is signed by our key and sane, publish them.
63 log "Synchronizing i18n package descriptions"
64 # First sync their newest data
65 cd ${scriptdir}/i18nsync
# "|| true": a failed sync must not abort the whole dinstall run.
66 rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
68 # Now check if we still know about the packages for which they created the files
69 # is the timestamp signed by us?
# NOTE(review): wrapping gpgv in $( ) is unusual — it runs the command and
# then executes its (empty) output; the exit status is what the if tests.
70 if $(gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
71 # now read it. As its signed by us we are sure the content is what we expect, no need
72 # to do more here. And we only test -d a directory on it anyway.
73 TSTAMP=$(cat timestamp)
74 # do we have the dir still?
75 if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
# Sanity-check the uploaded files before copying them into the archive.
77 if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
78 # Yay, worked, lets copy around
79 for dir in stretch sid; do
80 if [ -d dists/${dir}/ ]; then
81 cd dists/${dir}/main/i18n
# Translation-en.bz2 and the pdiff dirs stay owned by the archive side.
82 rsync -aq --delete --delete-after --exclude Translation-en.bz2 --exclude Translation-*.diff/ . ${ftpdir}/dists/${dir}/main/i18n/.
84 cd ${scriptdir}/i18nsync
# Failure paths below notify debian-l10n-devel by mail instead of aborting.
87 echo "ARRRR, bad guys, wrong files, ARRR"
88 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
91 echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
92 echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
95 echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
96 echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -a "X-Debian: DAK" -s "You can't keep blaming yourself. Just blame yourself once, and move on." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
# Part of the cruft check: report stale override entries.
101 log "Checking for cruft in overrides"
# Remove source/binary associations made obsolete by newer uploads.
105 function dominate() {
106 log "Removing obsolete source and binary associations"
# Automatic decrufting: drop obsolete binaries from unstable, and from
# experimental when unstable already carries a newer version ("NVIU").
110 function autocruft() {
111 log "Check for obsolete binary packages"
112 dak auto-decruft -s unstable
113 dak auto-decruft -s experimental --if-newer-version-in unstable --if-newer-version-in-rm-msg "NVIU"
# Import uploader fingerprints from the Debian and DM keyrings; if the DM
# import produced output (i.e. key changes), mail it to debian-project.
116 function fingerprints() {
117 log "Updating fingerprints"
118 dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
121 dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
# Only send mail when the import actually reported changes (-s: non-empty).
# The heredoc below is the literal mail body — do not edit casually.
123 if [ -s "${OUTFILE}" ]; then
124 /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
125 From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
126 To: <debian-project@lists.debian.org>
127 Subject: Debian Maintainers Keyring changes
128 Content-Type: text/plain; charset=utf-8
132 The following changes to the debian-maintainers keyring have just been activated:
136 Debian distribution maintenance software,
137 on behalf of the Keyring maintainers
# Export override data as plain-text files for the public indices.
144 function overrides() {
145 log "Writing overrides into text files"
# Build indices/package-file.map.bz2 (package -> pool file) per archive.
153 log "Generating package / file mapping"
154 for archive in "${public_archives[@]}"; do
155 archiveroot="$(get_archiveroot "${archive}")"
156 dak make-pkg-file-mapping "${archive}" | bzip2 -9 > "${archiveroot}/indices/package-file.map.bz2"
# Generate Packages/Sources and Contents indices for every public archive.
160 function packages() {
161 log "Generating Packages and Sources files"
162 for archive in "${public_archives[@]}"; do
163 log " Generating Packages/Sources for ${archive}"
164 dak generate-packages-sources2 -a "${archive}"
165 log " Generating Contents for ${archive}"
166 dak contents generate -a "${archive}"
# Build pdiff (incremental index diff) files.
171 log "Generating pdiff files"
172 dak generate-index-diffs
# Write Release files for every public archive.
176 log "Generating Release files"
177 for archive in "${public_archives[@]}"; do
178 dak generate-releases -a "${archive}"
# Expire old packages/files from the suites and clean stale queue files.
182 function dakcleanup() {
183 log "Cleanup old packages/files"
# -m 10000: presumably caps how much is removed per run — confirm in dak.
184 dak clean-suites -m 10000
185 dak clean-queues -i "$unchecked"
# Per-archive housekeeping: remove stray core dumps and publish a recursive
# ls -lR listing (consumed by mirror tooling).
192 for archive in "${public_archives[@]}"; do
193 archiveroot="$(get_archiveroot "${archive}")"
196 log "Removing any core files ..."
197 find -type f -name core -print -delete
199 log "Checking symlinks ..."
# TZ=UTC keeps the listing stable regardless of server timezone.
202 log "Creating recursive directory listing ... "
204 TZ=UTC ls -lR | gzip -9c --rsyncable > ${FILENAME}.gz
# Rebuild the Maintainers/Uploaders indices for every public archive.
208 function mkmaintainers() {
212 log 'Creating Maintainers index ... '
214 for archive in "${public_archives[@]}"; do
215 archiveroot="$(get_archiveroot "${archive}")"
216 indices="${archiveroot}/indices"
# Skip archives that do not publish an indices/ directory.
217 if ! [ -d "${indices}" ]; then
222 dak make-maintainers -a "${archive}" ${scriptdir}/masterfiles/pseudo-packages.maintainers
# --rsyncable trades a little compression ratio for mirror-friendly deltas.
223 gzip -9v --rsyncable <Maintainers >Maintainers.gz
224 gzip -9v --rsyncable <Uploaders >Uploaders.gz
# Publish gzipped copies of the per-suite override files under indices/.
228 function copyoverrides() {
229 log 'Copying override files into public view ...'
# Brace expansion enumerates suite x (extra.) x component combinations.
231 for ofile in ${overridedir}/override.{squeeze,wheezy,jessie,stretch,sid}.{,extra.}{main,contrib,non-free}*; do
233 gzip -9cv --rsyncable ${ofile} > ${indices}/${bname}.gz
# g+w: presumably so the archive group can replace these later — confirm.
234 chmod g+w ${indices}/${bname}.gz
# Build the per-architecture / per-suite file lists under
# indices/files/components (consumed by partial-mirror scripts).
# The embedded SQL below runs against the dak projectb database.
238 function mkfilesindices() {
241 cd $base/ftp/indices/files/components
245 log "Querying postgres"
247 SELECT './pool/' || c.name || '/' || f.filename AS path, a.arch_string AS arch_string
249 JOIN files_archive_map af ON f.id = af.file_id
250 JOIN component c ON af.component_id = c.id
251 JOIN archive ON af.archive_id = archive.id
254 JOIN architecture a ON b.architecture = a.id)
256 WHERE archive.name = 'ftp-master'
257 ORDER BY path, arch_string
259 psql -At -c "$query" >$ARCHLIST
# Print each path plus all of its parent directories (each parent once).
262 perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
# Emit ./pool/ paths first, everything else afterwards, preserving order.
265 perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
268 log "Generating sources list"
# Source list: arch-less rows ("|"-terminated) from the DB dump plus
# everything under dists/.../source/ and the top-level dists files.
270 sed -n 's/|$//p' $ARCHLIST
272 find ./dists -maxdepth 1 \! -type d
273 find ./dists \! -type d | grep "/source/"
274 ) | sort -u | gzip -9 > source.list.gz
276 log "Generating arch lists"
# Architectures seen in the dump; amd64 is forced in, "all" handled per-arch.
278 ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
280 (sed -n "s/|$a$//p" $ARCHLIST
281 sed -n 's/|all$//p' $ARCHLIST
284 find ./dists -maxdepth 1 \! -type d
285 find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
286 ) | sort -u | gzip -9 > arch-$a.list.gz
289 log "Generating suite lists"
# $1 is a numeric suite id; printf %d guards against non-numeric input
# before it is interpolated into the SQL below.
292 local suite_id="$(printf %d $1)"
295 SELECT DISTINCT './pool/' || c.name || '/' || f.filename
297 (SELECT sa.source AS source
298 FROM src_associations sa
299 WHERE sa.suite = $suite_id
302 FROM extra_src_references esr
303 JOIN bin_associations ba ON esr.bin_id = ba.bin
304 WHERE ba.suite = $suite_id
306 SELECT b.source AS source
307 FROM bin_associations ba
308 JOIN binaries b ON ba.bin = b.id WHERE ba.suite = $suite_id) s
309 JOIN dsc_files df ON s.source = df.source
310 JOIN files f ON df.file = f.id
311 JOIN files_archive_map af ON f.id = af.file_id
312 JOIN component c ON af.component_id = c.id
313 JOIN archive ON af.archive_id = archive.id
314 WHERE archive.name = 'ftp-master'
316 psql -F' ' -A -t -c "$query"
319 SELECT './pool/' || c.name || '/' || f.filename
320 FROM bin_associations ba
321 JOIN binaries b ON ba.bin = b.id
322 JOIN files f ON b.file = f.id
323 JOIN files_archive_map af ON f.id = af.file_id
324 JOIN component c ON af.component_id = c.id
325 JOIN archive ON af.archive_id = archive.id
326 WHERE ba.suite = $suite_id AND archive.name = 'ftp-master'
328 psql -F' ' -A -t -c "$query"
# One list per suite that exists on disk under ftp/dists.
331 psql -F' ' -At -c "SELECT id, suite_name FROM suite" |
332 while read id suite; do
333 [ -e $base/ftp/dists/$suite ] || continue
# Resolve codename symlinks so suite and codename share one file set.
336 distname=$(cd dists; readlink $suite || echo $suite)
337 find ./dists/$distname \! -type d
338 for distdir in ./dists/*; do
339 [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
343 ) | sort -u | gzip -9 > suite-${suite}.list.gz
346 log "Finding everything on the ftp site to generate sundries"
347 (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
# Sundries: files on disk that no generated list accounts for.
350 zcat *.list.gz | cat - *.list | sort -u |
351 diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
353 log "Generating files list"
356 (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
357 cat - sundries.list dists.list project.list docs.list indices.list |
358 sort -u | poolfirst > ../arch-$a.files
# Translation file lists, one per distribution.
362 for dist in sid jessie stretch; do
363 find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
# "typical" mirror profile: i386+amd64 plus proposed-updates and translations.
367 (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-jessie.list.gz ; zcat translation-stretch.list.gz) |
368 sort -u | poolfirst > ../typical.files
# Create md5sums / dsync file lists for each public archive.
375 function mkchecksums() {
376 local archiveroot dsynclist md5list
378 for archive in "${public_archives[@]}"; do
379 archiveroot="$(get_archiveroot "${archive}")"
380 dsynclist=$dbdir/dsync.${archive}.list
381 md5list=${archiveroot}/indices/md5sums
383 log -n "Creating md5 / dsync index file for ${archive}... "
386 ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
# -9n: omit the gzip timestamp so repeated runs produce identical output.
387 ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
# Best effort: duplicate-linking failures must not abort dinstall.
388 ${bindir}/dsync-flist -q link-dups $dsynclist || true
393 local archiveroot mirrordir
395 log "Regenerating \"public\" mirror/ hardlink fun"
# Serial is max(today's date serial, previous serial + 1) so it always
# increases even across multiple runs per day.
396 DATE_SERIAL=$(date +"%Y%m%d01")
397 FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} )
398 if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
399 SERIAL="$DATE_SERIAL"
401 SERIAL="$FILESOAPLUS1"
# Rewrite the mirror trace file with current run metadata.
403 date -u > ${TRACEFILE}
404 echo "Using dak v1" >> ${TRACEFILE}
405 echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
406 echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
408 # Ugly "hack", but hey, it does what we want.
409 cp ${TRACEFILE} ${TRACEFILE_BDO}
# Hardlink-copy each archive into its public mirror/ directory.
411 for archive in "${public_archives[@]}"; do
412 archiveroot="$(get_archiveroot "${archive}")"
413 mirrordir="${archiveroot}/../mirror"
# --link-dest makes this a hardlink farm rather than a byte-for-byte copy.
415 rsync -aH --link-dest ${archiveroot} --delete --delete-after --delete-excluded --exclude Packages.*.new --exclude Sources.*.new --ignore-errors ${archiveroot}/. .
# Expire old database dumps according to expire_dumps' retention policy.
420 log "Expiring old database dumps..."
422 $scriptsdir/expire_dumps -d . -p -f "dump_*"
# Drop transitions that are finished or out of date.
425 function transitionsclean() {
426 log "Removing out of date transitions..."
428 dak transitions -c -a
# Export the Debian Maintainer upload permissions to the web dir.
432 log "Updating DM permissions page"
433 dak acl export-per-source dm >$exportdir/dm.txt
# Auto-categorize new bugs against ftp.debian.org (run unprivileged).
437 log "Categorizing uncategorized bugs filed against ftp.debian.org"
438 sudo -u dak-unpriv dak bts-categorize
# Kick off the developer-accessible mirror sync (ftp dir included).
441 function ddaccess() {
442 # Tell our dd accessible mirror to sync itself up. Including ftp dir.
443 log "Trigger dd accessible parts sync including ftp dir"
444 ${scriptsdir}/sync-dd dd-sync dd-sync1 dd-sync2 sync
# Verify the public mirror copy against its InRelease files (existence,
# size and SHA1 of every listed entry), then trigger the mirror push.
447 function mirrorpush() {
448 log "Checking the public archive copy"
449 cd ${mirrordir}/dists
452 for release in $(find . -name "InRelease"); do
453 echo "Processing: ${release}"
454 subdir=${release%/InRelease}
# Reads SHASUM/SIZE/NAME triples from the SHA1 section (see "done" below).
455 while read SHASUM SIZE NAME; do
456 if ! [ -f "${subdir}/${NAME}" ]; then
457 bname=$(basename ${NAME})
458 if [[ "${bname}" =~ ^(Packages|Sources|Translation-[a-zA-Z_]+)$ ]]; then
459 # We don't keep unpacked files, don't check for their existance.
460 # We might want to go and check their unpacked shasum, but right now
461 # I don't care. I believe it should be enough if all the packed shasums
465 broken=$(( broken + 1 ))
466 echo "File ${subdir}/${NAME} is missing"
470 # We do have symlinks in the tree (see the contents files currently).
471 # So we use "readlink -f" to check the size of the target, as thats basically
472 # what gen-releases does
473 fsize=$(stat -c %s $(readlink -f "${subdir}/${NAME}"))
474 if [ ${fsize} -ne ${SIZE} ]; then
475 broken=$(( broken + 1 ))
476 echo "File ${subdir}/${NAME} has size ${fsize}, expected is ${SIZE}"
# Size matched; now verify the SHA1 checksum of the (resolved) file.
480 fshasum=$(sha1sum $(readlink -f "${subdir}/${NAME}"))
481 fshasum=${fshasum%% *}
482 if [ "${fshasum}" != "${SHASUM}" ]; then
483 broken=$(( broken + 1 ))
484 echo "File ${subdir}/${NAME} has checksum ${fshasum}, expected is ${SHASUM}"
# The two seds slice out just the SHA1 block of the InRelease file.
487 done < <(sed '1,/SHA1:/d' "${release}" | sed '/SHA256:/,$d')
# Refuse to push if any file was missing, mis-sized or mis-summed.
490 if [ $broken -gt 0 ]; then
491 log_error "Trouble with the public mirror, found ${broken} errors"
495 log "Starting the mirrorpush"
496 date -u > /srv/ftp.debian.org/web/mirrorstart
497 echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
498 echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/mirrorstart
# Fire off both mirror runs in the background; logs land in ~dak.
499 sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
500 sudo -H -u archvsync /home/archvsync/runmirrors -a backports > ~dak/runmirrorsbpo.log 2>&1 &
# Let the backports user refresh its own mirror.
503 function mirrorpush-backports() {
504 log "Syncing backports mirror"
505 sudo -u backports /home/backports/bin/update-archive
# Second i18n step: export per-suite package lists for the DDTP project
# and sign a timestamp so the sync step (i18n part one) can trust it.
509 log "Exporting package data foo for i18n project"
510 STAMP=$(date "+%Y%m%d%H%M")
511 mkdir -p ${scriptdir}/i18n/${STAMP}
512 cd ${scriptdir}/i18n/${STAMP}
513 for suite in stable testing unstable; do
# "Codename: <name>" -> keep only the last whitespace-separated word.
514 codename=$(dak admin s show ${suite}|grep '^Codename')
515 codename=${codename##* }
516 echo "Codename is ${codename}"
517 dak control-suite -l ${suite} >${codename}
519 echo "${STAMP}" > timestamp
# Detached ASCII-armoured signature over the timestamp file; verified by
# the gpgv call in the i18n sync step.
520 gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 473041FA --detach-sign -o timestamp.gpg timestamp
# Atomically repoint the i18n symlink at the fresh export, then prune
# exports older than two days (always keeping the current one).
524 ln -sfT ${scriptdir}/i18n/${STAMP} i18n
527 find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
# Refresh dinstall statistics data and graphs.
531 log "Updating stats data"
533 $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
534 R --slave --vanilla < $base/misc/ftpstats.R
535 dak stats arch-space > $webdir/arch-space
536 dak stats pkg-nums > $webdir/pkg-nums
# Delete transaction-id files older than 90 days (~3 months).
539 function cleantransactions() {
540 log "Cleanup transaction ids older than 3 months"
542 find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -print0 | xargs -0 --no-run-if-empty rm
# Feed a finished logfile ($1) into the stats/graphing tool.
545 function logstats() {
546 $masterdir/tools/logs.py "$1"
549 # save timestamp when we start
# Record dinstall's start time; renamelogfile reads it back later.
550 function savetimestamp() {
551 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
552 echo ${NOW} > "${dbdir}/dinstallstart"
# Mail the full dinstall log to the cron alias.
555 function maillogfile() {
556 cat "$LOGFILE" | mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
# Rename the logfile after the recorded start timestamp, generate log
# stats, then compress. Falls back to "now" if the start stamp is missing.
559 function renamelogfile() {
560 if [ -f "${dbdir}/dinstallstart" ]; then
561 NOW=$(cat "${dbdir}/dinstallstart")
563 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
564 logstats "$logdir/dinstall_${NOW}.log"
565 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# No start stamp: log stats are impossible; still archive the log.
567 error "Problem, I don't know when dinstall started, unable to do log statistics."
568 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
570 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
571 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# Export the list of source packages in testing (heidi format).
575 function testingsourcelist() {
576 dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
579 # do a last run of process-unchecked before dinstall is on.
580 function process_unchecked() {
581 log "Processing the unchecked queue"
# "-p" presumably means "run without taking the unchecked lock" (cf. the
# variable name) — confirm against dak's process-upload options.
582 UNCHECKED_WITHOUT_LOCK="-p"
587 # Function to update a "statefile" telling people what we are doing
590 # This should be called with the argument(s)
591 # - Status name we want to show.
# Writes start time, current action and the action's start time into
# ${DINSTALLSTATE} via the heredoc below (heredoc body is output, not code).
594 RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
595 cat >"${DINSTALLSTATE}" <<EOF
596 Dinstall start: ${DINSTALLBEGIN}
598 Action start: ${RIGHTNOW}
602 # extract changelogs and stuff
# Export changelogs for ftp-master and backports and push them to the
# public metadata host; guarded by the changelog lock (3 lockfile retries).
603 function changelogs() {
604 if lockfile -r3 $LOCK_CHANGELOG; then
605 log "Extracting changelogs"
606 dak make-changelog -e -a ftp-master
607 [ -f ${exportdir}/changelogs/filelist.yaml ] && xz -f ${exportdir}/changelogs/filelist.yaml
608 mkdir -p ${exportpublic}/changelogs
609 cd ${exportpublic}/changelogs
610 rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
# Background static-component push; output deliberately discarded.
611 sudo -H -u staticsync /usr/local/bin/static-update-component metadata.ftp-master.debian.org >/dev/null 2>&1 &
613 dak make-changelog -e -a backports
614 [ -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml ] && xz -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml
615 mkdir -p /srv/backports-master.debian.org/rsync/export/changelogs
616 cd /srv/backports-master.debian.org/rsync/export/changelogs
617 rsync -aHW --delete --delete-after --ignore-errors /srv/backports-master.debian.org/export/changelogs/. .
# NOTE(review): the lock is removed *before* the trap is installed here;
# the trap then covers any later exit path — confirm the intended order.
618 remove_changelog_lock
619 trap remove_changelog_lock EXIT TERM HUP INT QUIT
# Commit the pdiff-relevant index files (Contents/Packages/Sources) into a
# git tree so history of the indices is kept.
623 function gitpdiff() {
624 # Might be that we want to change this to have more than one git repository.
625 # Advantage of one is that we do not need much space in terms of storage in git itself,
626 # git gc is pretty good on our input.
627 # But it might be faster. Well, lets test.
628 log "Adjusting the git tree for pdiffs"
631 # The regex needs the architectures seperated with \|
# Join "dak admin a list" output into a \|-separated alternation.
632 garchs=$(dak admin a list|sed -e ':q;N;s/\n/\\|/g;t q')
634 # First, get all the files we want to work on. ../dists/ is a symlink to the real dists/ we
636 # Also, we only want contents, packages and sources.
# NOTE(review): the regex interpolates $archs but the variable assigned
# above is $garchs — looks like a mismatch; confirm against the full file.
637 for file in $(find ../dists/ -regex ".*/\(Contents-\($archs\)\|\(Packages\|Sources\)\).gz"); do
646 # Second, add all there is into git
649 # Maybe we want to make this the same for tag and commit? But well, shouldn't matter
651 TAGD=$(date +%Y-%m-%d-%H-%M)
652 git commit -m "Commit of ${COMD}"