2 # Timestamp. Used for dinstall stat graphs
# NOTE(review): this file is a sampled excerpt of dak's cron.dinstall; the
# enclosing function header for this timestamp helper (presumably `ts`) is
# not visible in this view — confirm against upstream before editing.
# $1 is a free-form stage label included in the log line.
4 echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
# Lock management helpers. Bodies are partially elided in this sampled view;
# closing braces and some statements are not visible — confirm upstream.
# Remove the daily (dinstall) lock.
8 function remove_daily_lock() {
12 # Remove changelog lock
13 function remove_changelog_lock() {
# Remove every lock we might hold.
18 function remove_all_locks() {
# NOTE(review): $LOCK_DAILY / $LOCK_ACCEPTED are unquoted; safe only while
# the lock paths contain no whitespace or glob characters.
19 rm -f $LOCK_DAILY $LOCK_ACCEPTED
22 function remove_locks {
# Reset the exit/signal traps installed by lockaccepted below, then log.
24 trap - EXIT TERM HUP INT QUIT
25 ts "locked part finished"
# Take the "accepted" lock and ensure it is cleaned up on any exit path.
28 function lockaccepted {
29 lockfile "$LOCK_ACCEPTED"
30 trap remove_all_locks EXIT TERM HUP INT QUIT
33 # If we error out this one is called, *FOLLOWED* by cleanup above
# Error handler: mails the failing stage's log file to the ftp-master cron
# alias. Reads ${error} (whether the run continued) and ${STAGEFILE} (the
# stage that failed) from the caller's scope — both set elsewhere in this file.
35 ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
37 subject="ATTENTION ATTENTION!"
38 if [ "${error}" = "false" ]; then
39 subject="${subject} (continued)"
# NOTE(review): the `else` line between these branches is not visible in
# this sampled view.
41 subject="${subject} (interrupted)"
43 subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
# Pipe either the stage log or a fallback message into mail(1).
45 if [ -r "${STAGEFILE}.log" ]; then
46 cat "${STAGEFILE}.log"
48 echo "file ${STAGEFILE}.log does not exist, sorry"
49 fi | mail -s "${subject}" -a "X-Debian: DAK" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
52 ########################################################################
53 # the actual dinstall functions follow                                 #
54 ########################################################################
56 # pushing merkels QA user, part one
# Ping the QA host so it knows a dinstall run has started. BatchMode plus
# the timeouts keep a dead host from hanging the run; `sleep 1` is just a
# no-op remote command.
58 log "Telling QA user that we start dinstall"
59 ssh -n -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1
62 # Updating various files
# Regenerate assorted static documentation/lists via helper scripts in
# $scriptsdir. Failures here are not explicitly handled in the visible lines.
64 log "Updating Bugs docu, Mirror list and mailing-lists.txt"
66 $scriptsdir/update-bugdoctxt
67 $scriptsdir/update-mirrorlists
68 $scriptsdir/update-mailingliststxt
69 $scriptsdir/update-pseudopackages.sh
72 # The first i18n one, syncing new descriptions
# Pull new DDTP translation data, verify the signed timestamp, sanity-check
# the files, then copy Translation-* indices into the public archive.
74 log "Synchronizing i18n package descriptions"
75 # First sync their newest data
76 cd ${scriptdir}/i18nsync
# `|| true`: a failed sync is tolerated; we then simply verify whatever we have.
77 rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
79 # Now check if we still know about the packages for which they created the files
80 # is the timestamp signed by us?
# NOTE(review): `if $(gpgv ...)` runs gpgv inside a command substitution and
# then executes gpgv's *stdout* as a command. gpgv writes mostly to stderr,
# so the substitution is usually empty and the `if` succeeds regardless of
# the verification result. This should almost certainly be
# `if gpgv --keyring ... timestamp.gpg timestamp; then` — confirm upstream.
81 if $(gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
82 # now read it. As its signed by us we are sure the content is what we expect, no need
83 # to do more here. And we only test -d a directory on it anyway.
84 TSTAMP=$(cat timestamp)
85 # do we have the dir still?
86 if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
# Lets check!
88 if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
89 # Yay, worked, lets copy around
90 for dir in ${extimportdists}; do
91 if [ -d dists/${dir}/ ]; then
92 cd dists/${dir}/main/i18n
93 rsync -aq --delete --delete-after --exclude Translation-en.bz2 --exclude Translation-*.diff/ . ${ftpdir}/dists/${dir}/main/i18n/.
95 cd ${scriptdir}/i18nsync
# Failure branches below mail the l10n list; the interleaving `else`/`fi`/
# `done` lines are not visible in this sampled view.
98 echo "ARRRR, bad guys, wrong files, ARRR"
99 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
102 echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
103 echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
106 echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
107 echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -a "X-Debian: DAK" -s "You can't keep blaming yourself. Just blame yourself once, and move on." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
111 # Syncing AppStream/DEP-11 data
# Pull AppStream metadata, validate it, then fan it out to every configured
# distribution/component under ${ftpdir}. Mirrors the structure of the i18n
# sync above.
113 log "Synchronizing AppStream metadata"
114 # First sync their newest data
115 local dep11dir="${scriptdir}/dep11"
# `|| true`: tolerate a failed sync; validation below guards what we import.
118 rsync -aq --delete --delete-after dep11-sync:/does/not/matter . || true
121 if ${scriptsdir}/dep11-basic-validate.py . ${dep11dir}/; then
122 # Yay, worked, lets copy around
123 for dir in ${extimportdists}; do
124 if [ -d ${dir}/ ]; then
125 for comp in main contrib non-free; do
126 mkdir -p ${ftpdir}/dists/${dir}/${comp}/dep11
# NOTE(review): `--exclude *.tmp` is unquoted — if a *.tmp file exists in
# the current directory the shell expands the glob before rsync sees it.
# Should be quoted: --exclude '*.tmp'.
128 rsync -aq --delete --delete-after --exclude *.tmp . ${ftpdir}/dists/${dir}/${comp}/dep11/.
# Validation-failure branch: mail the maintainer. The `else`/`done`/`fi`
# lines between here and above are not visible in this sampled view.
134 echo "ARRRR, bad guys, wrong files, ARRR"
135 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" mak@debian.org
# Start of the override-cruft check (body elided in this view).
140 log "Checking for cruft in overrides"
# Drop obsolete source/binary suite associations and prune debug suites.
144 function dominate() {
145 log "Removing obsolete source and binary associations"
# Keep the *-debug suites in sync with what dominate just removed.
147 dak manage-debug-suites unstable-debug experimental-debug
# Remove binary packages that are no longer built from any source ("cruft").
150 function autocruft() {
151 log "Check for obsolete binary packages"
152 dak auto-decruft -s unstable
# NVIU = "Newer Version In Unstable": decruft experimental packages that
# unstable has overtaken.
153 dak auto-decruft -s experimental --if-newer-version-in unstable --if-newer-version-in-rm-msg "NVIU"
# Import the Debian and DM keyrings into dak's database; if the DM import
# produced any changes, announce them to debian-project by mail.
156 function fingerprints() {
157 log "Updating fingerprints"
158 dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
# ${OUTFILE} is created above this line in the full file (not visible in
# this sampled view); it captures the list of DM keyring changes.
161 dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
# Only send mail when there were actual changes (-s: file is non-empty).
163 if [ -s "${OUTFILE}" ]; then
164 /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
165 From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
166 To: <debian-project@lists.debian.org>
167 Subject: Debian Maintainers Keyring changes
168 Content-Type: text/plain; charset=utf-8
172 The following changes to the debian-maintainers keyring have just been activated:
176 Debian distribution maintenance software,
177 on behalf of the Keyring maintainers
# Dump the override tables to text files (body elided in this view).
184 function overrides() {
185 log "Writing overrides into text files"
# For each public archive, write a bzip2-compressed package -> pool file
# mapping into that archive's indices directory.
193 log "Generating package / file mapping"
194 for archive in "${public_archives[@]}"; do
195 archiveroot="$(get_archiveroot "${archive}")"
196 dak make-pkg-file-mapping "${archive}" | bzip2 -9 > "${archiveroot}/indices/package-file.map.bz2"
# Generate Packages/Sources and Contents indices for every public archive.
200 function packages() {
201 log "Generating Packages and Sources files"
202 for archive in "${public_archives[@]}"; do
203 log "  Generating Packages/Sources for ${archive}"
204 dak generate-packages-sources2 -a "${archive}"
205 log "  Generating Contents for ${archive}"
206 dak contents generate -a "${archive}"
# pdiff (incremental index diff) generation; its function header is not
# visible in this sampled view.
211 log "Generating pdiff files"
212 dak generate-index-diffs
# (In)Release file generation for every public archive.
216 log "Generating Release files"
217 for archive in "${public_archives[@]}"; do
218 dak generate-releases -a "${archive}"
# Expire old packages/files from suites and queues.
222 function dakcleanup() {
223 log "Cleanup old packages/files"
224 dak clean-suites -m 10000
225 dak clean-queues -i "$unchecked"
# Per-archive housekeeping: drop stray core dumps, check symlinks, and
# publish a recursive ls of the archive (ls-lR style).
232 for archive in "${public_archives[@]}"; do
233 archiveroot="$(get_archiveroot "${archive}")"
236 log "Removing any core files ..."
237 find -type f -name core -print -delete
239 log "Checking symlinks ..."
242 log "Creating recursive directory listing ... "
# TZ=UTC makes the listing reproducible; --rsyncable helps mirror deltas.
244 TZ=UTC ls -lR | gzip -9c --rsyncable > ${FILENAME}.gz
# Build the Maintainers / Uploaders indices for each public archive.
248 function mkmaintainers() {
252 log 'Creating Maintainers index ... '
254 for archive in "${public_archives[@]}"; do
255 archiveroot="$(get_archiveroot "${archive}")"
256 indices="${archiveroot}/indices"
# Skip archives without an indices directory.
257 if ! [ -d "${indices}" ]; then
262 dak make-maintainers -a "${archive}" ${scriptdir}/masterfiles/pseudo-packages.maintainers
263 gzip -9v --rsyncable <Maintainers >Maintainers.gz
264 gzip -9v --rsyncable <Uploaders >Uploaders.gz
# Publish gzipped copies of the override files into the indices directory.
268 function copyoverrides() {
269 log 'Copying override files into public view ...'
# Brace expansion enumerates every release/extra/component combination;
# the trailing * also matches suffixed variants (e.g. .src).
273 for ofile in ${overridedir}/override.{squeeze,wheezy,jessie,stretch,sid}.{,extra.}{main,contrib,non-free}*; do
275 gzip -9cv --rsyncable ${ofile} > ${indices}/${bname}.gz
276 chmod g+w ${indices}/${bname}.gz
# Build the per-arch / per-suite file lists used by partial mirrors.
# Heavy use of SQL against projectb plus find/sed/perl stream processing;
# several interleaved lines (function helpers, heredoc delimiters, `done`)
# are elided in this sampled view — treat line adjacency with care.
281 function mkfilesindices() {
284 cd $base/ftp/indices/files/components
288 log "Querying postgres"
# Full pool listing with the architecture of each file (source and binary).
290 SELECT './pool/' || c.name || '/' || f.filename AS path, a.arch_string AS arch_string
292 JOIN files_archive_map af ON f.id = af.file_id
293 JOIN component c ON af.component_id = c.id
294 JOIN archive ON af.archive_id = archive.id
297 JOIN architecture a ON b.architecture = a.id)
299 WHERE archive.name = 'ftp-master'
300 ORDER BY path, arch_string
302 psql -At -c "$query" >$ARCHLIST
# Emit every parent directory of each path exactly once.
305 perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
# Reorder so pool/ entries come first, everything else afterwards.
308 perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
311 log "Generating sources list"
# Lines ending in | (no arch) are source files.
313 sed -n 's/|$//p' $ARCHLIST
315 find ./dists -maxdepth 1 \! -type d
316 find ./dists \! -type d | grep "/source/"
317 ) | sort -u | gzip -9 > source.list.gz
319 log "Generating arch lists"
# All architectures seen in the pool plus amd64, excluding "all".
321 ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
323 (sed -n "s/|$a$//p" $ARCHLIST
324 sed -n 's/|all$//p' $ARCHLIST
327 find ./dists -maxdepth 1 \! -type d
328 find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
329 ) | sort -u | gzip -9 > arch-$a.list.gz
332 log "Generating suite lists"
# Helper: list pool files referenced by one suite ($1 = suite id).
335 local suite_id="$(printf %d $1)"
# Source files reachable from the suite, including extra source references
# pulled in via binaries (e.g. Built-Using).
338 SELECT DISTINCT './pool/' || c.name || '/' || f.filename
340 (SELECT sa.source AS source
341 FROM src_associations sa
342 WHERE sa.suite = $suite_id
345 FROM extra_src_references esr
346 JOIN bin_associations ba ON esr.bin_id = ba.bin
347 WHERE ba.suite = $suite_id
349 SELECT b.source AS source
350 FROM bin_associations ba
351 JOIN binaries b ON ba.bin = b.id WHERE ba.suite = $suite_id) s
352 JOIN dsc_files df ON s.source = df.source
353 JOIN files f ON df.file = f.id
354 JOIN files_archive_map af ON f.id = af.file_id
355 JOIN component c ON af.component_id = c.id
356 JOIN archive ON af.archive_id = archive.id
357 WHERE archive.name = 'ftp-master'
359 psql -F' ' -A -t -c "$query"
# Binary files belonging to the suite.
362 SELECT './pool/' || c.name || '/' || f.filename
363 FROM bin_associations ba
364 JOIN binaries b ON ba.bin = b.id
365 JOIN files f ON b.file = f.id
366 JOIN files_archive_map af ON f.id = af.file_id
367 JOIN component c ON af.component_id = c.id
368 JOIN archive ON af.archive_id = archive.id
369 WHERE ba.suite = $suite_id AND archive.name = 'ftp-master'
371 psql -F' ' -A -t -c "$query"
# Iterate over all suites that exist on disk and build suite-<name> lists.
374 psql -F' ' -At -c "SELECT id, suite_name FROM suite" |
375 while read id suite; do
376 [ -e $base/ftp/dists/$suite ] || continue
# Resolve codename symlinks so both the suite name and codename dirs are
# captured in the listing.
379 distname=$(cd dists; readlink $suite || echo $suite)
380 find ./dists/$distname \! -type d
381 for distdir in ./dists/*; do
382 [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
386 ) | sort -u | gzip -9 > suite-${suite}.list.gz
389 log "Finding everything on the ftp site to generate sundries"
390 (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
# Sundries = everything on disk that no arch/suite/source list claims.
393 zcat *.list.gz | cat - *.list | sort -u |
394 diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
396 log "Generating files list"
398 (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
400 cat - sundries.list dists.list project.list docs.list indices.list |
401 sort -u | poolfirst > ../arch-$a.files
# Translation file lists for the suites partial mirrors care about.
405 for dist in sid jessie stretch; do
406 find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
# "typical" = what a typical partial mirror carries.
410 (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-jessie.list.gz ; zcat translation-stretch.list.gz) |
411 sort -u | poolfirst > ../typical.files
# Generate md5sum indices and hardlink duplicate files via dsync-flist.
418 function mkchecksums() {
419 local archiveroot dsynclist md5list
421 for archive in "${public_archives[@]}"; do
422 archiveroot="$(get_archiveroot "${archive}")"
423 dsynclist=$dbdir/dsync.${archive}.list
424 md5list=${archiveroot}/indices/md5sums
426 log -n "Creating md5 / dsync index file for ${archive}... "
# Exclude the list file itself so it does not end up in its own index.
429 ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
# -9n: max compression, no timestamp in the gzip header (reproducible).
430 ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
# Deduplicate identical files via hardlinks; best-effort, hence || true.
431 ${bindir}/dsync-flist -q link-dups $dsynclist || true
# Refresh the "public" hardlink mirror copy of each archive and bump the
# mirror trace file. The enclosing function header is not visible in this
# sampled view.
436 local archiveroot targetpath TRACEFILE
438 for archive in "${public_archives[@]}"; do
439 archiveroot="$(get_archiveroot "${archive}")"
440 targetpath="${mirrordir}/${archive}"
441 TRACEFILE="${archiveroot}/project/trace/ftp-master.debian.org"
442 mkdir -p "${archiveroot}/project/trace/"
444 log "Regenerating \"public\" mirror/${archive} hardlink fun"
# Serial = max(today's date-based serial, previous serial + 1), so it is
# strictly increasing even with multiple pushes per day.
445 DATE_SERIAL=$(date +"%Y%m%d01")
# NOTE(review): the `|| echo` fallback only fires if awk itself fails, not
# when the trace file lacks a serial line — confirm intended.
446 FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} || echo ${DATE_SERIAL} )
447 if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
448 SERIAL="$DATE_SERIAL"
450 SERIAL="$FILESOAPLUS1"
# Rewrite the trace file with timestamp, version, host, and new serial.
452 date -u > ${TRACEFILE}
453 echo "Using dak v1" >> ${TRACEFILE}
454 echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
455 echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
457 mkdir -p ${targetpath}
# --link-dest hardlinks unchanged files against the live archive instead
# of copying, keeping the mirror copy cheap.
459 rsync -aH --link-dest ${archiveroot} --delete --delete-after --delete-excluded --exclude Packages.*.new --exclude Sources.*.new --ignore-errors ${archiveroot}/. .
# Expire old database dumps (enclosing function header elided in this view).
464 log "Expiring old database dumps..."
466 $scriptsdir/expire_dumps -d . -p -f "dump_*"
# Drop package transitions that have completed.
469 function transitionsclean() {
470 log "Removing out of date transitions..."
472 dak transitions -c -a
# Export the Debian Maintainer upload permissions page.
476 log "Updating DM permissions page"
477 dak acl export-per-source dm >$exportdir/dm.txt
# Auto-categorize new ftp.debian.org bugs; runs unprivileged on purpose.
481 log "Categorizing uncategorized bugs filed against ftp.debian.org"
482 sudo -u dak-unpriv dak bts-categorize
# Kick off the sync to the dd-accessible mirror.
485 function ddaccess() {
486 # Tell our dd accessible mirror to sync itself up. Including ftp dir.
487 log "Trigger dd accessible parts sync including ftp dir"
488 ${scriptsdir}/sync-dd dd-sync dd-sync1 dd-sync2 sync
# Verify every file referenced by the InRelease files exists with the right
# size and SHA1, then trigger the mirror push. Aborts (via log_error) if any
# discrepancy is found. Some interleaving lines (fi/done, broken=0 init) are
# elided in this sampled view.
491 function mirrorpush() {
492 log "Checking the public archive copies..."
494 local archiveroot targetpath
496 for archive in "${public_archives[@]}"; do
497 log "... archive: ${archive}"
498 archiveroot="$(get_archiveroot "${archive}")"
499 targetpath="${mirrordir}/${archive}"
500 cd ${archiveroot}/dists
503 for release in $(find . -name "InRelease"); do
504 echo "Processing: ${release}"
505 subdir=${release%/InRelease}
# Fields come from the SHA1 section of the InRelease file (see the sed
# pipeline feeding this loop below).
506 while read SHASUM SIZE NAME; do
507 if ! [ -f "${subdir}/${NAME}" ]; then
508 bname=$(basename ${NAME})
# Release files may list uncompressed index names that only exist packed
# on disk — those are expected to be missing and are skipped.
509 if [[ "${bname}" =~ ^(Packages|Sources|Contents-[a-zA-Z0-9-]+|Translation-[a-zA-Z_]+|Components-(amd64|i386)\.yml|icons-(128x128|64x64)\.tar)$ ]]; then
511 # We don't keep unpacked files, don't check for their existence.
512 # We might want to go and check their unpacked shasum, but right now
513 # I don't care. I believe it should be enough if all the packed shasums
517 broken=$(( broken + 1 ))
518 echo "File ${subdir}/${NAME} is missing"
522 # We do have symlinks in the tree (see the contents files currently).
523 # So we use "readlink -f" to check the size of the target, as that's basically
524 # what gen-releases does
525 fsize=$(stat -c %s $(readlink -f "${subdir}/${NAME}"))
526 if [ ${fsize} -ne ${SIZE} ]; then
527 broken=$(( broken + 1 ))
528 echo "File ${subdir}/${NAME} has size ${fsize}, expected is ${SIZE}"
532 fshasum=$(sha1sum $(readlink -f "${subdir}/${NAME}"))
# sha1sum prints "<hash>  <file>"; keep only the hash.
533 fshasum=${fshasum%% *}
534 if [ "${fshasum}" != "${SHASUM}" ]; then
535 broken=$(( broken + 1 ))
536 echo "File ${subdir}/${NAME} has checksum ${fshasum}, expected is ${SHASUM}"
# Extract only the SHA1 section of the InRelease file.
539 done < <(sed '1,/SHA1:/d' "${release}" | sed '/SHA256:/,$d')
542 if [ $broken -gt 0 ]; then
543 log_error "Trouble with the public mirror for ${archive}, found ${broken} errors"
546 log "Starting the mirrorpush for ${archive}"
# pusharg selects which runmirrors config to use for this archive.
556 pusharg="-a backports"
# Publish a mirrorstart stamp file, then fire runmirrors in the background.
559 fname="mirrorstart.${archive}"
562 date -u > /srv/ftp.debian.org/web/${fname}
563 echo "Using dak v1" >> /srv/ftp.debian.org/web/${fname}
564 echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/${fname}
565 sudo -H -u archvsync /home/archvsync/runmirrors ${pusharg} > ~dak/runmirrors-${archive}.log 2>&1 &
# Trigger the backports archive's own mirror update.
570 function mirrorpush-backports() {
571 log "Syncing backports mirror"
572 sudo -u backports /home/backports/bin/update-archive
# Export per-suite package lists for the i18n (DDTP) project, signed with a
# timestamp so i18n1 above can verify provenance on the way back in.
576 log "Exporting package data foo for i18n project"
577 STAMP=$(date "+%Y%m%d%H%M")
578 mkdir -p ${scriptdir}/i18n/${STAMP}
579 cd ${scriptdir}/i18n/${STAMP}
580 for suite in stable testing unstable; do
# Map the suite name to its codename via `dak admin`.
581 codename=$(dak admin s show ${suite}|grep '^Codename')
582 codename=${codename##* }
583 echo "Codename is ${codename}"
584 dak control-suite -l ${suite} >${codename}
# Detached-sign the timestamp file; this is what i18n1 verifies with gpgv.
586 echo "${STAMP}" > timestamp
587 gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 473041FA --detach-sign -o timestamp.gpg timestamp
# Point the stable "i18n" symlink at the new export, then prune exports
# older than two days (keeping the current one).
591 ln -sfT ${scriptdir}/i18n/${STAMP} i18n
594 find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
# Refresh ftp traffic statistics and archive-size stats pages (enclosing
# function header elided in this view). The R script renders the graphs.
598 log "Updating stats data"
600 $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
601 R --slave --vanilla < $base/misc/ftpstats.R
602 dak stats arch-space > $webdir/arch-space
603 dak stats pkg-nums > $webdir/pkg-nums
# Drop postgres transaction-id marker files older than ~3 months.
606 function cleantransactions() {
607 log "Cleanup transaction ids older than 3 months"
609 find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -delete
# Feed the dinstall log into the stats/graphing tool; $1 is the log file.
612 function logstats() {
613 $masterdir/tools/logs.py "$1"
616 # save timestamp when we start
617 function savetimestamp() {
618 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
619 echo ${NOW} > "${dbdir}/dinstallstart"
# Mail the complete dinstall log to the cron alias.
622 function maillogfile() {
623 cat "$LOGFILE" | mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
# Export the list of source packages currently in testing.
626 function testingsourcelist() {
627 dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
630 # Function to update a "statefile" telling people what we are doing
633 # This should be called with the argument(s)
634 #   - Status name we want to show.
# Writes run-start time and the current action (with epoch) to the state
# file; the heredoc terminator and remaining body are elided in this view.
637 RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
638 cat >"${DINSTALLSTATE}" <<EOF
639 Dinstall start: ${DINSTALLBEGIN}
641 Action start: ${RIGHTNOW}
645 # extract changelogs and stuff
# Export changelog metadata for ftp-master and backports, publish it via
# rsync + staticsync, all under LOCK_CHANGELOG. -r3: give up after three
# lock attempts rather than blocking a whole dinstall run.
646 function changelogs() {
647 if lockfile -r3 $LOCK_CHANGELOG; then
648 log "Extracting changelogs"
649 dak make-changelog -e -a ftp-master
# xz -f recompresses the filelist in place (replacing any old .xz).
650 [ -f ${exportdir}/changelogs/filelist.yaml ] && xz -f ${exportdir}/changelogs/filelist.yaml
651 mkdir -p ${exportpublic}/changelogs
652 cd ${exportpublic}/changelogs
653 rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
# Push to the static mirror network in the background, output discarded.
654 sudo -H -u staticsync /usr/local/bin/static-update-component metadata.ftp-master.debian.org >/dev/null 2>&1 &
# Same export for the backports archive.
656 dak make-changelog -e -a backports
657 [ -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml ] && xz -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml
658 mkdir -p /srv/backports-master.debian.org/rsync/export/changelogs
659 cd /srv/backports-master.debian.org/rsync/export/changelogs
660 rsync -aHW --delete --delete-after --ignore-errors /srv/backports-master.debian.org/export/changelogs/. .
661 remove_changelog_lock
# NOTE(review): the trap is installed *after* the lock is already removed
# above — likely intended as a safety net for early-exit paths, but the
# ordering looks suspicious; confirm against upstream.
662 trap remove_changelog_lock EXIT TERM HUP INT QUIT
# Check dists/ index files into a git tree so pdiffs can be derived from
# git history instead of kept copies.
666 function gitpdiff() {
667 # Might be that we want to change this to have more than one git repository.
668 # Advantage of one is that we do not need much space in terms of storage in git itself,
669 # git gc is pretty good on our input.
670 # But it might be faster. Well, lets test.
671 log "Adjusting the git tree for pdiffs"
674 # The regex needs the architectures separated with \|
# Build an alternation (arch1\|arch2\|...) of all known architectures.
675 garchs=$(dak admin a list|sed -e ':q;N;s/\n/\\|/g;t q')
677 # First, get all the files we want to work on. ../dists/ is a symlink to the real dists/ we
679 # Also, we only want contents, packages and sources.
# NOTE(review): the regex interpolates $archs but the variable computed
# above is named $garchs — with $archs unset the Contents alternative
# matches nothing. Likely a typo; confirm against upstream.
680 for file in $(find ../dists/ -regex ".*/\(Contents-\($archs\)\|\(Packages\|Sources\)\).gz"); do
689 # Second, add all there is into git
692 # Maybe we want to make this the same for tag and commit? But well, shouldn't matter
694 TAGD=$(date +%Y-%m-%d-%H-%M)
# NOTE(review): the commit message references ${COMD}, which is not set in
# the visible lines (only TAGD is) — confirm COMD is defined upstream.
695 git commit -m "Commit of ${COMD}"