# NOTE(review): this chunk is a numbered listing with many original lines
# missing; the leading digits are listing artifacts, not shell code.
# Fragment of the timestamp helper (its function header is not visible here);
# prints a "stage name: time" line consumed by the dinstall stat graphs.
2 # Timestamp. Used for dinstall stat graphs
4 echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
# Fragment: function header only — the body is not visible in this chunk.
# NOTE(review): presumably removes ${LOCK_DAILY}; confirm against full source.
8 function remove_daily_lock() {
# Drop every dinstall lock file: the daily lock plus the accepted and NEW
# queue locks. -f keeps this idempotent (missing files are not an error);
# quoting and -- guard against odd characters in the configured paths.
function remove_all_locks() {
    rm -f -- "$LOCK_DAILY" "$LOCK_ACCEPTED" "$LOCK_NEW"
}
# Fragment of the error handler: mails the failing stage's log to the
# ftpmaster cron alias. The `else`/`fi` lines of both ifs are missing from
# this chunk (see the gaps in the embedded numbering).
# NOTE(review): reads ${error} ("false" = continued after error) and
# ${STAGEFILE} (the stage that failed) — both set elsewhere; confirm.
17 # If we error out this one is called, *FOLLOWED* by cleanup above
19 ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
21 subject="ATTENTION ATTENTION!"
22 if [ "${error}" = "false" ]; then
23 subject="${subject} (continued)"
25 subject="${subject} (interrupted)"
27 subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
29 if [ -r "${STAGEFILE}.log" ]; then
30 cat "${STAGEFILE}.log"
# The whole if/else output is piped into mail — the body is either the
# stage log or the "does not exist" fallback line.
32 echo "file ${STAGEFILE}.log does not exist, sorry"
33 fi | mail -s "${subject}" -a "X-Debian: DAK" cron@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
36 ########################################################################
37 # the actual dinstall functions follow #
38 ########################################################################
# Fragment: notify the QA host that dinstall has started (the enclosing
# function header/closer are not visible here).
40 # pushing merkels QA user, part one
42 log "Telling QA user that we start dinstall"
# NOTE(review): SetupTimeOut is not a stock OpenSSH keyword (Debian-patched
# ssh only); ConnectTimeout is already given — verify both are intended.
43 ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1
# Fragment: regenerate miscellaneous documentation/list files by running
# the helper scripts from ${scriptsdir} (function header not visible).
46 # Updating various files
48 log "Updating Bugs docu, Mirror list and mailing-lists.txt"
50 $scriptsdir/update-bugdoctxt
51 $scriptsdir/update-mirrorlists
52 $scriptsdir/update-mailingliststxt
53 $scriptsdir/update-pseudopackages.sh
# Fragment of the first i18n stage: pull the latest ddtp translation data,
# verify the signed timestamp, sanity-check the files, then copy the
# per-dist Translation files into the public archive. Several else/fi
# closers are missing from this chunk.
56 # The first i18n one, syncing new descriptions
58 log "Synchronizing i18n package descriptions"
59 # First sync their newest data
60 cd ${scriptdir}/i18nsync
61 rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
63 # Now check if we still know about the packages for which they created the files
64 # is the timestamp signed by us?
# NOTE(review): `if $(gpgv …)` only works because bash uses the last command
# substitution's status when the expansion is empty; `if gpgv …; then` would
# say the same thing directly.
65 if $(gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
66 # now read it. As it's signed by us we are sure the content is what we expect, no need
67 # to do more here. And we only test -d a directory on it anyway.
68 TSTAMP=$(cat timestamp)
69 # do we have the dir still?
70 if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
# Run the consistency check before anything is copied into the archive.
72 if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
73 # Yay, worked, let's copy around
74 for dir in wheezy sid; do
75 if [ -d dists/${dir}/ ]; then
76 cd dists/${dir}/main/i18n
77 rsync -aq --delete --delete-after --exclude Translation-en.bz2 --exclude Translation-*.diff/ . ${ftpdir}/dists/${dir}/main/i18n/.
78 fi
79 cd ${scriptdir}/i18nsync
# The three echo+mail pairs below are the failure branches (check failed /
# timestamp dir missing / signature verification failed).
82 echo "ARRRR, bad guys, wrong files, ARRR"
83 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
86 echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
87 echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
90 echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
91 echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -a "X-Debian: DAK" -s "You can't keep blaming yourself. Just blame yourself once, and move on." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
# Fragment: body line of the cruft-check stage (its function header is
# on a missing line).
96 log "Checking for cruft in overrides"
# Fragment: dominate() header + log line; the `dak dominate` call and
# closing brace are on missing lines.
100 function dominate() {
101 log "Removing obsolete source and binary associations"
# Generate the per-suite file lists that apt-ftparchive consumes.
# No arguments; delegates all work to `dak generate-filelist`.
function filelist() {
    log "Generating file lists for apt-ftparchive"
    dak generate-filelist
}
# Fragment of fingerprints(): import the debian-keyring and the
# debian-maintainers keyring; if the DM import produced output (${OUTFILE}
# is non-empty) announce the changes to debian-project. The OUTFILE
# assignment, heredoc tail and fi/} are on missing lines.
110 function fingerprints() {
111 log "Updating fingerprints"
112 dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
115 dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
117 if [ -s "${OUTFILE}" ]; then
# Everything from here to EOF is literal mail content fed to sendmail —
# do not insert shell comments inside the heredoc.
118 /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
119 From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
120 To: <debian-project@lists.debian.org>
121 Subject: Debian Maintainers Keyring changes
122 Content-Type: text/plain; charset=utf-8
126 The following changes to the debian-maintainers keyring have just been activated:
130 Debian distribution maintenance software,
131 on behalf of the Keyring maintainers
# Fragment of overrides(): write override data out as text files and
# build the combined override.sid.all3. The lines between the log call
# and the rm (probably a cd plus the per-component exports) are missing.
138 function overrides() {
139 log "Writing overrides into text files"
144 rm -f override.sid.all3
145 for i in main contrib non-free main.debian-installer; do cat override.sid.$i >> override.sid.all3; done
# Fragment: per-archive package->file mapping, bzip2-compressed into each
# archive's indices/ directory (function header and `done`/`}` missing).
151 log "Generating package / file mapping"
152 for archive in "${public_archives[@]}"; do
153 archiveroot="$(get_archiveroot "${archive}")"
154 dak make-pkg-file-mapping "${archive}" | bzip2 -9 > "${archiveroot}/indices/package-file.map.bz2"
# Fragment of packages(): regenerate Packages/Sources and Contents for
# every public archive (`done`/`}` on missing lines).
158 function packages() {
159 log "Generating Packages and Sources files"
160 for archive in "${public_archives[@]}"; do
161 dak generate-packages-sources2 -a "${archive}"
162 dak contents generate -a "${archive}"
# Fragment of the pdiff stage (its function header is on a missing line).
167 log "Generating pdiff files"
168 dak generate-index-diffs
# Fragment: regenerate i18n/Index for the listed suites (enclosing
# function header and `done` are on missing lines).
172 # XXX: disable once we can remove i18n/Index (#649314)
173 log "Generating i18n/Index"
176 for dist in testing unstable experimental proposed-updates testing-proposed-updates; do
177 $scriptsdir/generate-i18n-Index $dist;
# Fragment of the release stage: sign/emit Release files per archive.
181 log "Generating Release files"
182 for archive in "${public_archives[@]}"; do
183 dak generate-releases -a "${archive}"
# Expire old packages/files from the archive pool and the queue dirs.
function dakcleanup() {
    log "Cleanup old packages/files"
    # -m 10000: limit argument passed through to clean-suites — see dak
    # clean-suites documentation for exact semantics.
    dak clean-suites -m 10000
    # Clean the incoming/unchecked queue directory.
    dak clean-queues -i "$unchecked"
}
# Fragment of buildd_dir(): only the timestamp setup is visible; the
# actual directory regeneration lines are missing from this chunk.
193 function buildd_dir() {
194 # Rebuild the buildd dir to avoid long times of 403
195 log "Regenerating the buildd incoming dir"
196 STAMP=$(date "+%Y%m%d%H%M")
# Fragment of the per-archive housekeeping loop: remove stray core files,
# check symlinks, and write a gzipped recursive ls listing. The symlink
# check command and ${FILENAME} assignment are on missing lines.
204 for archive in "${public_archives[@]}"; do
205 archiveroot="$(get_archiveroot "${archive}")"
208 log "Removing any core files ..."
209 find -type f -name core -print -delete
211 log "Checking symlinks ..."
214 log "Creating recursive directory listing ... "
# TZ=UTC keeps the listing's timestamps reproducible across hosts.
216 TZ=UTC ls -lR | gzip -9c --rsyncable > ${FILENAME}.gz
# Fragment of mkmaintainers(): write Maintainers/Uploaders indices per
# archive and gzip them in place (`done`/`}` on missing lines).
220 function mkmaintainers() {
223 log 'Creating Maintainers index ... '
225 for archive in "${public_archives[@]}"; do
226 archiveroot="$(get_archiveroot "${archive}")"
227 cd "${archiveroot}/indices"
229 dak make-maintainers -a "${archive}" ${scriptdir}/masterfiles/pseudo-packages.maintainers
230 gzip -9v --rsyncable <Maintainers >Maintainers.gz
231 gzip -9v --rsyncable <Uploaders >Uploaders.gz
# Fragment of copyoverrides(): publish gzipped copies of the override
# files into ${indices}.
# NOTE(review): ${bname} is used below but its assignment (basename of
# ${ofile}, presumably) is on a missing line — confirm against full source.
235 function copyoverrides() {
236 log 'Copying override files into public view ...'
238 for ofile in ${overridedir}/override.{squeeze,wheezy,sid}.{,extra.}{main,contrib,non-free}*; do
240 gzip -9cv --rsyncable ${ofile} > ${indices}/${bname}.gz
241 chmod g+w ${indices}/${bname}.gz
# Fragment of mkfilesindices(): builds the mirror "file list" indices
# (source.list.gz, arch-*.list.gz, suite-*.list.gz, sundries.list and the
# combined per-arch .files lists) from a Postgres dump of the archive.
# Many interior lines are missing, including the opening of the SQL query
# strings and several subshell parentheses — the SQL fragments below are
# string content, not shell, so no comments are interleaved there.
245 function mkfilesindices() {
248 cd $base/ftp/indices/files/components
# Dump "path|arch" for every pool file in the ftp-master archive.
252 log "Querying postgres"
254 SELECT './pool/' || c.name || '/' || f.filename AS path, a.arch_string AS arch_string
256 JOIN files_archive_map af ON f.id = af.file_id
257 JOIN component c ON af.component_id = c.id
258 JOIN archive ON af.archive_id = archive.id
261 JOIN architecture a ON b.architecture = a.id)
263 WHERE archive.name = 'ftp-master'
264 ORDER BY path, arch_string
266 psql -At -c "$query" >$ARCHLIST
# Helper perl one-liners: the first also emits every parent directory of
# each path (deduplicated); the second reorders input so ./pool/ paths
# come first (poolfirst).
269 perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
272 perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
# Source list: pool files with no arch (trailing "|") plus dists source dirs.
275 log "Generating sources list"
277 sed -n 's/|$//p' $ARCHLIST
279 find ./dists -maxdepth 1 \! -type d
280 find ./dists \! -type d | grep "/source/"
281 ) | sort -u | gzip -9 > source.list.gz
# Arch lists: per-arch pool files, arch-all files, and matching dists paths.
283 log "Generating arch lists"
285 ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
287 (sed -n "s/|$a$//p" $ARCHLIST
288 sed -n 's/|all$//p' $ARCHLIST
291 find ./dists -maxdepth 1 \! -type d
292 find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
293 ) | sort -u | gzip -9 > arch-$a.list.gz
# Suite lists: pool files referenced by each suite's sources and binaries.
296 log "Generating suite lists"
299 local suite_id="$(printf %d $1)"
302 SELECT DISTINCT './pool/' || c.name || '/' || f.filename
304 (SELECT sa.source AS source
305 FROM src_associations sa
306 WHERE sa.suite = $suite_id
309 FROM extra_src_references esr
310 JOIN bin_associations ba ON esr.bin_id = ba.bin
311 WHERE ba.suite = $suite_id
313 SELECT b.source AS source
314 FROM bin_associations ba
315 JOIN binaries b ON ba.bin = b.id WHERE ba.suite = $suite_id) s
316 JOIN dsc_files df ON s.source = df.source
317 JOIN files f ON df.file = f.id
318 JOIN files_archive_map af ON f.id = af.file_id
319 JOIN component c ON af.component_id = c.id
320 JOIN archive ON af.archive_id = archive.id
321 WHERE archive.name = 'ftp-master'
323 psql -F' ' -A -t -c "$query"
326 SELECT './pool/' || c.name || '/' || f.filename
327 FROM bin_associations ba
328 JOIN binaries b ON ba.bin = b.id
329 JOIN files f ON b.file = f.id
330 JOIN files_archive_map af ON f.id = af.file_id
331 JOIN component c ON af.component_id = c.id
332 JOIN archive ON af.archive_id = archive.id
333 WHERE ba.suite = $suite_id AND archive.name = 'ftp-master'
335 psql -F' ' -A -t -c "$query"
338 psql -F' ' -At -c "SELECT id, suite_name FROM suite" |
339 while read id suite; do
340 [ -e $base/ftp/dists/$suite ] || continue
343 distname=$(cd dists; readlink $suite || echo $suite)
344 find ./dists/$distname \! -type d
345 for distdir in ./dists/*; do
346 [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
350 ) | sort -u | gzip -9 > suite-${suite}.list.gz
# Sundries: everything on the ftp site not accounted for by any list above.
353 log "Finding everything on the ftp site to generate sundries"
354 (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
357 zcat *.list.gz | cat - *.list | sort -u |
358 diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
# Final per-arch .files lists, pool entries first (poolfirst helper above).
360 log "Generating files list"
363 (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
364 cat - sundries.list dists.list project.list docs.list indices.list |
365 sort -u | poolfirst > ../arch-$a.files
369 for dist in sid wheezy; do
370 find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
374 (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-wheezy.list.gz) |
375 sort -u | poolfirst > ../typical.files
# Fragment of mkchecksums(): build the dsync file list and md5sums.gz
# index, then hardlink duplicate files. The lines between the md5list
# assignment and the first dsync-flist call (probably a cd into the
# archive root) are missing — confirm against full source.
382 function mkchecksums() {
383 dsynclist=$dbdir/dsync.list
384 md5list=$indices/md5sums
386 log -n "Creating md5 / dsync index file ... "
389 ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
390 ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
# link-dups may legitimately fail; best-effort by design (|| true).
391 ${bindir}/dsync-flist -q link-dups $dsynclist || true
# Fragment of the mirror stage: pick the next serial (date-based or
# previous+1, whichever is larger), rewrite the trace file, and hardlink-
# copy the ftp tree into the public mirror dir. The else/fi of the serial
# choice and the cd into the mirror dir are on missing lines.
395 log "Regenerating \"public\" mirror/ hardlink fun"
396 DATE_SERIAL=$(date +"%Y%m%d01")
397 FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} )
398 if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
399 SERIAL="$DATE_SERIAL"
401 SERIAL="$FILESOAPLUS1"
403 date -u > ${TRACEFILE}
404 echo "Using dak v1" >> ${TRACEFILE}
405 echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
406 echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
# --link-dest makes this a hardlink farm rather than a byte copy.
408 rsync -aH --link-dest ${ftpdir} --delete --delete-after --delete-excluded --exclude Packages.*.new --exclude Sources.*.new --ignore-errors ${ftpdir}/. .
# Fragment: prune old database dumps via the expire_dumps helper
# (enclosing function header/closer and the cd are on missing lines).
412 log "Expiring old database dumps..."
414 $scriptsdir/expire_dumps -d . -p -f "dump_*"
# Fragment of transitionsclean(): drop out-of-date transition blocks.
# The line between log and the dak call (likely a cd) is missing.
417 function transitionsclean() {
418 log "Removing out of date transitions..."
420 dak transitions -c -a
# Fragment of the DM permissions export (function header missing).
424 log "Updating DM permissions page"
425 dak acl export-per-source dm >$exportdir/dm.txt
# Fragment of the bug categorization stage (body on missing lines).
429 log "Categorizing uncategorized bugs filed against ftp.debian.org"
# Trigger the developer-accessible mirror to sync itself, including the
# ftp dir, by running the sync-dd helper with its configured lock names
# and the "pool" target.
function ddaccess() {
    # Tell our dd accessible mirror to sync itself up. Including ftp dir.
    log "Trigger dd accessible parts sync including ftp dir"
    ${scriptsdir}/sync-dd ries-sync ries-sync1 ries-sync2 pool
}
# Fragment of mirrorpush(): verify every file listed in each InRelease's
# SHA1 section exists with the right size and checksum, then kick off the
# mirror push. Missing lines include the `broken=0` initialization and
# several else/fi closers — structure below is incomplete as shown.
439 function mirrorpush() {
440 log "Checking the public archive copy"
441 cd ${mirrordir}/dists
# NOTE(review): word-splitting `for release in $(find …)` breaks on paths
# with whitespace; harmless here only if dists/ names are space-free.
444 for release in $(find . -name "InRelease"); do
445 echo "Processing: ${release}"
446 subdir=${release%/InRelease}
447 while read SHASUM SIZE NAME; do
448 if ! [ -f "${subdir}/${NAME}" ]; then
449 bname=$(basename ${NAME})
450 if [[ "${bname}" =~ ^(Packages|Sources|Translation-[a-zA-Z_]+)$ ]]; then
451 # We don't keep unpacked files, don't check for their existence.
452 # We might want to go and check their unpacked shasum, but right now
453 # I don't care. I believe it should be enough if all the packed shasums
457 broken=$(( broken + 1 ))
458 echo "File ${subdir}/${NAME} is missing"
462 # We do have symlinks in the tree (see the contents files currently).
463 # So we use "readlink -f" to check the size of the target, as that's basically
464 # what gen-releases does
465 fsize=$(stat -c %s $(readlink -f "${subdir}/${NAME}"))
466 if [ ${fsize} -ne ${SIZE} ]; then
467 broken=$(( broken + 1 ))
468 echo "File ${subdir}/${NAME} has size ${fsize}, expected is ${SIZE}"
472 fshasum=$(sha1sum $(readlink -f "${subdir}/${NAME}"))
473 fshasum=${fshasum%% *}
474 if [ "${fshasum}" != "${SHASUM}" ]; then
475 broken=$(( broken + 1 ))
476 echo "File ${subdir}/${NAME} has checksum ${fshasum}, expected is ${SHASUM}"
# Feed the loop only the SHA1 block of the InRelease file.
479 done < <(sed '1,/SHA1:/d' "${release}" | sed '/SHA256:/,$d')
482 if [ $broken -gt 0 ]; then
483 log_error "Trouble with the public mirror, found ${broken} errors"
# Past verification: stamp the mirrorstart file and launch the push
# asynchronously as the archvsync user.
487 log "Starting the mirrorpush"
488 date -u > /srv/ftp.debian.org/web/mirrorstart
489 echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
490 echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/mirrorstart
491 sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
# Fragment of the i18n export stage: dump per-suite control data into a
# timestamped directory, sign the timestamp so the i18n side can verify
# it came from us, repoint the i18n symlink, and prune old exports.
# The function header, `done` and `}` are on missing lines.
495 log "Exporting package data foo for i18n project"
496 STAMP=$(date "+%Y%m%d%H%M")
497 mkdir -p ${scriptdir}/i18n/${STAMP}
498 cd ${scriptdir}/i18n/${STAMP}
499 for suite in stable testing unstable; do
500 codename=$(dak admin s show ${suite}|grep '^Codename')
# Strip everything up to the last space: "Codename: foo" -> "foo".
501 codename=${codename##* }
502 echo "Codename is ${codename}"
503 dak control-suite -l ${suite} >${codename}
505 echo "${STAMP}" > timestamp
506 gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 473041FA --detach-sign -o timestamp.gpg timestamp
# -T: never follow an existing symlink as a directory when relinking.
510 ln -sfT ${scriptdir}/i18n/${STAMP} i18n
# Drop exports older than two days, but never the one just created.
513 find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
# Fragment of the stats stage: rebuild ftpstats data, render graphs via
# R, and export arch-space / pkg-nums summaries (header/closer missing).
517 log "Updating stats data"
519 $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
520 R --slave --vanilla < $base/misc/ftpstats.R
521 dak stats arch-space > $webdir/arch-space
522 dak stats pkg-nums > $webdir/pkg-nums
# Fragment of aptftpcleanup(): clean apt-ftparchive caches. The line
# between log and the command (likely a cd into the config dir) is missing.
525 function aptftpcleanup() {
526 log "Clean up apt-ftparchive's databases"
528 apt-ftparchive -q clean apt.conf
# Fragment of cleantransactions(): delete txid_* files older than 90
# days. The cd into the transaction directory is on a missing line, so
# the bare `find -maxdepth 1` below relies on that missing cwd change.
531 function cleantransactions() {
532 log "Cleanup transaction ids older than 3 months"
534 find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -print0 | xargs -0 --no-run-if-empty rm
# Feed a finished dinstall logfile ($1) to the log-analysis helper so the
# per-stage timing stats get recorded.
function logstats() {
    $masterdir/tools/logs.py "$1"
}
# save timestamp when we start
# Record dinstall's start time in ${dbdir}/dinstallstart so that
# renamelogfile() and the log statistics can find it later.
function savetimestamp() {
    NOW=$(date "+%Y.%m.%d-%H:%M:%S")
    echo "${NOW}" > "${dbdir}/dinstallstart"
}
# Mail the current dinstall logfile ($LOGFILE) to the ftpmaster cron
# alias, stamped with the run's start time ($NOW).
function maillogfile() {
    # Redirect instead of `cat | mail` — same bytes, one fewer process.
    mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" cron@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org < "$LOGFILE"
}
551 function renamelogfile() {
552 if [ -f "${dbdir}/dinstallstart" ]; then
553 NOW=$(cat "${dbdir}/dinstallstart")
555 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
556 logstats "$logdir/dinstall_${NOW}.log"
557 bzip2 -9 "$logdir/dinstall_${NOW}.log"
559 error "Problem, I don't know when dinstall started, unable to do log statistics."
560 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
562 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
563 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# Publish the list of source packages currently in testing to
# ${webdir}/testing.list (heidi format, one entry per line).
function testingsourcelist() {
    # grep -E replaces the deprecated egrep wrapper.
    dak ls -s testing -f heidi -r . | grep -E 'source$' > ${webdir}/testing.list
}
# Fragment of process_unchecked(): only the lock-flag setup is visible;
# the actual process-upload invocation is on missing lines.
571 # do a last run of process-unchecked before dinstall is on.
572 function process_unchecked() {
573 log "Processing the unchecked queue"
# -p presumably means "without lock" given the variable name — confirm
# against the dak process-upload invocation on the missing lines.
574 UNCHECKED_WITHOUT_LOCK="-p"
# Fragment of the statefile updater: writes ${DINSTALLSTATE} with the run
# start, current action and its start time. The heredoc's middle lines and
# EOF terminator are missing — do not insert anything inside the heredoc.
579 # Function to update a "statefile" telling people what we are doing
582 # This should be called with the argument(s)
583 # - Status name we want to show.
586 RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
587 cat >"${DINSTALLSTATE}" <<EOF
588 Dinstall start: ${DINSTALLBEGIN}
590 Action start: ${RIGHTNOW}
# extract changelogs and stuff
# Extract changelogs from the ftp-master archive, mirror them into the
# public export tree, and asynchronously trigger the metadata mirror run.
function changelogs() {
    log "Extracting changelogs"
    dak make-changelog -e -a ftp-master
    mkdir -p ${exportpublic}/changelogs
    cd ${exportpublic}/changelogs
    rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
    # Fire-and-forget: the mirror run happens in the background.
    sudo -H -u archvsync /home/archvsync/runmirrors metaftpdo > ~dak/runmirrors-metadata.log 2>&1 &
}
# Fragment of gitpdiff(): check Contents/Packages/Sources files into a git
# tree so pdiff history can be reconstructed. Many interior lines
# (the loop body, git add, tag) are missing from this chunk.
604 function gitpdiff() {
605 # Might be that we want to change this to have more than one git repository.
606 # Advantage of one is that we do not need much space in terms of storage in git itself,
607 # git gc is pretty good on our input.
608 # But it might be faster. Well, let's test.
609 log "Adjusting the git tree for pdiffs"
612 # The regex needs the architectures separated with \|
613 garchs=$(dak admin a list|sed -e ':q;N;s/\n/\\|/g;t q')
615 # First, get all the files we want to work on. ../dists/ is a symlink to the real dists/ we
617 # Also, we only want contents, packages and sources.
# NOTE(review): garchs is computed above but the regex below references
# $archs — looks like a variable-name mismatch; verify against full source.
618 for file in $(find ../dists/ -regex ".*/\(Contents-\($archs\)\|\(Packages\|Sources\)\).gz"); do
627 # Second, add all there is into git
630 # Maybe we want to make this the same for tag and commit? But well, shouldn't matter
632 TAGD=$(date +%Y-%m-%d-%H-%M)
# NOTE(review): ${COMD} is used here but only TAGD is assigned in the
# visible lines — COMD is presumably set on a missing line; confirm.
633 git commit -m "Commit of ${COMD}"