2 # Timestamp. Used for dinstall stat graphs
# NOTE(review): this appears to be the body of the log() helper; the enclosing
# "function log() {" line is not visible in this excerpt — confirm upstream.
# $1 is the name of the dinstall stage being logged.
4 echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
# Drop the daily dinstall lock file (body not visible in this excerpt).
8 function remove_daily_lock() {
# Drop every dinstall lock at once: daily, accepted and new-queue locks.
# NOTE(review): the $LOCK_* variables are set outside this excerpt and are
# left unquoted here — assumed to never contain whitespace; confirm.
13 function remove_all_locks() {
14 rm -f $LOCK_DAILY $LOCK_ACCEPTED $LOCK_NEW
17 # If we error out this one is called, *FOLLOWED* by cleanup above
# Error handler: mails the log of the failed stage to the ftp-master cron
# address. ${error} and ${STAGEFILE} are set by the surrounding dinstall
# driver (not visible in this excerpt).
19 ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
21 subject="ATTENTION ATTENTION!"
# error=false means dinstall continued past the failed stage; anything else
# means the run was interrupted at this stage.
22 if [ "${error}" = "false" ]; then
23 subject="${subject} (continued)"
25 subject="${subject} (interrupted)"
27 subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
# The combined stdout of the whole if/else below is piped into mail(1):
# the stage log when readable, otherwise an apology line.
29 if [ -r "${STAGEFILE}.log" ]; then
30 cat "${STAGEFILE}.log"
32 echo "file ${STAGEFILE}.log does not exist, sorry"
33 fi | mail -s "${subject}" -a "X-Debian: DAK" cron@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
36 ########################################################################
37 # the actual dinstall functions follow #
38 ########################################################################
40 # pushing merkels QA user, part one
# Ping the QA host so it knows dinstall is starting; BatchMode prevents any
# interactive prompt. NOTE(review): "SetupTimeOut" is not a standard OpenSSH
# client option (ConnectTimeout is also passed) — confirm it is accepted by
# the ssh version in use rather than causing a bad-option error.
42 log "Telling QA user that we start dinstall"
43 ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1
46 # Updating various files
# Regenerate assorted static files via helper scripts; $scriptsdir is set by
# the surrounding configuration (not visible in this excerpt).
48 log "Updating Bugs docu, Mirror list and mailing-lists.txt"
50 $scriptsdir/update-bugdoctxt
51 $scriptsdir/update-mirrorlists
52 $scriptsdir/update-mailingliststxt
53 $scriptsdir/update-pseudopackages.sh
56 # The first i18n one, syncing new descriptions
# Pull the newest DDTP translation data, verify the timestamp file the i18n
# people shipped is signed by our key, sanity-check the payload, and only
# then copy the Translation files into the public tree.
58 log "Synchronizing i18n package descriptions"
59 # First sync their newest data
60 cd ${scriptdir}/i18nsync
# rsync may legitimately fail (remote down); "|| true" keeps dinstall going.
61 rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
63 # Now check if we still know about the packages for which they created the files
64 # is the timestamp signed by us?
# NOTE(review): "if $(gpgv ...)" executes gpgv inside a command substitution
# and then runs its (empty) stdout; in bash the exit status of an empty
# command is that of the last substitution, so this happens to work because
# gpgv writes only to stderr. The conventional form is "if gpgv ...; then".
65 if $(gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
66 # now read it. As its signed by us we are sure the content is what we expect, no need
67 # to do more here. And we only test -d a directory on it anyway.
68 TSTAMP=$(cat timestamp)
69 # do we have the dir still?
70 if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
# Let the dedicated checker validate the synced files against our copy.
72 if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
73 # Yay, worked, lets copy around
74 for dir in wheezy sid; do
75 if [ -d dists/${dir}/ ]; then
76 cd dists/${dir}/main/i18n
# Publish everything except the files gen-releases handles itself.
77 rsync -aq --delete --delete-after --exclude Translation-en.bz2 --exclude Translation-*.diff/ . ${ftpdir}/dists/${dir}/main/i18n/.
79 cd ${scriptdir}/i18nsync
# Failure branches below (else/fi lines not visible in this excerpt):
# each one complains on the log and mails the l10n list.
82 echo "ARRRR, bad guys, wrong files, ARRR"
83 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
86 echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
87 echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
90 echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
91 echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -a "X-Debian: DAK" -s "You can't keep blaming yourself. Just blame yourself once, and move on." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
# Part of the cruft check (enclosing function header not visible here).
96 log "Checking for cruft in overrides"
# Let dak drop source/binary associations that newer uploads superseded.
100 function dominate() {
101 log "Removing obsolete source and binary associations"
# Produce the per-suite file lists apt-ftparchive consumes.
105 function filelist() {
106 log "Generating file lists for apt-ftparchive"
107 dak generate-filelist
# Import the current Debian and DM keyrings; if the DM import reported
# changes, announce them to debian-project.
110 function fingerprints() {
111 log "Updating fingerprints"
112 dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
# ${OUTFILE} is created outside this excerpt; import output lands there.
115 dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
# Only mail when the import actually produced output (file is non-empty).
117 if [ -s "${OUTFILE}" ]; then
118 /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
119 From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
120 To: <debian-project@lists.debian.org>
121 Subject: Debian Maintainers Keyring changes
122 Content-Type: text/plain; charset=utf-8
126 The following changes to the debian-maintainers keyring have just been activated:
130 Debian distribution maintenance software,
131 on behalf of the Keyring maintainers
# Dump the override tables to text files for the mirrors, and build the
# combined override.sid.all3 from the per-component files.
138 function overrides() {
139 log "Writing overrides into text files"
144 rm -f override.sid.all3
145 for i in main contrib non-free main.debian-installer; do cat override.sid.$i >> override.sid.all3; done
# Package -> pool-file mapping for the indices/ directory (separate
# function; its header line is not visible in this excerpt).
149 log "Generating package / file mapping"
150 dak make-pkg-file-mapping | bzip2 -9 > $base/ftp/indices/package-file.map.bz2
# Regenerate the Packages/Sources/Contents index files for the ftp-master
# archive.
153 function packages() {
154 log "Generating Packages and Sources files"
155 dak generate-packages-sources2 -a ftp-master
156 dak contents generate -a ftp-master
# pdiff generation (separate function; header not visible here).
160 log "Generating pdiff files"
161 dak generate-index-diffs
# i18n/Index workaround, per the referenced bug; drop once #649314 allows.
165 # XXX: disable once we can remove i18n/Index (#649314)
166 log "Generating i18n/Index"
169 for dist in testing unstable experimental proposed-updates testing-proposed-updates; do
170 $scriptsdir/generate-i18n-Index $dist;
# Release file generation (separate function; header not visible here).
173 log "Generating Release files"
174 dak generate-releases -a ftp-master
# Expire old packages/files from the suites; -m bounds the batch size.
177 function dakcleanup() {
178 log "Cleanup old packages/files"
179 dak clean-suites -m 10000
180 # XXX: reactivate once clean-queues is fixed
# Recreate the buildd incoming dir from scratch so buildds never see a
# long-lived 403 while it is being refreshed.
184 function buildd_dir() {
185 # Rebuilt the buildd dir to avoid long times of 403
186 log "Regenerating the buildd incoming dir"
187 STAMP=$(date "+%Y%m%d%H%M")
# Housekeeping over the FTP tree (enclosing function header not visible):
# drop stray core dumps, then report wrongly-permissioned files/dirs.
196 log "Removing any core files ..."
197 find -type f -name core -print0 | xargs -0r rm -v
199 log "Checking permissions on files in the FTP tree ..."
# NOTE(review): "-perm +002" is the deprecated GNU find spelling of
# "-perm /002" — confirm the find version in use still accepts it.
200 find -type f \( \! -perm -444 -o -perm +002 \) -ls
201 find -type d \( \! -perm -555 -o -perm +002 \) -ls
203 log "Checking symlinks ..."
# Full recursive listing (ls -lR) published for mirrors, plus a compressed
# patch against the previous listing. ${FILENAME} is set outside this view.
206 log "Creating recursive directory listing ... "
207 rm -f .${FILENAME}.new
208 TZ=UTC ls -lR > .${FILENAME}.new
210 if [ -r ${FILENAME}.gz ] ; then
211 mv -f ${FILENAME}.gz ${FILENAME}.old.gz
212 mv -f .${FILENAME}.new ${FILENAME}
213 rm -f ${FILENAME}.patch.gz
214 zcat ${FILENAME}.old.gz | diff -u - ${FILENAME} | gzip -9cfn - >${FILENAME}.patch.gz
215 rm -f ${FILENAME}.old.gz
217 mv -f .${FILENAME}.new ${FILENAME}
220 gzip -9cfN ${FILENAME} >${FILENAME}.gz
# Build the Maintainers/Uploaders indices and compress them for the mirrors.
224 function mkmaintainers() {
225 log 'Creating Maintainers index ... '
228 dak make-maintainers ${scriptdir}/masterfiles/pseudo-packages.maintainers
229 gzip -9v --rsyncable <Maintainers >Maintainers.gz
230 gzip -9v --rsyncable <Uploaders >Uploaders.gz
# Publish compressed copies of the override files under ${indices}.
233 function copyoverrides() {
234 log 'Copying override files into public view ...'
236 for ofile in ${overridedir}/override.{squeeze,wheezy,sid}.{,extra.}{main,contrib,non-free}*; do
# NOTE(review): ${bname} is presumably derived from ${ofile} on a line not
# visible in this excerpt (e.g. bname=${ofile##*/}) — confirm upstream.
238 gzip -9cv --rsyncable ${ofile} > ${indices}/${bname}.gz
239 chmod g+w ${indices}/${bname}.gz
# Build the per-architecture / per-suite file lists under
# indices/files/components that mirror operators use for partial mirrors.
# The SQL below is assembled into $query strings whose opening/closing
# quote lines are not visible in this excerpt; no comments are inserted
# inside those string spans or mid-pipeline.
243 function mkfilesindices() {
245 cd $base/ftp/indices/files/components
249 log "Querying postgres"
251 SELECT CONCAT('./pool/', c.name, '/', f.filename) AS path, a.arch_string AS arch_string
253 JOIN files_archive_map af ON f.id = af.file_id
254 JOIN component c ON af.component_id = c.id
255 JOIN archive ON af.archive_id = archive.id
258 JOIN architecture a ON b.architecture = a.id)
260 WHERE archive.name = 'ftp-master'
261 ORDER BY path, arch_string
263 psql -At -c "$query" >$ARCHLIST
# Helper one-liners (their enclosing function headers are not visible):
# the first echoes each path plus every parent directory exactly once,
# the second reorders input so ./pool/ paths come before everything else.
266 perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
269 perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
# Source file list: pool paths without an arch suffix plus dists/ source bits.
272 log "Generating sources list"
274 sed -n 's/|$//p' $ARCHLIST
276 find ./dists -maxdepth 1 \! -type d
277 find ./dists \! -type d | grep "/source/"
278 ) | sort -u | gzip -9 > source.list.gz
# Per-architecture lists; amd64 is force-included, "all" handled separately.
280 log "Generating arch lists"
282 ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
284 (sed -n "s/|$a$//p" $ARCHLIST
285 sed -n 's/|all$//p' $ARCHLIST
288 find ./dists -maxdepth 1 \! -type d
289 find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
290 ) | sort -u | gzip -9 > arch-$a.list.gz
# Per-suite lists: all pool files referenced by a suite's sources/binaries.
293 log "Generating suite lists"
296 local suite_id="$(printf %d $1)"
299 SELECT DISTINCT CONCAT('./pool/', c.name, '/', f.filename)
301 (SELECT sa.source AS source
302 FROM src_associations sa
303 WHERE sa.suite = $suite_id
306 FROM extra_src_references esr
307 JOIN bin_associations ba ON esr.bin_id = ba.bin
308 WHERE ba.suite = $suite_id
310 SELECT b.source AS source
311 FROM bin_associations ba
312 JOIN binaries b ON ba.bin = b.id WHERE ba.suite = $suite_id) s
313 JOIN dsc_files df ON s.source = df.source
314 JOIN files f ON df.file = f.id
315 JOIN files_archive_map af ON f.id = af.file_id
316 JOIN component c ON af.component_id = c.id
317 JOIN archive ON af.archive_id = archive.id
318 WHERE archive.name = 'ftp-master'
320 psql -F' ' -A -t -c "$query"
323 SELECT CONCAT('./pool/', c.name, '/', f.filename)
324 FROM bin_associations ba
325 JOIN binaries b ON ba.bin = b.id
326 JOIN files f ON b.file = f.id
327 JOIN files_archive_map af ON f.id = af.file_id
328 JOIN component c ON af.component_id = c.id
329 JOIN archive ON af.archive_id = archive.id
330 WHERE ba.suite = $suite_id AND archive.name = 'ftp-master'
332 psql -F' ' -A -t -c "$query"
335 psql -F' ' -At -c "SELECT id, suite_name FROM suite" |
336 while read id suite; do
337 [ -e $base/ftp/dists/$suite ] || continue
340 distname=$(cd dists; readlink $suite || echo $suite)
341 find ./dists/$distname \! -type d
342 for distdir in ./dists/*; do
343 [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
347 ) | sort -u | gzip -9 > suite-${suite}.list.gz
# "Sundries" = everything on the site that no arch/source list claimed.
350 log "Finding everything on the ftp site to generate sundries"
351 (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
354 zcat *.list.gz | cat - *.list | sort -u |
355 diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
# Final per-arch file lists, pool paths first (via the helper above).
357 log "Generating files list"
360 (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
361 cat - sundries.list dists.list project.list docs.list indices.list |
362 sort -u | poolfirst > ../arch-$a.files
# Translation lists for sid/wheezy, then a combined "typical mirror" list.
366 for dist in sid wheezy; do
367 find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
371 (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-wheezy.list.gz) |
372 sort -u | poolfirst > ../typical.files
# Build the md5sums index and the dsync file list, and let dsync-flist
# hardlink duplicate files (failure tolerated via "|| true").
378 function mkchecksums() {
379 dsynclist=$dbdir/dsync.list
380 md5list=$indices/md5sums
382 log -n "Creating md5 / dsync index file ... "
385 ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
386 ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
387 ${bindir}/dsync-flist -q link-dups $dsynclist || true
# Mirror staging (enclosing function header not visible): pick the next
# archive serial — today's date-based serial or previous serial + 1,
# whichever is larger — write the trace file, then hardlink-copy the tree.
391 log "Regenerating \"public\" mirror/ hardlink fun"
392 DATE_SERIAL=$(date +"%Y%m%d01")
393 FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} )
394 if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
395 SERIAL="$DATE_SERIAL"
397 SERIAL="$FILESOAPLUS1"
399 date -u > ${TRACEFILE}
400 echo "Using dak v1" >> ${TRACEFILE}
401 echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
402 echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
# --link-dest hardlinks unchanged files against the live ftp tree.
404 rsync -aH --link-dest ${ftpdir} --delete --delete-after --delete-excluded --exclude Packages.*.new --exclude Sources.*.new --ignore-errors ${ftpdir}/. .
# Database-dump expiry (separate function; header not visible here).
408 log "Expiring old database dumps..."
410 $scriptsdir/expire_dumps -d . -p -f "dump_*"
# Clean transitions whose trigger condition has been met (-c) without asking (-a).
413 function transitionsclean() {
414 log "Removing out of date transitions..."
416 dak transitions -c -a
# DM uploader page refresh (separate function; header not visible here).
420 log "Updating DM html page"
421 $scriptsdir/dm-monitor >$webdir/dm-uploaders.html
# BTS categorization step (separate function; header not visible here).
425 log "Categorizing uncategorized bugs filed against ftp.debian.org"
# Kick the dd-accessible mirror into syncing, including the ftp dir.
429 function ddaccess() {
430 # Tell our dd accessible mirror to sync itself up. Including ftp dir.
431 log "Trigger dd accessible parts sync including ftp dir"
432 ssh -o Batchmode=yes -o ConnectTimeout=30 -o SetupTimeout=30 -2 -i ${base}/s3kr1t/pushddmirror dak@ries.debian.org pool
# Validate the public archive copy against each InRelease file (existence,
# size, SHA1 of every listed entry), then kick off the mirror push.
435 function mirrorpush() {
436 log "Checking the public archive copy"
437 cd ${mirrordir}/dists
440 for release in $(find . -name "InRelease"); do
441 echo "Processing: ${release}"
442 subdir=${release%/InRelease}
# Reads "checksum size name" triples from the SHA1 section (fed in at
# the matching "done" below).
443 while read SHASUM SIZE NAME; do
444 if ! [ -f "${subdir}/${NAME}" ]; then
445 bname=$(basename ${NAME})
446 if [[ "${bname}" =~ ^(Packages|Sources|Translation-[a-zA-Z_]+)$ ]]; then
447 # We don't keep unpacked files, don't check for their existence.
448 # We might want to go and check their unpacked shasum, but right now
449 # I don't care. I believe it should be enough if all the packed shasums
453 broken=$(( broken + 1 ))
454 echo "File ${subdir}/${NAME} is missing"
458 # We do have symlinks in the tree (see the contents files currently).
459 # So we use "readlink -f" to check the size of the target, as that's basically
460 # what gen-releases does
461 fsize=$(stat -c %s $(readlink -f "${subdir}/${NAME}"))
462 if [ ${fsize} -ne ${SIZE} ]; then
463 broken=$(( broken + 1 ))
464 echo "File ${subdir}/${NAME} has size ${fsize}, expected is ${SIZE}"
468 fshasum=$(sha1sum $(readlink -f "${subdir}/${NAME}"))
469 fshasum=${fshasum%% *}
470 if [ "${fshasum}" != "${SHASUM}" ]; then
471 broken=$(( broken + 1 ))
472 echo "File ${subdir}/${NAME} has checksum ${fshasum}, expected is ${SHASUM}"
# Feed only the SHA1 section of the InRelease file into the loop.
475 done < <(sed '1,/SHA1:/d' "${release}" | sed '/SHA256:/,$d')
# Abort the push if any entry was missing or mismatched.
478 if [ $broken -gt 0 ]; then
479 log_error "Trouble with the public mirror, found ${broken} errors"
483 log "Starting the mirrorpush"
484 date -u > /srv/ftp.debian.org/web/mirrorstart
485 echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
486 echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/mirrorstart
# Fire-and-forget: the actual mirror run happens in the background.
487 sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
# Export per-suite package lists for the i18n/DDTP project into a
# timestamped directory, sign the timestamp with our key, point the
# stable "i18n" symlink at it, and prune exports older than two days.
# (Enclosing function header is not visible in this excerpt.)
491 log "Exporting package data foo for i18n project"
492 STAMP=$(date "+%Y%m%d%H%M")
493 mkdir -p ${scriptdir}/i18n/${STAMP}
494 cd ${scriptdir}/i18n/${STAMP}
495 for suite in stable testing unstable; do
# Extract the suite codename from "dak admin s show" output.
496 codename=$(dak admin s show ${suite}|grep '^Codename')
497 codename=${codename##* }
498 echo "Codename is ${codename}"
499 dak control-suite -l ${suite} >${codename}
501 echo "${STAMP}" > timestamp
# Detached signature lets the importer (see the gpgv check elsewhere in
# this file) verify the export really came from us.
502 gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 55BE302B --detach-sign -o timestamp.gpg timestamp
506 ln -sfT ${scriptdir}/i18n/${STAMP} i18n
# Keep the current export; remove timestamped dirs older than 2 days.
509 find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
# Refresh dinstall statistics: raw data from the logs, graphs via R, and
# dak's own archive-size stats. (Function header not visible here.)
513 log "Updating stats data"
515 $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
516 R --slave --vanilla < $base/misc/ftpstats.R
517 dak stats arch-space > $webdir/arch-space
518 dak stats pkg-nums > $webdir/pkg-nums
# Let apt-ftparchive prune stale entries from its cache databases.
521 function aptftpcleanup() {
522 log "Clean up apt-ftparchive's databases"
524 apt-ftparchive -q clean apt.conf
# Drop transaction-id marker files older than ~3 months (90 days).
527 function cleantransactions() {
528 log "Cleanup transaction ids older than 3 months"
530 find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -print0 | xargs -0 --no-run-if-empty rm
# Feed a finished dinstall log ($1 = path) to the stats tool.
533 function logstats() {
534 $masterdir/tools/logs.py "$1"
537 # save timestamp when we start
538 function savetimestamp() {
539 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
540 echo ${NOW} > "${dbdir}/dinstallstart"
# Mail the complete dinstall log to the cron address.
543 function maillogfile() {
544 cat "$LOGFILE" | mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" cron@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
# Archive $LOGFILE under the start timestamp recorded by savetimestamp;
# if that record is missing, fall back to "now" and skip the statistics.
547 function renamelogfile() {
548 if [ -f "${dbdir}/dinstallstart" ]; then
549 NOW=$(cat "${dbdir}/dinstallstart")
551 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
552 logstats "$logdir/dinstall_${NOW}.log"
553 bzip2 -9 "$logdir/dinstall_${NOW}.log"
555 error "Problem, I don't know when dinstall started, unable to do log statistics."
556 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
558 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
559 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# Dump the list of source packages currently in testing for the web pages.
563 function testingsourcelist() {
564 dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
567 # do a last run of process-unchecked before dinstall is on.
568 function process_unchecked() {
569 log "Processing the unchecked queue"
# "-p" tells the queue run it may proceed without the usual lock
# (dinstall itself holds the locks at this point).
570 UNCHECKED_WITHOUT_LOCK="-p"
575 # do a run of newstage only before dinstall is on.
576 function newstage() {
577 log "Processing the newstage queue"
578 UNCHECKED_WITHOUT_LOCK="-p"
582 # Function to update a "statefile" telling people what we are doing
585 # This should be called with the argument(s)
586 # - Status name we want to show.
# Rewrite ${DINSTALLSTATE} with start time and the current action.
589 RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
590 cat >"${DINSTALLSTATE}" <<EOF
591 Dinstall start: ${DINSTALLBEGIN}
593 Action start: ${RIGHTNOW}
597 # extract changelogs and stuff
# Export changelog data, mirror it into the public export tree, and kick
# the metadata mirror run in the background.
598 function changelogs() {
599 log "Extracting changelogs"
600 dak make-changelog -e
601 mkdir -p ${exportpublic}/changelogs
602 cd ${exportpublic}/changelogs
603 rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
# Fire-and-forget: metadata push runs in the background, log kept per-run.
604 sudo -H -u archvsync /home/archvsync/runmirrors metaftpdo > ~dak/runmirrors-metadata.log 2>&1 &
# Check the current pdiff-relevant index files (Contents/Packages/Sources)
# into a git tree so history of the indices is kept.
607 function gitpdiff() {
608 # Might be that we want to change this to have more than one git repository.
609 # Advantage of one is that we do not need much space in terms of storage in git itself,
610 # git gc is pretty good on our input.
611 # But it might be faster. Well, lets test.
612 log "Adjusting the git tree for pdiffs"
615 # The regex needs the architectures separated with \|
616 garchs=$(dak admin a list|sed -e ':q;N;s/\n/\\|/g;t q')
618 # First, get all the files we want to work on. ../dists/ is a symlink to the real dists/ we
620 # Also, we only want contents, packages and sources.
# BUGFIX: this previously interpolated the undefined variable $archs, so the
# Contents-(...) alternation expanded empty; the list built above is $garchs.
621 for file in $(find ../dists/ -regex ".*/\(Contents-\($garchs\)\|\(Packages\|Sources\)\).gz"); do
630 # Second, add all there is into git
633 # Maybe we want to make this the same for tag and commit? But well, shouldn't matter
# NOTE(review): ${COMD} used in the commit message is set on a line not
# visible in this excerpt (presumably alongside TAGD) — confirm upstream.
635 TAGD=$(date +%Y-%m-%d-%H-%M)
636 git commit -m "Commit of ${COMD}"