2 # Timestamp. Used for dinstall stat graphs
# Print the current wall-clock time tagged with the stage name in $1 so the
# dinstall run can be graphed later.
# NOTE(review): this excerpt is fragmented; each line starts with its original
# file line number and many intermediate lines are missing.
4 echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
# Lock helpers. NOTE(review): the bodies are truncated in this excerpt; only
# the headers and a single rm line are visible.
8 function remove_daily_lock() {
13 function remove_all_locks() {
# Drop every dinstall lock file at once (daily, accepted and new-queue locks).
14 rm -f $LOCK_DAILY $LOCK_ACCEPTED $LOCK_NEW
17 # If we error out this one is called, *FOLLOWED* by cleanup above
# NOTE(review): the enclosing function header is missing from this excerpt.
# On failure this builds a subject line and mails the failed stage's log to
# the ftp-master cron alias.
19 ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
21 subject="ATTENTION ATTENTION!"
# ${error} == "false" marks a continued run; otherwise it was interrupted.
22 if [ "${error}" = "false" ]; then
23 subject="${subject} (continued)"
25 subject="${subject} (interrupted)"
27 subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
# Pipe the whole if/else output (stage log if readable, apology otherwise)
# into mail(1).
29 if [ -r "${STAGEFILE}.log" ]; then
30 cat "${STAGEFILE}.log"
32 echo "file ${STAGEFILE}.log does not exist, sorry"
33 fi | mail -s "${subject}" -a "X-Debian: DAK" cron@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
36 ########################################################################
37 # the actual dinstall functions follow #
38 ########################################################################
40 # pushing merkels QA user, part one
42 log "Telling QA user that we start dinstall"
# Ping the QA host over ssh (BatchMode, 90s timeouts) so it knows dinstall began.
# NOTE(review): "SetupTimeOut" is not a documented OpenSSH option name
# (ConnectTimeout is) — verify whether this option is silently ignored.
43 ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1
46 # Updating various files
48 log "Updating Bugs docu, Mirror list and mailing-lists.txt"
# Run the individual refresh scripts; each regenerates one published file.
50 $scriptsdir/update-bugdoctxt
51 $scriptsdir/update-mirrorlists
52 $scriptsdir/update-mailingliststxt
53 $scriptsdir/update-pseudopackages.sh
56 # The first i18n one, syncing new descriptions
58 log "Synchronizing i18n package descriptions"
59 # First sync their newest data
60 cd ${scriptdir}/i18nsync
# Best-effort rsync from the ddtp host; "|| true" keeps dinstall going on failure.
61 rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
63 # Now check if we still know about the packages for which they created the files
64 # is the timestamp signed by us?
# NOTE(review): "if $(gpgv ...)" runs gpgv's *stdout* as a command; the usual
# idiom is "if gpgv ...". Left unchanged because this excerpt is fragmented.
65 if $(gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
66 # now read it. As it's signed by us we are sure the content is what we expect, no need
67 # to do more here. And we only test -d a directory on it anyway.
68 TSTAMP=$(cat timestamp)
69 # do we have the dir still?
70 if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
# Run the i18n sanity checker before publishing anything.
72 if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
73 # Yay, worked, lets copy around
74 for dir in wheezy sid; do
75 if [ -d dists/${dir}/ ]; then
76 cd dists/${dir}/main/i18n
# Publish everything except files the archive generates itself.
77 rsync -aq --delete --delete-after --exclude Translation-en.bz2 --exclude Translation-*.diff/ . ${ftpdir}/dists/${dir}/main/i18n/.
79 cd ${scriptdir}/i18nsync
# Failure branches below: log loudly and mail the l10n list.
82 echo "ARRRR, bad guys, wrong files, ARRR"
83 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
86 echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
87 echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
90 echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
91 echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -a "X-Debian: DAK" -s "You can't keep blaming yourself. Just blame yourself once, and move on." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
# NOTE(review): the enclosing function and its body are truncated in this excerpt.
96 log "Checking for cruft in overrides"
# Body truncated in this excerpt; only the log line is visible.
100 function dominate() {
101 log "Removing obsolete source and binary associations"
# Generate the file lists apt-ftparchive consumes.
105 function filelist() {
106 log "Generating file lists for apt-ftparchive"
107 dak generate-filelist
# Refresh keyrings in the database and announce DM keyring changes by mail.
110 function fingerprints() {
111 log "Updating fingerprints"
112 dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
# Import the debian-maintainers keyring, capturing the change summary in OUTFILE.
115 dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
# Only mail the announcement when the import reported changes (non-empty file).
# The here-document below is the literal mail body — do not edit casually.
117 if [ -s "${OUTFILE}" ]; then
118 /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
119 From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
120 To: <debian-project@lists.debian.org>
121 Subject: Debian Maintainers Keyring changes
122 Content-Type: text/plain; charset=utf-8
126 The following changes to the debian-maintainers keyring have just been activated:
130 Debian distribution maintenance software,
131 on behalf of the Keyring maintainers
# Export override data into text files for publication.
138 function overrides() {
139 log "Writing overrides into text files"
# Concatenate the per-component sid override files into a single "all3" file.
144 rm -f override.sid.all3
145 for i in main contrib non-free main.debian-installer; do cat override.sid.$i >> override.sid.all3; done
# NOTE(review): enclosing function header truncated in this excerpt.
149 log "Generating package / file mapping"
# Publish the package→file map, bzip2 -9 for maximum compression.
150 dak make-pkg-file-mapping | bzip2 -9 > $base/ftp/indices/package-file.map.bz2
# Generate Packages/Sources indices and Contents files.
153 function packages() {
154 log "Generating Packages and Sources files"
155 dak generate-packages-sources2
156 dak contents generate
# NOTE(review): enclosing function header truncated in this excerpt.
160 log "Generating pdiff files"
161 dak generate-index-diffs
165 # XXX: disable once we can remove i18n/Index (#649314)
166 log "Generating i18n/Index"
# One i18n/Index per affected suite.
169 for dist in testing unstable experimental proposed-updates testing-proposed-updates; do
170 $scriptsdir/generate-i18n-Index $dist;
173 log "Generating Release files"
174 dak generate-releases
# Expire old packages/files from the archive database and pool.
177 function dakcleanup() {
178 log "Cleanup old packages/files"
# -m 10000: batch/limit parameter passed through to "dak clean-suites".
179 dak clean-suites -m 10000
183 function buildd_dir() {
184 # Rebuild the buildd dir to avoid long times of 403
185 log "Regenerating the buildd incoming dir"
# Minute-resolution stamp used by the (truncated) remainder of this function.
186 STAMP=$(date "+%Y%m%d%H%M")
# NOTE(review): enclosing function header truncated; this fragment cleans up
# the FTP tree and rebuilds the published recursive "ls -lR" listing.
195 log "Removing any core files ..."
196 find -type f -name core -print0 | xargs -0r rm -v
198 log "Checking permissions on files in the FTP tree ..."
# List entries that are group/world-writable or not world-readable; -ls output
# goes to the log for humans to inspect.
# NOTE(review): "-perm +002" is old GNU find syntax (modern form is "/002") —
# confirm the installed findutils still accepts it.
199 find -type f \( \! -perm -444 -o -perm +002 \) -ls
200 find -type d \( \! -perm -555 -o -perm +002 \) -ls
202 log "Checking symlinks ..."
205 log "Creating recursive directory listing ... "
# Build the new listing under a dot-name, then swap it into place below.
206 rm -f .${FILENAME}.new
207 TZ=UTC ls -lR > .${FILENAME}.new
# If a previous listing exists, rotate it and publish a gzipped unified diff.
209 if [ -r ${FILENAME}.gz ] ; then
210 mv -f ${FILENAME}.gz ${FILENAME}.old.gz
211 mv -f .${FILENAME}.new ${FILENAME}
212 rm -f ${FILENAME}.patch.gz
213 zcat ${FILENAME}.old.gz | diff -u - ${FILENAME} | gzip -9cfn - >${FILENAME}.patch.gz
214 rm -f ${FILENAME}.old.gz
216 mv -f .${FILENAME}.new ${FILENAME}
# -N stores the original name/timestamp in the gzip header.
219 gzip -9cfN ${FILENAME} >${FILENAME}.gz
# Regenerate and compress the Maintainers / Uploaders indices.
223 function mkmaintainers() {
224 log 'Creating Maintainers index ... '
227 dak make-maintainers ${scriptdir}/masterfiles/pseudo-packages.maintainers
# --rsyncable keeps the gzip output delta-friendly for mirror syncs.
228 gzip -9v --rsyncable <Maintainers >Maintainers.gz
229 gzip -9v --rsyncable <Uploaders >Uploaders.gz
# Publish compressed copies of the override files into the indices directory.
232 function copyoverrides() {
233 log 'Copying override files into public view ...'
235 for ofile in ${overridedir}/override.{squeeze,wheezy,sid}.{,extra.}{main,contrib,non-free}*; do
237 gzip -9cv --rsyncable ${ofile} > ${indices}/${bname}.gz
# Group-writable so other archive maintainers can regenerate it.
238 chmod g+w ${indices}/${bname}.gz
# Build the per-arch / per-suite / per-source file lists published under
# indices/files/components. NOTE(review): this function is heavily truncated in
# this excerpt (SQL string delimiters, helper-function headers and loop closers
# are missing) — comments are kept outside the visible SQL text.
242 function mkfilesindices() {
244 cd $base/ftp/indices/files/components
# Dump pool paths plus architecture from the projectb database into ARCHLIST.
248 log "Querying postgres"
250 SELECT CONCAT('./pool/', c.name, '/', f.filename) AS path, a.arch_string AS arch_string
252 JOIN files_archive_map af ON f.id = af.file_id
253 JOIN component c ON af.component_id = c.id
254 JOIN archive ON af.archive_id = archive.id
257 JOIN architecture a ON b.architecture = a.id)
259 WHERE archive.name = 'ftp-master'
260 ORDER BY path, arch_string
262 psql -At -c "$query" >$ARCHLIST
265 perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
268 perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
# Source list: pool source files plus everything under dists/*/source/.
271 log "Generating sources list"
273 sed -n 's/|$//p' $ARCHLIST
275 find ./dists -maxdepth 1 \! -type d
276 find ./dists \! -type d | grep "/source/"
277 ) | sort -u | gzip -9 > source.list.gz
# Per-architecture lists: arch-specific pool files, arch-all files, and the
# matching dists/ artifacts.
279 log "Generating arch lists"
281 ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
283 (sed -n "s/|$a$//p" $ARCHLIST
284 sed -n 's/|all$//p' $ARCHLIST
287 find ./dists -maxdepth 1 \! -type d
288 find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
289 ) | sort -u | gzip -9 > arch-$a.list.gz
# Per-suite lists, derived from src/bin associations in the database.
292 log "Generating suite lists"
295 local suite_id="$(printf %d $1)"
298 SELECT DISTINCT CONCAT('./pool/', c.name, '/', f.filename)
300 (SELECT sa.source AS source
301 FROM src_associations sa
302 WHERE sa.suite = $suite_id
304 SELECT b.source AS source
305 FROM bin_associations ba
306 JOIN binaries b ON ba.bin = b.id WHERE ba.suite = $suite_id) s
307 JOIN dsc_files df ON s.source = df.source
308 JOIN files f ON df.file = f.id
309 JOIN files_archive_map af ON f.id = af.file_id
310 JOIN component c ON af.component_id = c.id
311 JOIN archive ON af.archive_id = archive.id
312 WHERE archive.name = 'ftp-master'
314 psql -F' ' -A -t -c "$query"
317 SELECT CONCAT('./pool/', c.name, '/', f.filename)
318 FROM bin_associations ba
319 JOIN binaries b ON ba.bin = b.id
320 JOIN files f ON b.file = f.id
321 JOIN files_archive_map af ON f.id = af.file_id
322 JOIN component c ON af.component_id = c.id
323 JOIN archive ON af.archive_id = archive.id
324 WHERE ba.suite = $suite_id AND archive.name = 'ftp-master'
326 psql -F' ' -A -t -c "$query"
329 psql -F' ' -At -c "SELECT id, suite_name FROM suite" |
330 while read id suite; do
331 [ -e $base/ftp/dists/$suite ] || continue
334 distname=$(cd dists; readlink $suite || echo $suite)
335 find ./dists/$distname \! -type d
336 for distdir in ./dists/*; do
337 [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
341 ) | sort -u | gzip -9 > suite-${suite}.list.gz
# Sundries: everything on the ftp site not claimed by any of the lists above.
344 log "Finding everything on the ftp site to generate sundries"
345 (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
348 zcat *.list.gz | cat - *.list | sort -u |
349 diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
# Combine per-arch, sundries and shared lists into the final arch-*.files.
351 log "Generating files list"
354 (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
355 cat - sundries.list dists.list project.list docs.list indices.list |
356 sort -u | poolfirst > ../arch-$a.files
360 for dist in sid wheezy; do
361 find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
# "typical.files": what a typical i386/amd64 mirror would carry.
365 (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-wheezy.list.gz) |
366 sort -u > poolfirst > ../typical.files
# Build the md5sums index and dsync file list, then hardlink duplicate files.
372 function mkchecksums() {
373 dsynclist=$dbdir/dsync.list
374 md5list=$indices/md5sums
376 log -n "Creating md5 / dsync index file ... "
# The dsync list itself is excluded so it never indexes itself.
379 ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
380 ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
# link-dups is best-effort; "|| true" keeps dinstall going if it fails.
381 ${bindir}/dsync-flist -q link-dups $dsynclist || true
# NOTE(review): enclosing function header truncated. Regenerates the public
# mirror/ tree as a hardlink farm of the ftp tree and bumps the trace serial.
385 log "Regenerating \"public\" mirror/ hardlink fun"
# Serial is max(today's date-based serial, previous serial from the trace file + 1).
386 DATE_SERIAL=$(date +"%Y%m%d01")
387 FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} )
388 if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
389 SERIAL="$DATE_SERIAL"
391 SERIAL="$FILESOAPLUS1"
# Rewrite the trace file with date, software version, host and serial.
393 date -u > ${TRACEFILE}
394 echo "Using dak v1" >> ${TRACEFILE}
395 echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
396 echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
# --link-dest hardlinks unchanged files against the ftp tree to save space.
398 rsync -aH --link-dest ${ftpdir} --delete --delete-after --delete-excluded --exclude Packages.*.new --exclude Sources.*.new --ignore-errors ${ftpdir}/. .
# NOTE(review): enclosing function header truncated in this excerpt.
402 log "Expiring old database dumps..."
404 $scriptsdir/expire_dumps -d . -p -f "dump_*"
# Remove finished entries from the transitions tracking file.
407 function transitionsclean() {
408 log "Removing out of date transitions..."
410 dak transitions -c -a
# NOTE(review): two separate truncated function bodies follow; both headers are
# missing from this excerpt.
414 log "Updating DM html page"
415 $scriptsdir/dm-monitor >$webdir/dm-uploaders.html
419 log "Categorizing uncategorized bugs filed against ftp.debian.org"
423 function ddaccess() {
424 # Tell our dd accessible mirror to sync itself up. Including ftp dir.
425 log "Trigger dd accessible parts sync including ftp dir"
# NOTE(review): "Batchmode"/"SetupTimeout" casing differs from the documented
# OpenSSH option names (BatchMode / no SetupTimeout) — verify.
426 ssh -o Batchmode=yes -o ConnectTimeout=30 -o SetupTimeout=30 -2 -i ${base}/s3kr1t/pushddmirror dak@ries.debian.org pool
# Verify the public archive copy against the InRelease checksums, then kick
# off the mirror push. NOTE(review): several fi/else/done closers are missing
# from this excerpt.
429 function mirrorpush() {
430 log "Checking the public archive copy"
431 cd ${mirrordir}/dists
# Walk every InRelease file and verify the files it lists.
434 for release in $(find . -name "InRelease"); do
435 echo "Processing: ${release}"
436 subdir=${release%/InRelease}
# Each checksum line is "SHASUM SIZE NAME".
437 while read SHASUM SIZE NAME; do
438 if ! [ -f "${subdir}/${NAME}" ]; then
439 bname=$(basename ${NAME})
440 if [[ "${bname}" =~ ^(Packages|Sources|Translation-[a-zA-Z_]+)$ ]]; then
441 # We don't keep unpacked files, don't check for their existence.
442 # We might want to go and check their unpacked shasum, but right now
443 # I don't care. I believe it should be enough if all the packed shasums
447 broken=$(( broken + 1 ))
448 echo "File ${subdir}/${NAME} is missing"
452 # We do have symlinks in the tree (see the contents files currently).
453 # So we use "readlink -f" to check the size of the target, as that's basically
454 # what gen-releases does
455 fsize=$(stat -c %s $(readlink -f "${subdir}/${NAME}"))
456 if [ ${fsize} -ne ${SIZE} ]; then
457 broken=$(( broken + 1 ))
458 echo "File ${subdir}/${NAME} has size ${fsize}, expected is ${SIZE}"
# Size matched; now compare the SHA1 of the link target.
462 fshasum=$(sha1sum $(readlink -f "${subdir}/${NAME}"))
463 fshasum=${fshasum%% *}
464 if [ "${fshasum}" != "${SHASUM}" ]; then
465 broken=$(( broken + 1 ))
466 echo "File ${subdir}/${NAME} has checksum ${fshasum}, expected is ${SHASUM}"
# Feed only the SHA1 section of the InRelease file into the loop.
469 done < <(sed '1,/SHA1:/d' "${release}" | sed '/SHA256:/,$d')
# Abort the push if any file was missing or mismatched.
472 if [ $broken -gt 0 ]; then
473 log_error "Trouble with the public mirror, found ${broken} errors"
477 log "Starting the mirrorpush"
478 date -u > /srv/ftp.debian.org/web/mirrorstart
479 echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
480 echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/mirrorstart
# Fire the mirror-push script in the background as the archvsync user.
481 sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
# NOTE(review): enclosing function header truncated. Exports per-suite package
# lists for the i18n project into a timestamped, gpg-signed directory.
485 log "Exporting package data foo for i18n project"
486 STAMP=$(date "+%Y%m%d%H%M")
487 mkdir -p ${scriptdir}/i18n/${STAMP}
488 cd ${scriptdir}/i18n/${STAMP}
489 for suite in stable testing unstable; do
# Resolve the suite's codename ("Codename: foo" → "foo") for the file name.
490 codename=$(dak admin s show ${suite}|grep '^Codename')
491 codename=${codename##* }
492 echo "Codename is ${codename}"
493 dak control-suite -l ${suite} >${codename}
# Sign the timestamp so the i18n side can prove the export came from us
# (the consumer side verifies this with gpgv, see the sync function above).
495 echo "${STAMP}" > timestamp
496 gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 55BE302B --detach-sign -o timestamp.gpg timestamp
# Atomically point the "i18n" symlink at the new export, then prune exports
# older than 2 days (keeping the current one).
500 ln -sfT ${scriptdir}/i18n/${STAMP} i18n
503 find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
# NOTE(review): enclosing function header truncated. Refreshes ftp statistics
# data and the derived web pages.
507 log "Updating stats data"
509 $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
# Render graphs/tables from the data via the R script.
510 R --slave --vanilla < $base/misc/ftpstats.R
511 dak stats arch-space > $webdir/arch-space
512 dak stats pkg-nums > $webdir/pkg-nums
# Prune stale entries from apt-ftparchive's cache databases.
515 function aptftpcleanup() {
516 log "Clean up apt-ftparchive's databases"
518 apt-ftparchive -q clean apt.conf
# Delete transaction-id files older than ~3 months (90 days).
521 function cleantransactions() {
522 log "Cleanup transaction ids older than 3 months"
524 find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -print0 | xargs -0 --no-run-if-empty rm
# Feed a finished logfile ($1) to the log-statistics tool.
527 function logstats() {
528 $masterdir/tools/logs.py "$1"
531 # save timestamp when we start
532 function savetimestamp() {
# Record the start time in dbdir/dinstallstart; renamelogfile reads it back.
533 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
534 echo ${NOW} > "${dbdir}/dinstallstart"
# Mail the complete dinstall log to the cron alias, stamped with the run time.
537 function maillogfile() {
538 cat "$LOGFILE" | mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" cron@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
# Rename the logfile after the timestamp saved by savetimestamp, run the log
# statistics on it, and compress it. Falls back to the current time (with a
# warning) when the dinstallstart stamp file is missing.
541 function renamelogfile() {
542 if [ -f "${dbdir}/dinstallstart" ]; then
543 NOW=$(cat "${dbdir}/dinstallstart")
545 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
546 logstats "$logdir/dinstall_${NOW}.log"
547 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# Fallback branch: no start stamp, so no per-run statistics are possible.
549 error "Problem, I don't know when dinstall started, unable to do log statistics."
550 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
552 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
553 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# Publish the list of source packages currently in testing.
557 function testingsourcelist() {
558 dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
561 # do a last run of process-unchecked before dinstall is on.
562 function process_unchecked() {
563 log "Processing the unchecked queue"
# "-p": process without the usual lock, since dinstall itself holds control.
564 UNCHECKED_WITHOUT_LOCK="-p"
569 # do a run of newstage only before dinstall is on.
570 function newstage() {
571 log "Processing the newstage queue"
# "-p": process without the usual lock, since dinstall itself holds control.
572 UNCHECKED_WITHOUT_LOCK="-p"
576 # Function to update a "statefile" telling people what we are doing
579 # This should be called with the argument(s)
580 # - Status name we want to show.
# Capture the current UTC time (with epoch seconds) for the state file.
# The here-document below is the literal state-file content — do not edit.
583 RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
584 cat >"${DINSTALLSTATE}" <<EOF
585 Dinstall start: ${DINSTALLBEGIN}
587 Action start: ${RIGHTNOW}
591 # extract changelogs and stuff
592 function changelogs() {
593 log "Extracting changelogs"
594 dak make-changelog -e
# Sync the extracted changelogs into the public export area, then trigger the
# metadata mirror push in the background as the archvsync user.
595 mkdir -p ${exportpublic}/changelogs
596 cd ${exportpublic}/changelogs
597 rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
598 sudo -H -u archvsync /home/archvsync/runmirrors metaftpdo > ~dak/runmirrors-metadata.log 2>&1 &
601 function gitpdiff() {
602 # Might be that we want to change this to have more than one git repository.
603 # Advantage of one is that we do not need much space in terms of storage in git itself,
604 # git gc is pretty good on our input.
605 # But it might be faster. Well, lets test.
606 log "Adjusting the git tree for pdiffs"
609 # The regex needs the architectures separated with \|
# Build an alternation ("arch1\|arch2\|...") of all known architectures.
610 garchs=$(dak admin a list|sed -e ':q;N;s/\n/\\|/g;t q')
612 # First, get all the files we want to work on. ../dists/ is a symlink to the real dists/ we
614 # Also, we only want contents, packages and sources.
# NOTE(review): the regex interpolates $archs but the variable computed above
# is $garchs — looks like a bug or the assignment of archs is in a truncated
# line; verify against the full file.
615 for file in $(find ../dists/ -regex ".*/\(Contents-\($archs\)\|\(Packages\|Sources\)\).gz"); do
624 # Second, add all there is into git
627 # Maybe we want to make this the same for tag and commit? But well, shouldn't matter
# Minute-resolution date used for the tag/commit message.
629 TAGD=$(date +%Y-%m-%d-%H-%M)
630 git commit -m "Commit of ${COMD}"