# NOTE(review): this file is a sampled listing of dak's dinstall function
# library — each line carries its original line number and intermediate
# lines (bodies, else/fi arms, closing braces) are elided.  Code is left
# byte-identical below; only review comments are added.
2 # Timestamp. Used for dinstall stat graphs
# Fragment of the log() helper: one timestamped progress line per stage.
4 echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
# Drops only the daily lock (body elided in this listing).
8 function remove_daily_lock() {
# Removes every dinstall lock file in one go.
13 function remove_all_locks() {
# NOTE(review): $LOCK_* are unquoted — safe only while the lock paths
# never contain whitespace or glob characters; confirm where defined.
14 rm -f $LOCK_DAILY $LOCK_ACCEPTED $LOCK_NEW
17 # If we error out this one is called, *FOLLOWED* by cleanup above
# Error hook: mails the failing stage's log to the ftpmaster cron alias.
19 ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
21 subject="ATTENTION ATTENTION!"
# ${error} == "false" means dinstall kept going after the failure;
# anything else means the run was interrupted (else arm elided).
22 if [ "${error}" = "false" ]; then
23 subject="${subject} (continued)"
25 subject="${subject} (interrupted)"
27 subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
# The whole if/else output (log contents, or an apology when the log is
# unreadable) is piped as the mail body.
29 if [ -r "${STAGEFILE}.log" ]; then
30 cat "${STAGEFILE}.log"
32 echo "file ${STAGEFILE}.log does not exist, sorry"
33 fi | mail -s "${subject}" -a "X-Debian: DAK" cron@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
36 ########################################################################
37 # the actual dinstall functions follow #
38 ########################################################################
40 # pushing merkels QA user, part one
# Pokes the QA host so it knows a dinstall run started; the ssh only runs
# "sleep 1" — establishing the connection is itself the signal.
42 log "Telling QA user that we start dinstall"
43 ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1
46 # Updating various files
# Regenerates assorted static documentation files via helper scripts.
48 log "Updating Bugs docu, Mirror list and mailing-lists.txt"
50 $scriptsdir/update-bugdoctxt
51 $scriptsdir/update-mirrorlists
52 $scriptsdir/update-mailingliststxt
53 $scriptsdir/update-pseudopackages.sh
56 # The first i18n one, syncing new descriptions
58 log "Synchronizing i18n package descriptions"
59 # First sync their newest data
60 cd ${scriptdir}/i18nsync
# "|| true": the remote may legitimately be unreachable; don't abort dinstall.
61 rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
63 # Now check if we still know about the packages for which they created the files
64 # is the timestamp signed by us?
# NOTE(review): "if $(gpgv ...)" is an anti-pattern — the command
# substitution re-runs gpgv's *stdout* as a command; it only works here
# because gpgv prints its diagnostics to stderr.  Plain "if gpgv ..."
# is what is almost certainly intended.
65 if $(gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
66 # now read it. As its signed by us we are sure the content is what we expect, no need
67 # to do more here. And we only test -d a directory on it anyway.
68 TSTAMP=$(cat timestamp)
69 # do we have the dir still?
70 if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
# Validate the translation files before publishing them to the archive.
72 if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
73 # Yay, worked, lets copy around
74 for dir in wheezy sid; do
75 if [ -d dists/${dir}/ ]; then
76 cd dists/${dir}/main/i18n
77 rsync -aq --delete --delete-after --exclude Translation-en.bz2 --exclude Translation-*.diff/ . ${ftpdir}/dists/${dir}/main/i18n/.
79 cd ${scriptdir}/i18nsync
# Failure branches below (else arms elided): complain loudly by mail to
# the l10n list instead of publishing anything.
82 echo "ARRRR, bad guys, wrong files, ARRR"
83 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
86 echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
87 echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
90 echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
91 echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -a "X-Debian: DAK" -s "You can't keep blaming yourself. Just blame yourself once, and move on." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
96 log "Checking for cruft in overrides"
# Drops source/binary associations that newer versions make obsolete.
100 function dominate() {
101 log "Removing obsolete source and binary associations"
# Builds the per-suite file lists that apt-ftparchive consumes.
105 function filelist() {
106 log "Generating file lists for apt-ftparchive"
107 dak generate-filelist
# Imports the current Debian and Debian-Maintainers keyrings; mails a
# change summary to debian-project when the DM import reported changes.
110 function fingerprints() {
111 log "Updating fingerprints"
112 dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
115 dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
# -s: only mail when the import actually produced output.
117 if [ -s "${OUTFILE}" ]; then
# Heredoc mail body follows — lines below are message content, not code.
118 /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
119 From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
120 To: <debian-project@lists.debian.org>
121 Subject: Debian Maintainers Keyring changes
122 Content-Type: text/plain; charset=utf-8
126 The following changes to the debian-maintainers keyring have just been activated:
130 Debian distribution maintenance software,
131 on behalf of the Keyring maintainers
# Dumps the override data into plain-text files for publication.
138 function overrides() {
139 log "Writing overrides into text files"
# Concatenate the per-component sid overrides into a single "all3" file.
144 rm -f override.sid.all3
145 for i in main contrib non-free main.debian-installer; do cat override.sid.$i >> override.sid.all3; done
# Map every published file back to its package, bzip2-compressed.
149 log "Generating package / file mapping"
150 dak make-pkg-file-mapping | bzip2 -9 > $base/ftp/indices/package-file.map.bz2
# Core index generation: Packages, Sources and Contents files.
153 function packages() {
154 log "Generating Packages and Sources files"
155 dak generate-packages-sources2
156 dak contents generate
# Incremental index diffs so mirrors can patch instead of refetching.
160 log "Generating pdiff files"
161 dak generate-index-diffs
165 # XXX: disable once we can remove i18n/Index (#649314)
166 log "Generating i18n/Index"
169 for dist in testing unstable experimental proposed-updates testing-proposed-updates; do
170 $scriptsdir/generate-i18n-Index $dist;
# Release files must be regenerated after all the indices above are final.
173 log "Generating Release files"
174 dak generate-releases
# Removes packages/files no longer referenced by any suite.
177 function dakcleanup() {
178 log "Cleanup old packages/files"
# -m 10000: caps deletions per run — presumably to bound runtime and
# I/O per dinstall; confirm against `dak clean-suites` usage.
179 dak clean-suites -m 10000
183 function buildd_dir() {
184 # Rebuilt the buildd dir to avoid long times of 403
185 log "Regenerating the buildd incoming dir"
# Minute-resolution stamp used to version the regenerated dir.
186 STAMP=$(date "+%Y%m%d%H%M")
# Housekeeping over the FTP tree, then a fresh recursive ls-lR listing.
195 log "Removing any core files ..."
196 find -type f -name core -print0 | xargs -0r rm -v
198 log "Checking permissions on files in the FTP tree ..."
# Report (not fix) anything not world-readable or that is group/other
# writable; directories likewise against 555.
199 find -type f \( \! -perm -444 -o -perm +002 \) -ls
200 find -type d \( \! -perm -555 -o -perm +002 \) -ls
202 log "Checking symlinks ..."
205 log "Creating recursive directory listing ... "
206 rm -f .${FILENAME}.new
# TZ=UTC keeps the listing's timestamps stable across hosts.
207 TZ=UTC ls -lR > .${FILENAME}.new
# If a previous listing exists, also publish a unified-diff patch so
# mirrors can update incrementally.
209 if [ -r ${FILENAME}.gz ] ; then
210 mv -f ${FILENAME}.gz ${FILENAME}.old.gz
211 mv -f .${FILENAME}.new ${FILENAME}
212 rm -f ${FILENAME}.patch.gz
213 zcat ${FILENAME}.old.gz | diff -u - ${FILENAME} | gzip -9cfn - >${FILENAME}.patch.gz
214 rm -f ${FILENAME}.old.gz
# else arm (elided): first run, nothing to diff against.
216 mv -f .${FILENAME}.new ${FILENAME}
219 gzip -9cfN ${FILENAME} >${FILENAME}.gz
# Publishes the Maintainers / Uploaders indices.
223 function mkmaintainers() {
224 log 'Creating Maintainers index ... '
227 dak make-maintainers ${scriptdir}/masterfiles/pseudo-packages.maintainers
# --rsyncable: slightly larger output, much cheaper mirror syncs.
228 gzip -9v --rsyncable <Maintainers >Maintainers.gz
229 gzip -9v --rsyncable <Uploaders >Uploaders.gz
# Copies override files into the public indices directory.
232 function copyoverrides() {
233 log 'Copying override files into public view ...'
234 for ofile in ${overridedir}/override.{squeeze,wheezy,sid}.{,extra.}{main,contrib,non-free}*; do
# bname is derived from ${ofile} on an elided line; gzip into indices/.
237 gzip -9cv --rsyncable ${ofile} > ${indices}/${bname}.gz
238 chmod g+w ${indices}/${bname}.gz
# Generates the per-architecture / per-suite file lists under
# ftp/indices/files/components (consumed by partial mirrors).
242 function mkfilesindices() {
244 cd $base/ftp/indices/files/components
# Ask postgres for every pool path together with its architecture;
# the SQL lives in a quoted $query string (lines below are data).
248 log "Querying postgres"
250 SELECT CONCAT('./pool/', c.name, '/', f.filename) AS path, a.arch_string AS arch_string
252 JOIN files_archive_map af ON f.id = af.file_id
253 JOIN component c ON af.component_id = c.id
254 JOIN archive ON af.archive_id = archive.id
257 JOIN architecture a ON b.architecture = a.id)
259 WHERE archive.name = 'ftp-master'
260 ORDER BY path, arch_string
262 psql -At -c "$query" >$ARCHLIST
# Helper pipelines: emit each path plus all its parent directories
# (deduplicated via %d), and reorder output so pool/ paths come first.
265 perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
268 perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
# Source list: arch-less pool entries plus everything under dists/source.
271 log "Generating sources list"
273 sed -n 's/|$//p' $ARCHLIST
275 find ./dists -maxdepth 1 \! -type d
276 find ./dists \! -type d | grep "/source/"
277 ) | sort -u | gzip -9 > source.list.gz
# Per-arch lists; amd64 is force-included, "all" handled alongside each arch.
279 log "Generating arch lists"
281 ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
283 (sed -n "s/|$a$//p" $ARCHLIST
284 sed -n 's/|all$//p' $ARCHLIST
287 find ./dists -maxdepth 1 \! -type d
288 find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
289 ) | sort -u | gzip -9 > arch-$a.list.gz
# Suite lists: all source files referenced by a suite (src_associations,
# extra_src_references and binaries' sources) — SQL is $query data below.
292 log "Generating suite lists"
295 local suite_id="$(printf %d $1)"
298 SELECT DISTINCT CONCAT('./pool/', c.name, '/', f.filename)
300 (SELECT sa.source AS source
301 FROM src_associations sa
302 WHERE sa.suite = $suite_id
305 FROM extra_src_references esr
306 JOIN bin_associations ba ON esr.bin_id = ba.bin
307 WHERE ba.suite = $suite_id
309 SELECT b.source AS source
310 FROM bin_associations ba
311 JOIN binaries b ON ba.bin = b.id WHERE ba.suite = $suite_id) s
312 JOIN dsc_files df ON s.source = df.source
313 JOIN files f ON df.file = f.id
314 JOIN files_archive_map af ON f.id = af.file_id
315 JOIN component c ON af.component_id = c.id
316 JOIN archive ON af.archive_id = archive.id
317 WHERE archive.name = 'ftp-master'
319 psql -F' ' -A -t -c "$query"
322 SELECT CONCAT('./pool/', c.name, '/', f.filename)
323 FROM bin_associations ba
324 JOIN binaries b ON ba.bin = b.id
325 JOIN files f ON b.file = f.id
326 JOIN files_archive_map af ON f.id = af.file_id
327 JOIN component c ON af.component_id = c.id
328 JOIN archive ON af.archive_id = archive.id
329 WHERE ba.suite = $suite_id AND archive.name = 'ftp-master'
331 psql -F' ' -A -t -c "$query"
# Walk every known suite that exists on disk and build its list file,
# resolving codename symlinks so dists/<codename> is listed too.
334 psql -F' ' -At -c "SELECT id, suite_name FROM suite" |
335 while read id suite; do
336 [ -e $base/ftp/dists/$suite ] || continue
339 distname=$(cd dists; readlink $suite || echo $suite)
340 find ./dists/$distname \! -type d
341 for distdir in ./dists/*; do
342 [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
346 ) | sort -u | gzip -9 > suite-${suite}.list.gz
# Sundries: everything on the ftp site not already claimed by a list.
349 log "Finding everything on the ftp site to generate sundries"
350 (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
353 zcat *.list.gz | cat - *.list | sort -u |
354 diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
# Final per-arch files: arch list + shared lists, pool entries first.
356 log "Generating files list"
359 (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
360 cat - sundries.list dists.list project.list docs.list indices.list |
361 sort -u | poolfirst > ../arch-$a.files
# Translation lists per distribution, then a "typical" mirror profile.
365 for dist in sid wheezy; do
366 find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
370 (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-wheezy.list.gz) |
371 sort -u > poolfirst > ../typical.files
# Builds the md5sums / dsync index and hardlinks duplicate files.
377 function mkchecksums() {
378 dsynclist=$dbdir/dsync.list
379 md5list=$indices/md5sums
381 log -n "Creating md5 / dsync index file ... "
384 ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
385 ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
# link-dups is best-effort by design; "|| true" keeps dinstall going.
386 ${bindir}/dsync-flist -q link-dups $dsynclist || true
# Mirror serial = max(today's date-based serial, previous serial + 1),
# then rewrite the trace file and hardlink-copy the ftp tree.
390 log "Regenerating \"public\" mirror/ hardlink fun"
391 DATE_SERIAL=$(date +"%Y%m%d01")
392 FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} )
393 if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
394 SERIAL="$DATE_SERIAL"
396 SERIAL="$FILESOAPLUS1"
398 date -u > ${TRACEFILE}
399 echo "Using dak v1" >> ${TRACEFILE}
400 echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
401 echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
# --link-dest hardlinks unchanged files against ${ftpdir} to save space.
403 rsync -aH --link-dest ${ftpdir} --delete --delete-after --delete-excluded --exclude Packages.*.new --exclude Sources.*.new --ignore-errors ${ftpdir}/. .
# Prunes old database dumps (policy encoded in expire_dumps itself).
407 log "Expiring old database dumps..."
409 $scriptsdir/expire_dumps -d . -p -f "dump_*"
# Removes transitions that have been fulfilled.
412 function transitionsclean() {
413 log "Removing out of date transitions..."
415 dak transitions -c -a
# Regenerates the Debian-Maintainer uploaders status page.
419 log "Updating DM html page"
420 $scriptsdir/dm-monitor >$webdir/dm-uploaders.html
424 log "Categorizing uncategorized bugs filed against ftp.debian.org"
428 function ddaccess() {
429 # Tell our dd accessible mirror to sync itself up. Including ftp dir.
430 log "Trigger dd accessible parts sync including ftp dir"
# NOTE(review): "Batchmode"/"SetupTimeout" casing differs from the ssh
# call earlier in this file ("BatchMode"/"SetupTimeOut") — confirm which
# spelling the installed ssh accepts.
431 ssh -o Batchmode=yes -o ConnectTimeout=30 -o SetupTimeout=30 -2 -i ${base}/s3kr1t/pushddmirror dak@ries.debian.org pool
# Verifies the public archive copy against each InRelease's SHA1 section
# (presence, size and checksum of every listed file), then kicks off the
# actual mirror push.
434 function mirrorpush() {
435 log "Checking the public archive copy"
436 cd ${mirrordir}/dists
439 for release in $(find . -name "InRelease"); do
440 echo "Processing: ${release}"
441 subdir=${release%/InRelease}
# Reads "SHASUM SIZE NAME" triples from the SHA1 section (see the
# process substitution at the bottom of this loop).
442 while read SHASUM SIZE NAME; do
443 if ! [ -f "${subdir}/${NAME}" ]; then
444 bname=$(basename ${NAME})
445 if [[ "${bname}" =~ ^(Packages|Sources|Translation-[a-zA-Z_]+)$ ]]; then
446 # We don't keep unpacked files, don't check for their existence.
447 # We might want to go and check their unpacked shasum, but right now
448 # I don't care. I believe it should be enough if all the packed shasums
452 broken=$(( broken + 1 ))
453 echo "File ${subdir}/${NAME} is missing"
457 # We do have symlinks in the tree (see the contents files currently).
458 # So we use "readlink -f" to check the size of the target, as thats basically
459 # what gen-releases does
460 fsize=$(stat -c %s $(readlink -f "${subdir}/${NAME}"))
461 if [ ${fsize} -ne ${SIZE} ]; then
462 broken=$(( broken + 1 ))
463 echo "File ${subdir}/${NAME} has size ${fsize}, expected is ${SIZE}"
# Size matched — now compare the SHA1 checksum itself.
467 fshasum=$(sha1sum $(readlink -f "${subdir}/${NAME}"))
468 fshasum=${fshasum%% *}
469 if [ "${fshasum}" != "${SHASUM}" ]; then
470 broken=$(( broken + 1 ))
471 echo "File ${subdir}/${NAME} has checksum ${fshasum}, expected is ${SHASUM}"
# Feed the loop only the SHA1 section: everything between "SHA1:" and
# "SHA256:" in the InRelease file.
474 done < <(sed '1,/SHA1:/d' "${release}" | sed '/SHA256:/,$d')
# Abort the push if any file was missing, mis-sized or mis-summed.
477 if [ $broken -gt 0 ]; then
478 log_error "Trouble with the public mirror, found ${broken} errors"
482 log "Starting the mirrorpush"
483 date -u > /srv/ftp.debian.org/web/mirrorstart
484 echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
485 echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/mirrorstart
# Fire-and-forget: the mirror run happens in the background as archvsync.
486 sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
# Exports per-suite package lists for the i18n project, signed with our
# archive key so the other side can verify the timestamp.
490 log "Exporting package data foo for i18n project"
491 STAMP=$(date "+%Y%m%d%H%M")
492 mkdir -p ${scriptdir}/i18n/${STAMP}
493 cd ${scriptdir}/i18n/${STAMP}
494 for suite in stable testing unstable; do
# Extract the codename ("Codename: foo") and dump the suite under it.
495 codename=$(dak admin s show ${suite}|grep '^Codename')
496 codename=${codename##* }
497 echo "Codename is ${codename}"
498 dak control-suite -l ${suite} >${codename}
# Detached-sign the timestamp so consumers can trust the export's age.
500 echo "${STAMP}" > timestamp
501 gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 55BE302B --detach-sign -o timestamp.gpg timestamp
# Atomically repoint the "i18n" symlink, then expire exports >2 days old.
505 ln -sfT ${scriptdir}/i18n/${STAMP} i18n
508 find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
# Refreshes the ftp traffic statistics and graphs (via R).
512 log "Updating stats data"
514 $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
515 R --slave --vanilla < $base/misc/ftpstats.R
516 dak stats arch-space > $webdir/arch-space
517 dak stats pkg-nums > $webdir/pkg-nums
# Compacts apt-ftparchive's cache databases.
520 function aptftpcleanup() {
521 log "Clean up apt-ftparchive's databases"
523 apt-ftparchive -q clean apt.conf
# Removes transaction-id files older than ~3 months.
526 function cleantransactions() {
527 log "Cleanup transaction ids older than 3 months"
529 find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -print0 | xargs -0 --no-run-if-empty rm
# Feeds a finished logfile to the stats tool for the dinstall graphs.
532 function logstats() {
533 $masterdir/tools/logs.py "$1"
536 # save timestamp when we start
537 function savetimestamp() {
# NOTE(review): backticks — $(date ...) would match the rest of the file.
538 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
539 echo ${NOW} > "${dbdir}/dinstallstart"
# Mails the full run log to the cron alias.
542 function maillogfile() {
543 cat "$LOGFILE" | mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" cron@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
# Renames the live logfile after the start timestamp recorded by
# savetimestamp(), runs log statistics, and compresses it.  Falls back
# to "now" when no dinstallstart marker exists (else arm at 554).
546 function renamelogfile() {
547 if [ -f "${dbdir}/dinstallstart" ]; then
548 NOW=$(cat "${dbdir}/dinstallstart")
550 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
551 logstats "$logdir/dinstall_${NOW}.log"
552 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# Fallback: no marker, so no stats run — just archive under current time.
554 error "Problem, I don't know when dinstall started, unable to do log statistics."
555 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
557 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
558 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# Publishes the list of source packages currently in testing.
562 function testingsourcelist() {
563 dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
566 # do a last run of process-unchecked before dinstall is on.
567 function process_unchecked() {
568 log "Processing the unchecked queue"
# -p: run without taking the unchecked lock — dinstall already holds the
# global locks at this point (presumably; confirm against cron.unchecked).
569 UNCHECKED_WITHOUT_LOCK="-p"
574 # do a run of newstage only before dinstall is on.
575 function newstage() {
576 log "Processing the newstage queue"
577 UNCHECKED_WITHOUT_LOCK="-p"
581 # Function to update a "statefile" telling people what we are doing
584 # This should be called with the argument(s)
585 # - Status name we want to show.
# Writes the public dinstall state file; heredoc body follows (data).
588 RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
589 cat >"${DINSTALLSTATE}" <<EOF
590 Dinstall start: ${DINSTALLBEGIN}
592 Action start: ${RIGHTNOW}
596 # extract changelogs and stuff
597 function changelogs() {
598 log "Extracting changelogs"
599 dak make-changelog -e
600 mkdir -p ${exportpublic}/changelogs
601 cd ${exportpublic}/changelogs
602 rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
# Background push of the metadata mirrors; output goes to a logfile.
603 sudo -H -u archvsync /home/archvsync/runmirrors metaftpdo > ~dak/runmirrors-metadata.log 2>&1 &
# Maintains a git history of the index files (for pdiff experiments).
606 function gitpdiff() {
607 # Might be that we want to change this to have more than one git repository.
608 # Advantage of one is that we do not need much space in terms of storage in git itself,
609 # git gc is pretty good on our input.
610 # But it might be faster. Well, lets test.
611 log "Adjusting the git tree for pdiffs"
614 # The regex needs the architectures separated with \|
615 garchs=$(dak admin a list|sed -e ':q;N;s/\n/\\|/g;t q')
617 # First, get all the files we want to work on. ../dists/ is a symlink to the real dists/ we
619 # Also, we only want contents, packages and sources.
# NOTE(review): the arch list is stored in $garchs (line 615) but the
# regex below interpolates $archs — likely a stale variable name; the
# find pattern would therefore never match Contents files.  Confirm.
620 for file in $(find ../dists/ -regex ".*/\(Contents-\($archs\)\|\(Packages\|Sources\)\).gz"); do
629 # Second, add all there is into git
632 # Maybe we want to make this the same for tag and commit? But well, shouldn't matter
634 TAGD=$(date +%Y-%m-%d-%H-%M)
# NOTE(review): commits with ${COMD} in the message while only TAGD is
# visible here — COMD is presumably set on an elided line; verify.
635 git commit -m "Commit of ${COMD}"