X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=config%2Fdebian%2Fdinstall.functions;h=25f84600cb490f3a8635258f0b60d0d8b72ae560;hb=f5510b10ea2ea61c85e7cf73ef7d85e80c969704;hp=a2db832890acbe5fff8a690fa2c96f2b2375670e;hpb=1faa56010afa2f91f1c9ccb0a49789d3d3b811c7;p=dak.git

diff --git a/config/debian/dinstall.functions b/config/debian/dinstall.functions
index a2db8328..25f84600 100644
--- a/config/debian/dinstall.functions
+++ b/config/debian/dinstall.functions
@@ -9,9 +9,25 @@ function remove_daily_lock() {
     rm -f $LOCK_DAILY
 }
 
+# Remove changelog lock
+function remove_changelog_lock() {
+    rm -f $LOCK_CHANGELOG
+}
+
 # Remove all locks
 function remove_all_locks() {
-    rm -f $LOCK_DAILY $LOCK_ACCEPTED $LOCK_NEW
+    rm -f $LOCK_DAILY $LOCK_ACCEPTED
+}
+
+function remove_locks {
+    remove_all_locks
+    trap - EXIT TERM HUP INT QUIT
+    ts "locked part finished"
+}
+
+function lockaccepted {
+    lockfile "$LOCK_ACCEPTED"
+    trap remove_all_locks EXIT TERM HUP INT QUIT
 }
 
 # If we error out this one is called, *FOLLOWED* by cleanup above
@@ -30,7 +46,7 @@ function onerror() {
         cat "${STAGEFILE}.log"
     else
         echo "file ${STAGEFILE}.log does not exist, sorry"
-    fi | mail -s "${subject}" -a "X-Debian: DAK" cron@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
+    fi | mail -s "${subject}" -a "X-Debian: DAK" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
 }
 
 ########################################################################
@@ -40,7 +56,7 @@ function onerror() {
 # pushing merkels QA user, part one
 function qa1() {
     log "Telling QA user that we start dinstall"
-    ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1
+    ssh -n -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1
 }
 
 # Updating various files
@@ -71,7 +87,7 @@ function i18n1() {
             # Lets check!
             if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
                 # Yay, worked, lets copy around
-                for dir in wheezy sid; do
+                for dir in ${extimportdists}; do
                     if [ -d dists/${dir}/ ]; then
                         cd dists/${dir}/main/i18n
                         rsync -aq --delete --delete-after --exclude Translation-en.bz2 --exclude Translation-*.diff/ . ${ftpdir}/dists/${dir}/main/i18n/.
@@ -80,15 +96,43 @@ function i18n1() {
                 done
             else
                 echo "ARRRR, bad guys, wrong files, ARRR"
-                echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
+                echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
             fi
         else
            echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
-            echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
+            echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
         fi
     else
         echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
-        echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -a "X-Debian: DAK" -s "You can't keep blaming yourself. Just blame yourself once, and move on." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
+        echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -a "X-Debian: DAK" -s "You can't keep blaming yourself. Just blame yourself once, and move on." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
+    fi
+}
+
+# Syncing AppStream/DEP-11 data
+function dep11() {
+    log "Synchronizing AppStream metadata"
+    # First sync their newest data
+    local dep11dir="${scriptdir}/dep11"
+    mkdir -p ${dep11dir}
+    cd ${dep11dir}
+    rsync -aq --delete --delete-after dep11-sync:/does/not/matter . || true
+
+    # Lets check!
+    if ${scriptsdir}/dep11-basic-validate.py . ${dep11dir}/; then
+        # Yay, worked, lets copy around
+        for dir in ${extimportdists}; do
+            if [ -d ${dir}/ ]; then
+                for comp in main contrib non-free; do
+                    mkdir -p ${ftpdir}/dists/${dir}/${comp}/dep11
+                    cd ${dir}/${comp}
+                    rsync -aq --delete --delete-after --exclude *.tmp . ${ftpdir}/dists/${dir}/${comp}/dep11/.
+                    cd ${dep11dir}
+                done
+            fi
+        done
+    else
+        echo "ARRRR, bad guys, wrong files, ARRR"
+        echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" mak@debian.org
     fi
 }
 
@@ -100,11 +144,13 @@ function cruft() {
 function dominate() {
     log "Removing obsolete source and binary associations"
     dak dominate
+    dak manage-debug-suites unstable-debug experimental-debug
 }
 
-function filelist() {
-    log "Generating file lists for apt-ftparchive"
-    dak generate-filelist
+function autocruft() {
+    log "Check for obsolete binary packages"
+    dak auto-decruft -s unstable
+    dak auto-decruft -s experimental --if-newer-version-in unstable --if-newer-version-in-rm-msg "NVIU"
 }
 
 function fingerprints() {
@@ -139,21 +185,26 @@ function overrides() {
     log "Writing overrides into text files"
     cd $overridedir
     dak make-overrides
-
-    # FIXME
-    rm -f override.sid.all3
-    for i in main contrib non-free main.debian-installer; do cat override.sid.$i >> override.sid.all3; done
 }
 
 function mpfm() {
+    local archiveroot
+
     log "Generating package / file mapping"
-    dak make-pkg-file-mapping ftp-master | bzip2 -9 > $base/ftp/indices/package-file.map.bz2
+    for archive in "${public_archives[@]}"; do
+        archiveroot="$(get_archiveroot "${archive}")"
+        dak make-pkg-file-mapping "${archive}" | bzip2 -9 > "${archiveroot}/indices/package-file.map.bz2"
+    done
 }
 
 function packages() {
     log "Generating Packages and Sources files"
-    dak generate-packages-sources2 -a ftp-master
-    dak contents generate -a ftp-master
+    for archive in "${public_archives[@]}"; do
+        log "  Generating Packages/Sources for ${archive}"
+        dak generate-packages-sources2 -a "${archive}"
+        log "  Generating Contents for ${archive}"
+        dak contents generate -a "${archive}"
+    done
 }
 
 function pdiff() {
@@ -162,65 +213,69 @@
 }
 
 function release() {
-    # XXX: disable once we can remove i18n/Index (#649314)
-    log "Generating i18n/Index"
-    (
-        cd "$ftpdir/dists";
-        for dist in testing unstable experimental proposed-updates testing-proposed-updates; do
-            $scriptsdir/generate-i18n-Index $dist;
-        done
-    )
     log "Generating Release files"
-    dak generate-releases -a ftp-master
+    for archive in "${public_archives[@]}"; do
+        dak generate-releases -a "${archive}"
+    done
 }
 
 function dakcleanup() {
     log "Cleanup old packages/files"
     dak clean-suites -m 10000
-    # XXX: reactivate once clean-queues is fixed
-    #dak clean-queues
-}
-
-function buildd_dir() {
-    # Rebuilt the buildd dir to avoid long times of 403
-    log "Regenerating the buildd incoming dir"
-    STAMP=$(date "+%Y%m%d%H%M")
-    make_buildd_dir
+    dak clean-queues -i "$unchecked"
 }
 
 function mklslar() {
-    cd $ftpdir
+    local archiveroot
+    local FILENAME=ls-lR
 
-    FILENAME=ls-lR
+    for archive in "${public_archives[@]}"; do
+        archiveroot="$(get_archiveroot "${archive}")"
+        cd "${archiveroot}"
 
-    log "Removing any core files ..."
-    find -type f -name core -print -delete
+        log "Removing any core files ..."
+        find -type f -name core -print -delete
 
-    log "Checking symlinks ..."
-    symlinks -rd .
+        log "Checking symlinks ..."
+        symlinks -rd .
 
-    log "Creating recursive directory listing ... "
-    rm -f ${FILENAME}.gz
-    TZ=UTC ls -lR | gzip -9c --rsyncable > ${FILENAME}.gz
+        log "Creating recursive directory listing ... "
+        rm -f ${FILENAME}.gz
+        TZ=UTC ls -lR | gzip -9c --rsyncable > ${FILENAME}.gz
+    done
 }
 
 function mkmaintainers() {
+    local archiveroot
+    local indices
+
     log 'Creating Maintainers index ... '
-    cd $indices
-    dak make-maintainers ${scriptdir}/masterfiles/pseudo-packages.maintainers
-    gzip -9v --rsyncable Maintainers.gz
-    gzip -9v --rsyncable Uploaders.gz
+    for archive in "${public_archives[@]}"; do
+        archiveroot="$(get_archiveroot "${archive}")"
+        indices="${archiveroot}/indices"
+        if ! [ -d "${indices}" ]; then
+            mkdir "${indices}"
+        fi
+        cd "${indices}"
+
+        dak make-maintainers -a "${archive}" ${scriptdir}/masterfiles/pseudo-packages.maintainers
+        gzip -9v --rsyncable Maintainers.gz
+        gzip -9v --rsyncable Uploaders.gz
+    done
 }
 
 function copyoverrides() {
     log 'Copying override files into public view ...'
 
-    for ofile in ${overridedir}/override.{squeeze,wheezy,sid}.{,extra.}{main,contrib,non-free}*; do
-        bname=${ofile##*/}
-        gzip -9cv --rsyncable ${ofile} > ${indices}/${bname}.gz
-        chmod g+w ${indices}/${bname}.gz
-    done
+    (
+        shopt -s nullglob
+        for ofile in ${overridedir}/override.{squeeze,wheezy,jessie,stretch,sid}.{,extra.}{main,contrib,non-free}*; do
+            bname=${ofile##*/}
+            gzip -9cv --rsyncable ${ofile} > ${indices}/${bname}.gz
+            chmod g+w ${indices}/${bname}.gz
+        done
+    )
 }
 
 function mkfilesindices() {
@@ -304,7 +359,7 @@ function mkfilesindices() {
     psql -F' ' -A -t -c "$query"
 
     query="
-    SELECT './pool/' || c.name || '/' || f.filename)
+    SELECT './pool/' || c.name || '/' || f.filename
     FROM bin_associations ba
       JOIN binaries b ON ba.bin = b.id
       JOIN files f ON b.file = f.id
@@ -347,12 +402,12 @@ function mkfilesindices() {
     done
 
     (cd $base/ftp/
-        for dist in sid wheezy; do
+        for dist in sid jessie stretch; do
            find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
        done
     )
 
-    (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-wheezy.list.gz) |
+    (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-jessie.list.gz ; zcat translation-stretch.list.gz) |
     sort -u | poolfirst > ../typical.files
 
     rm -f $ARCHLIST
@@ -361,32 +416,48 @@
 }
 
 function mkchecksums() {
-    dsynclist=$dbdir/dsync.list
-    md5list=$indices/md5sums
+    local archiveroot dsynclist md5list
 
-    log -n "Creating md5 / dsync index file ... "
+    for archive in "${public_archives[@]}"; do
+        archiveroot="$(get_archiveroot "${archive}")"
+        dsynclist=$dbdir/dsync.${archive}.list
+        md5list=${archiveroot}/indices/md5sums
 
-    cd "$ftpdir"
-    ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
-    ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
-    ${bindir}/dsync-flist -q link-dups $dsynclist || true
+        log -n "Creating md5 / dsync index file for ${archive}... "
+
+        cd "$archiveroot"
+        ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
+        ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
+        ${bindir}/dsync-flist -q link-dups $dsynclist || true
+    done
 }
 
 function mirror() {
-    log "Regenerating \"public\" mirror/ hardlink fun"
-    DATE_SERIAL=$(date +"%Y%m%d01")
-    FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} )
-    if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
-        SERIAL="$DATE_SERIAL"
-    else
-        SERIAL="$FILESOAPLUS1"
-    fi
-    date -u > ${TRACEFILE}
-    echo "Using dak v1" >> ${TRACEFILE}
-    echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
-    echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
-    cd ${mirrordir}
-    rsync -aH --link-dest ${ftpdir} --delete --delete-after --delete-excluded --exclude Packages.*.new --exclude Sources.*.new --ignore-errors ${ftpdir}/. .
+    local archiveroot targetpath TRACEFILE
+
+    for archive in "${public_archives[@]}"; do
+        archiveroot="$(get_archiveroot "${archive}")"
+        targetpath="${mirrordir}/${archive}"
+        TRACEFILE="${archiveroot}/project/trace/ftp-master.debian.org"
+        mkdir -p "${archiveroot}/project/trace/"
+
+        log "Regenerating \"public\" mirror/${archive} hardlink fun"
+        DATE_SERIAL=$(date +"%Y%m%d01")
+        FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} || echo ${DATE_SERIAL} )
+        if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
+            SERIAL="$DATE_SERIAL"
+        else
+            SERIAL="$FILESOAPLUS1"
+        fi
+        date -u > ${TRACEFILE}
+        echo "Using dak v1" >> ${TRACEFILE}
+        echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
+        echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
+
+        mkdir -p ${targetpath}
+        cd ${targetpath}
+        rsync -aH --link-dest ${archiveroot} --delete --delete-after --delete-excluded --exclude Packages.*.new --exclude Sources.*.new --ignore-errors ${archiveroot}/. .
+    done
 }
 
 function expire() {
@@ -402,74 +473,103 @@ function transitionsclean() {
 }
 
 function dm() {
-    log "Updating DM html page"
-    $scriptsdir/dm-monitor >$webdir/dm-uploaders.html
+    log "Updating DM permissions page"
+    dak acl export-per-source dm >$exportdir/dm.txt
 }
 
 function bts() {
     log "Categorizing uncategorized bugs filed against ftp.debian.org"
-    dak bts-categorize
+    sudo -u dak-unpriv dak bts-categorize
 }
 
 function ddaccess() {
     # Tell our dd accessible mirror to sync itself up. Including ftp dir.
     log "Trigger dd accessible parts sync including ftp dir"
-    ssh -o Batchmode=yes -o ConnectTimeout=30 -o SetupTimeout=30 -2 -i ${base}/s3kr1t/pushddmirror dak@ries.debian.org pool
+    ${scriptsdir}/sync-dd dd-sync dd-sync1 dd-sync2 sync
 }
 
 function mirrorpush() {
-    log "Checking the public archive copy"
-    cd ${mirrordir}/dists
-
-    broken=0
-    for release in $(find . -name "InRelease"); do
-        echo "Processing: ${release}"
-        subdir=${release%/InRelease}
-        while read SHASUM SIZE NAME; do
-            if ! [ -f "${subdir}/${NAME}" ]; then
-                bname=$(basename ${NAME})
-                if [[ "${bname}" =~ ^(Packages|Sources|Translation-[a-zA-Z_]+)$ ]]; then
-                    # We don't keep unpacked files, don't check for their existance.
-                    # We might want to go and check their unpacked shasum, but right now
-                    # I don't care. I believe it should be enough if all the packed shasums
-                    # match.
+    log "Checking the public archive copies..."
+
+    local archiveroot targetpath
+
+    for archive in "${public_archives[@]}"; do
+        log "... archive: ${archive}"
+        archiveroot="$(get_archiveroot "${archive}")"
+        targetpath="${mirrordir}/${archive}"
+        cd ${archiveroot}/dists
+
+        broken=0
+        for release in $(find . -name "InRelease"); do
+            echo "Processing: ${release}"
+            subdir=${release%/InRelease}
+            while read SHASUM SIZE NAME; do
+                if ! [ -f "${subdir}/${NAME}" ]; then
+                    bname=$(basename ${NAME})
+                    if [[ "${bname}" =~ ^(Packages|Sources|Contents-[a-zA-Z0-9-]+|Translation-[a-zA-Z_]+|Components-(amd64|i386)\.yml|icons-(128x128|64x64)\.tar)$ ]]; then
+
+                        # We don't keep unpacked files, don't check for their existance.
+                        # We might want to go and check their unpacked shasum, but right now
+                        # I don't care. I believe it should be enough if all the packed shasums
+                        # match.
+                        continue
+                    fi
+                    broken=$(( broken + 1 ))
+                    echo "File ${subdir}/${NAME} is missing"
                     continue
                 fi
-                broken=$(( broken + 1 ))
-                echo "File ${subdir}/${NAME} is missing"
-                continue
-            fi
 
-            # We do have symlinks in the tree (see the contents files currently).
-            # So we use "readlink -f" to check the size of the target, as thats basically
-            # what gen-releases does
-            fsize=$(stat -c %s $(readlink -f "${subdir}/${NAME}"))
-            if [ ${fsize} -ne ${SIZE} ]; then
-                broken=$(( broken + 1 ))
-                echo "File ${subdir}/${NAME} has size ${fsize}, expected is ${SIZE}"
-                continue
-            fi
+                # We do have symlinks in the tree (see the contents files currently).
+                # So we use "readlink -f" to check the size of the target, as thats basically
+                # what gen-releases does
+                fsize=$(stat -c %s $(readlink -f "${subdir}/${NAME}"))
+                if [ ${fsize} -ne ${SIZE} ]; then
+                    broken=$(( broken + 1 ))
+                    echo "File ${subdir}/${NAME} has size ${fsize}, expected is ${SIZE}"
+                    continue
+                fi
 
-            fshasum=$(sha1sum $(readlink -f "${subdir}/${NAME}"))
-            fshasum=${fshasum%% *}
-            if [ "${fshasum}" != "${SHASUM}" ]; then
-                broken=$(( broken + 1 ))
-                echo "File ${subdir}/${NAME} has checksum ${fshasum}, expected is ${SHASUM}"
-                continue
-            fi
-        done < <(sed '1,/SHA1:/d' "${release}" | sed '/SHA256:/,$d')
-    done
+                fshasum=$(sha1sum $(readlink -f "${subdir}/${NAME}"))
+                fshasum=${fshasum%% *}
+                if [ "${fshasum}" != "${SHASUM}" ]; then
+                    broken=$(( broken + 1 ))
+                    echo "File ${subdir}/${NAME} has checksum ${fshasum}, expected is ${SHASUM}"
+                    continue
+                fi
+            done < <(sed '1,/SHA1:/d' "${release}" | sed '/SHA256:/,$d')
+        done
 
-    if [ $broken -gt 0 ]; then
-        log_error "Trouble with the public mirror, found ${broken} errors"
-        return 21
-    fi
+        if [ $broken -gt 0 ]; then
+            log_error "Trouble with the public mirror for ${archive}, found ${broken} errors"
+            continue
+        else
+            log "Starting the mirrorpush for ${archive}"
+            case ${archive} in
+                ftp-master)
+                    fname="mirrorstart"
+                    pusharg=""
+                    ;;
+                debian-debug)
+                    pusharg="-a debug"
+                    ;;&
+                backports)
+                    pusharg="-a backports"
+                    ;;&
+                *)
+                    fname="mirrorstart.${archive}"
+                    ;;
            esac
+            date -u > /srv/ftp.debian.org/web/${fname}
+            echo "Using dak v1" >> /srv/ftp.debian.org/web/${fname}
+            echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/${fname}
+            sudo -H -u archvsync /home/archvsync/runmirrors ${pusharg} > ~dak/runmirrors-${archive}.log 2>&1 &
+        fi
+    done
+}
 
-    log "Starting the mirrorpush"
-    date -u > /srv/ftp.debian.org/web/mirrorstart
-    echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
-    echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/mirrorstart
-    sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
+function mirrorpush-backports() {
+    log "Syncing backports mirror"
+    sudo -u backports /home/backports/bin/update-archive
 }
 
 function i18n2() {
@@ -484,7 +584,7 @@ function i18n2() {
         dak control-suite -l ${suite} >${codename}
     done
     echo "${STAMP}" > timestamp
-    gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 55BE302B --detach-sign -o timestamp.gpg timestamp
+    gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 473041FA --detach-sign -o timestamp.gpg timestamp
     rm -f md5sum
     md5sum * > md5sum
     cd ${webdir}/
@@ -503,16 +603,10 @@ function stats() {
     dak stats pkg-nums > $webdir/pkg-nums
 }
 
-function aptftpcleanup() {
-    log "Clean up apt-ftparchive's databases"
-    cd $configdir
-    apt-ftparchive -q clean apt.conf
-}
-
 function cleantransactions() {
     log "Cleanup transaction ids older than 3 months"
     cd $base/backup/
-    find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -print0 | xargs -0 --no-run-if-empty rm
+    find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -delete
 }
 
 function logstats() {
@@ -521,42 +615,18 @@
 
 # save timestamp when we start
 function savetimestamp() {
-    NOW=`date "+%Y.%m.%d-%H:%M:%S"`
-    echo ${NOW} > "${dbdir}/dinstallstart"
+    NOW=`date "+%Y.%m.%d-%H:%M:%S"`
+    echo ${NOW} > "${dbdir}/dinstallstart"
 }
 
 function maillogfile() {
-    cat "$LOGFILE" | mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" cron@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
-}
-
-function renamelogfile() {
-    if [ -f "${dbdir}/dinstallstart" ]; then
-        NOW=$(cat "${dbdir}/dinstallstart")
-# maillogfile
-        mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
-        logstats "$logdir/dinstall_${NOW}.log"
-        bzip2 -9 "$logdir/dinstall_${NOW}.log"
-    else
-        error "Problem, I don't know when dinstall started, unable to do log statistics."
-        NOW=`date "+%Y.%m.%d-%H:%M:%S"`
-# maillogfile
-        mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
-        bzip2 -9 "$logdir/dinstall_${NOW}.log"
-    fi
+    cat "$LOGFILE" | mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
 }
 
 function testingsourcelist() {
     dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
 }
-
-# do a last run of process-unchecked before dinstall is on.
-function process_unchecked() {
-    log "Processing the unchecked queue"
-    UNCHECKED_WITHOUT_LOCK="-p"
-    do_unchecked
-    sync_debbugs
-}
-
 # Function to update a "statefile" telling people what we are doing
 # (more or less).
 #
@@ -574,43 +644,21 @@ EOF
 
 # extract changelogs and stuff
 function changelogs() {
-    log "Extracting changelogs"
-    dak make-changelog -e
-    mkdir -p ${exportpublic}/changelogs
-    cd ${exportpublic}/changelogs
-    rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
-    sudo -H -u archvsync /home/archvsync/runmirrors metaftpdo > ~dak/runmirrors-metadata.log 2>&1 &
-}
-
-function gitpdiff() {
-    # Might be that we want to change this to have more than one git repository.
-    # Advantage of one is that we do not need much space in terms of storage in git itself,
-    # git gc is pretty good on our input.
-    # But it might be faster. Well, lets test.
-    log "Adjusting the git tree for pdiffs"
-    cd ${dbdir}/git/git/
-
-    # The regex needs the architectures seperated with \|
-    garchs=$(dak admin a list|sed -e ':q;N;s/\n/\\|/g;t q')
-
-    # First, get all the files we want to work on. ../dists/ is a symlink to the real dists/ we
-    # want to work with.
-    # Also, we only want contents, packages and sources.
- for file in $(find ../dists/ -regex ".*/\(Contents-\($archs\)\|\(Packages\|Sources\)\).gz"); do - log "${file}" - basen=${file%%.gz}; - basen=${basen##../}; - dir=${basen%/*}; - mkdir -p $dir; - zcat $file > $basen; - done - - # Second, add all there is into git - cd dists - git add . - # Maybe we want to make this the same for tag and commit? But well, shouldn't matter - COMD=$(date -Is) - TAGD=$(date +%Y-%m-%d-%H-%M) - git commit -m "Commit of ${COMD}" - git tag "${TAGD}" - } + if lockfile -r3 $LOCK_CHANGELOG; then + log "Extracting changelogs" + dak make-changelog -e -a ftp-master + [ -f ${exportdir}/changelogs/filelist.yaml ] && xz -f ${exportdir}/changelogs/filelist.yaml + mkdir -p ${exportpublic}/changelogs + cd ${exportpublic}/changelogs + rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. . + sudo -H -u staticsync /usr/local/bin/static-update-component metadata.ftp-master.debian.org >/dev/null 2>&1 & + + dak make-changelog -e -a backports + [ -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml ] && xz -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml + mkdir -p /srv/backports-master.debian.org/rsync/export/changelogs + cd /srv/backports-master.debian.org/rsync/export/changelogs + rsync -aHW --delete --delete-after --ignore-errors /srv/backports-master.debian.org/export/changelogs/. . + remove_changelog_lock + trap remove_changelog_lock EXIT TERM HUP INT QUIT + fi +}