# Remove all locks
function remove_all_locks() {
- rm -f $LOCK_DAILY $LOCK_ACCEPTED $LOCK_NEW
+ rm -f $LOCK_DAILY $LOCK_ACCEPTED
}
# If we error out, this one is called, *FOLLOWED* by the cleanup above
}
function mpfm() {
+ local archiveroot
+
log "Generating package / file mapping"
- dak make-pkg-file-mapping ftp-master | bzip2 -9 > $base/ftp/indices/package-file.map.bz2
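+ # One package/file map per public archive instead of only ftp-master.
+ # get_archiveroot is assumed to be a helper defined alongside these
+ # functions that prints the filesystem root for a given archive name.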
+ for archive in "${public_archives[@]}"; do
+ archiveroot="$(get_archiveroot "${archive}")"
+ dak make-pkg-file-mapping "${archive}" | bzip2 -9 > "${archiveroot}/indices/package-file.map.bz2"
+ done
}
function packages() {
log "Generating Packages and Sources files"
- dak generate-packages-sources2 -a ftp-master
- dak contents generate -a ftp-master
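+ # Same pattern: Packages/Sources and Contents are generated for every
+ # public archive, not just ftp-master.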
+ for archive in "${public_archives[@]}"; do
+ dak generate-packages-sources2 -a "${archive}"
+ dak contents generate -a "${archive}"
+ done
}
function pdiff() {
$scriptsdir/generate-i18n-Index $dist;
done
)
+
log "Generating Release files"
- dak generate-releases -a ftp-master
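+ # Release files likewise become per-archive.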
+ for archive in "${public_archives[@]}"; do
+ dak generate-releases -a "${archive}"
+ done
}
function dakcleanup() {
log "Cleanup old packages/files"
dak clean-suites -m 10000
- # XXX: reactivate once clean-queues is fixed
- #dak clean-queues
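+ # clean-queues works again; -i points it at the unchecked queue
+ # directory ($unchecked is assumed to be set by the surrounding config).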
+ dak clean-queues -i "$unchecked"
}
function buildd_dir() {
}
function mklslar() {
- cd $ftpdir
+ local archiveroot
+ local FILENAME=ls-lR
- FILENAME=ls-lR
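+ # One ls-lR listing per public archive rather than a single one in $ftpdir.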
+ for archive in "${public_archives[@]}"; do
+ archiveroot="$(get_archiveroot "${archive}")"
+ cd "${archiveroot}"
- log "Removing any core files ..."
- find -type f -name core -print -delete
+ log "Removing any core files ..."
+ find -type f -name core -print -delete
- log "Checking symlinks ..."
- symlinks -rd .
+ log "Checking symlinks ..."
+ symlinks -rd .
- log "Creating recursive directory listing ... "
- rm -f ${FILENAME}.gz
- TZ=UTC ls -lR | gzip -9c --rsyncable > ${FILENAME}.gz
+ log "Creating recursive directory listing ... "
+ rm -f ${FILENAME}.gz
+ TZ=UTC ls -lR | gzip -9c --rsyncable > ${FILENAME}.gz
+ done
}
function mkmaintainers() {
+ local archiveroot
+ local indices
+
log 'Creating Maintainers index ... '
- cd $indices
- dak make-maintainers ${scriptdir}/masterfiles/pseudo-packages.maintainers
- gzip -9v --rsyncable <Maintainers >Maintainers.gz
- gzip -9v --rsyncable <Uploaders >Uploaders.gz
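+ # Per-archive Maintainers/Uploaders indices: create the indices dir on
+ # first run and scope make-maintainers to one archive via -a.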
+ for archive in "${public_archives[@]}"; do
+ archiveroot="$(get_archiveroot "${archive}")"
+ indices="${archiveroot}/indices"
+ if ! [ -d "${indices}" ]; then
+ mkdir "${indices}"
+ fi
+ cd "${indices}"
+
+ dak make-maintainers -a "${archive}" ${scriptdir}/masterfiles/pseudo-packages.maintainers
+ gzip -9v --rsyncable <Maintainers >Maintainers.gz
+ gzip -9v --rsyncable <Uploaders >Uploaders.gz
+ done
}
function copyoverrides() {
log "Querying postgres"
local query="
- SELECT CONCAT('./pool/', c.name, '/', f.filename) AS path, a.arch_string AS arch_string
+ SELECT './pool/' || c.name || '/' || f.filename AS path, a.arch_string AS arch_string
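+ -- '||' is standard SQL concatenation; CONCAT() needs PostgreSQL >= 9.1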
FROM files f
JOIN files_archive_map af ON f.id = af.file_id
JOIN component c ON af.component_id = c.id
local suite_id="$(printf %d $1)"
local query
query="
- SELECT DISTINCT CONCAT('./pool/', c.name, '/', f.filename)
+ SELECT DISTINCT './pool/' || c.name || '/' || f.filename
FROM
(SELECT sa.source AS source
FROM src_associations sa
psql -F' ' -A -t -c "$query"
query="
- SELECT CONCAT('./pool/', c.name, '/', f.filename)
+ SELECT './pool/' || c.name || '/' || f.filename
FROM bin_associations ba
JOIN binaries b ON ba.bin = b.id
JOIN files f ON b.file = f.id
}
function mkchecksums() {
- dsynclist=$dbdir/dsync.list
- md5list=$indices/md5sums
+ local archiveroot dsynclist md5list
+
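+ # One md5/dsync index per archive; the dsync list lives under $dbdir with
+ # the archive name in it so runs do not clobber each other.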
+ for archive in "${public_archives[@]}"; do
+ archiveroot="$(get_archiveroot "${archive}")"
+ dsynclist=$dbdir/dsync.${archive}.list
+ md5list=${archiveroot}/indices/md5sums
- log -n "Creating md5 / dsync index file ... "
+ log -n "Creating md5 / dsync index file for ${archive} ... "
- cd "$ftpdir"
- ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
- ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
- ${bindir}/dsync-flist -q link-dups $dsynclist || true
+ cd "$archiveroot"
+ ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
+ ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
+ ${bindir}/dsync-flist -q link-dups $dsynclist || true
+ done
}
function mirror() {
+ local archiveroot
+
log "Regenerating \"public\" mirror/ hardlink fun"
DATE_SERIAL=$(date +"%Y%m%d01")
FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} )
echo "Using dak v1" >> ${TRACEFILE}
echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
- cd ${mirrordir}
- rsync -aH --link-dest ${ftpdir} --delete --delete-after --delete-excluded --exclude Packages.*.new --exclude Sources.*.new --ignore-errors ${ftpdir}/. .
+
+ # Ugly "hack", but hey, it does what we want: the backports tree gets a copy of the same trace file.
+ cp ${TRACEFILE} ${TRACEFILE_BDO}
+
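+ # Hardlink a mirror tree next to each archive root; rsync's --link-dest
+ # keeps unchanged files as hardlinks into the live archive.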
+ for archive in "${public_archives[@]}"; do
+ archiveroot="$(get_archiveroot "${archive}")"
+ mirrordir="${archiveroot}/../mirror"
+ cd ${mirrordir}
+ rsync -aH --link-dest ${archiveroot} --delete --delete-after --delete-excluded --exclude Packages.*.new --exclude Sources.*.new --ignore-errors ${archiveroot}/. .
+ done
}
function expire() {
}
function dm() {
- log "Updating DM html page"
- $scriptsdir/dm-monitor >$webdir/dm-uploaders.html
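+ # Export DM permissions as a plain-text ACL dump instead of the old
+ # dm-monitor generated HTML page.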
+ log "Updating DM permissions page"
+ dak acl export-per-source dm >$exportdir/dm.txt
}
function bts() {
function ddaccess() {
# Tell our dd-accessible mirror to sync itself up, including the ftp dir.
log "Trigger dd accessible parts sync including ftp dir"
- ssh -o Batchmode=yes -o ConnectTimeout=30 -o SetupTimeout=30 -2 -i ${base}/s3kr1t/pushddmirror dak@ries.debian.org pool
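+ # sync-dd is assumed to wrap the old ssh push (plus its locking); the
+ # ries-sync* arguments name the locks/targets it uses.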
+ ${scriptsdir}/sync-dd ries-sync ries-sync1 ries-sync2 pool
}
function mirrorpush() {
echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/mirrorstart
sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
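+ # Second runmirrors pass to trigger the mirror push for backports.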
+ sudo -H -u archvsync /home/archvsync/runmirrors -a backports > ~dak/runmirrorsbpo.log 2>&1 &
}
function i18n2() {
dak control-suite -l ${suite} >${codename}
done
echo "${STAMP}" > timestamp
- gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 55BE302B --detach-sign -o timestamp.gpg timestamp
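+ # Same signing call, only the archive signing key id changed.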
+ gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 473041FA --detach-sign -o timestamp.gpg timestamp
rm -f md5sum
md5sum * > md5sum
cd ${webdir}/
# Extract changelogs and sync them over to the public export area
function changelogs() {
log "Extracting changelogs"
- dak make-changelog -e
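+ # make-changelog now wants an explicit archive; only ftp-master is
+ # exported here.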
+ dak make-changelog -e -a ftp-master
mkdir -p ${exportpublic}/changelogs
cd ${exportpublic}/changelogs
rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .