########################################################################
-# pushing merkels QA user, part one
+# pushing the QA user, part one
-function merkel1() {
- log "Telling merkels QA user that we start dinstall"
+function qa1() {
+ log "Telling QA user that we start dinstall"
ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1
}
-# Create the postgres dump files
-function pgdump_pre() {
- log "Creating pre-daily-cron-job backup of $PGDATABASE database..."
- pg_dump > $base/backup/dump_pre_$(date +%Y.%m.%d-%H:%M:%S)
-}
-
-function pgdump_post() {
- log "Creating post-daily-cron-job backup of $PGDATABASE database..."
- cd $base/backup
- POSTDUMP=$(date +%Y.%m.%d-%H:%M:%S)
- pg_dump > $base/backup/dump_$POSTDUMP
- #pg_dumpall --globals-only > $base/backup/dumpall_$POSTDUMP
- ln -sf $base/backup/dump_$POSTDUMP current
- #ln -sf $base/backup/dumpall_$POSTDUMP currentall
-}
-
-# Load the dak-dev projectb
-function pgdakdev() {
- # Make sure to unset any possible psql variables so we don't drop the wrong
- # f****** database by accident
- local PGDATABASE
- unset PGDATABASE
- local PGHOST
- unset PGHOST
- local PGPORT
- unset PGPORT
- local PGUSER
- unset PGUSER
- cd $base/backup
- echo "drop database projectb" | psql -p 5434 template1
- #cat currentall | psql -p 5433 template1
- createdb -p 5434 -T template1 projectb
- fgrep -v '\connect' current | psql -p 5434 projectb
-}
-
# Updating various files
function updates() {
log "Updating Bugs docu, Mirror list and mailing-lists.txt"
}
function fingerprints() {
- log "Not updating fingerprints - scripts needs checking"
-
log "Updating fingerprints"
dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
function packages() {
log "Generating Packages and Sources files"
- cd $configdir
- #apt-ftparchive generate apt.conf
- dak generate-packages-sources
- #TEST: try experimental
- dak contents -s experimental generate
+ #cd $configdir
+ #dak generate-packages-sources
+ dak generate-packages-sources2
+ dak contents generate
}
function pdiff() {
log 'Creating Maintainers index ... '
cd $indices
- dak make-maintainers ${scriptdir}/masterfiles/pseudo-packages.maintainers | \
- sed -e "s/~[^ ]*\([ ]\)/\1/" | \
- awk '{printf "%-20s ", $1; for (i=2; i<=NF; i++) printf "%s ", $i; printf "\n";}' > .new-maintainers
-
- if ! cmp -s .new-maintainers Maintainers || [ ! -f Maintainers ]; then
- log "installing Maintainers ... "
- mv -f .new-maintainers Maintainers
- gzip -9v <Maintainers >.new-maintainers.gz
- mv -f .new-maintainers.gz Maintainers.gz
- else
- rm -f .new-maintainers
- fi
-}
-
-function mkuploaders() {
- log 'Creating Uploaders index ... '
-
- cd $indices
- dak make-maintainers -u ${scriptdir}/masterfiles/pseudo-packages.maintainers | \
- sed -e "s/~[^ ]*\([ ]\)/\1/" | \
- awk '{printf "%-20s ", $1; for (i=2; i<=NF; i++) printf "%s ", $i; printf "\n";}' > .new-uploaders
-
- if ! cmp -s .new-uploaders Uploaders || [ ! -f Uploaders ]; then
- log "installing Uploaders ... "
- mv -f .new-uploaders Uploaders
- gzip -9v <Uploaders >.new-uploaders.gz
- mv -f .new-uploaders.gz Uploaders.gz
- else
- rm -f .new-uploaders
- fi
+ dak make-maintainers ${scriptdir}/masterfiles/pseudo-packages.maintainers
+ gzip -9v --rsyncable <Maintainers >Maintainers.gz
+ gzip -9v --rsyncable <Uploaders >Uploaders.gz
}
function copyoverrides() {
ARCHLIST=$(tempfile)
- log "Querying $PGDATABASE..."
+ log "Querying postgres"
echo 'SELECT l.path, f.filename, a.arch_string FROM location l JOIN files f ON (f.location = l.id) LEFT OUTER JOIN (binaries b JOIN architecture a ON (b.architecture = a.id)) ON (f.id = b.file)' | psql -At | sed 's/|//;s,^/srv/ftp-master.debian.org/ftp,.,' | sort >$ARCHLIST
includedirs () {
echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
cd ${mirrordir}
- rsync -aH --link-dest ${ftpdir} --delete --delete-after --ignore-errors ${ftpdir}/. .
+ rsync -aH --link-dest ${ftpdir} --delete --delete-after --delete-excluded --exclude Packages.*.new --exclude Sources.*.new --ignore-errors ${ftpdir}/. .
}
function expire() {
dak bts-categorize
}
-function merkel2() {
- # Push dak@merkel so it syncs the projectb there. Returns immediately, the sync runs detached
- log "Trigger merkel/flotows $PGDATABASE sync"
- ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_projectb dak@merkel.debian.org sleep 1
- # Also trigger flotow, the ftpmaster test box
- ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_flotow_projectb dak@flotow.debconf.org sleep 1
-}
-
-function merkel3() {
- # Push dak@merkel to tell it to sync the dd accessible parts. Returns immediately, the sync runs detached
- log "Trigger merkels dd accessible parts sync"
- ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_ddaccess dak@merkel.debian.org sleep 1
+function ddaccess() {
+ # Tell our dd accessible mirror to sync itself up. Including ftp dir.
+ log "Trigger dd accessible parts sync including ftp dir"
+ ssh -o BatchMode=yes -o ConnectTimeout=30 -o SetupTimeout=30 -2 -i ${base}/s3kr1t/pushddmirror dak@ries.debian.org pool
}
function mirrorpush() {
+ log "Checking the public archive copy"
+ cd ${mirrordir}/dists
+
+ broken=0
+ for release in $(find . -name "InRelease"); do
+ echo "Processing: ${release}"
+ subdir=${release%/InRelease}
+ while read -r SHASUM SIZE NAME; do
+ if ! [ -f "${subdir}/${NAME}" ]; then
+ bname=$(basename "${NAME}")
+ if [ "${bname}" = "Packages" ] || [ "${bname}" = "Sources" ]; then
+ # We don't keep unpacked files, don't check for their existence.
+ # We might want to go and check their unpacked shasum, but right now
+ # I don't care. I believe it should be enough if all the packed shasums
+ # match.
+ continue
+ fi
+ broken=$(( broken + 1 ))
+ echo "File ${subdir}/${NAME} is missing"
+ continue
+ fi
+
+ # We do have symlinks in the tree (see the contents files currently).
+ # So we use "readlink -f" to check the size of the target, as that's basically
+ # what gen-releases does
+ fsize=$(stat -c %s "$(readlink -f "${subdir}/${NAME}")")
+ if [ ${fsize} -ne ${SIZE} ]; then
+ broken=$(( broken + 1 ))
+ echo "File ${subdir}/${NAME} has size ${fsize}, expected is ${SIZE}"
+ continue
+ fi
+
+ fshasum=$(sha1sum "$(readlink -f "${subdir}/${NAME}")")
+ fshasum=${fshasum%% *}
+ if [ "${fshasum}" != "${SHASUM}" ]; then
+ broken=$(( broken + 1 ))
+ echo "File ${subdir}/${NAME} has checksum ${fshasum}, expected is ${SHASUM}"
+ continue
+ fi
+ done < <(sed '1,/SHA1:/d' "${release}" | sed '/SHA256:/,$d')
+ done
+
+ if [ $broken -gt 0 ]; then
+ log_error "Trouble with the public mirror, found ${broken} errors"
+ return 21
+ fi
+
log "Starting the mirrorpush"
date -u > /srv/ftp.debian.org/web/mirrorstart
echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
apt-ftparchive -q clean apt.conf
}
-function compress() {
- log "Compress old psql backups"
+function cleantransactions() {
+ log "Cleanup transaction ids older than 3 months"
cd $base/backup/
- find -maxdepth 1 -mindepth 1 -type f -name 'dump_pre_*' -mtime +2 -print0 | xargs -0 --no-run-if-empty rm
-
- find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' \! -name '*.xz' -mmin +720 -print0 \
- | xargs --no-run-if-empty -0 -P4 -n16 xz -9v
-
- find -maxdepth 1 -mindepth 1 -type f -name "dumpall_*" \! -name '*.bz2' \! -name '*.gz' \! -name '*.xz' -mmin +720 \
- | xargs --no-run-if-empty -0 -P4 -n16 xz -9v
- finddup -l -d $base/backup
+ find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -print0 | xargs -0 --no-run-if-empty rm
}
function logstats() {
rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
sudo -H -u archvsync /home/archvsync/runmirrors metaftpdo > ~dak/runmirrors-metadata.log 2>&1 &
}
+
+function gitpdiff() {
+ # Might be that we want to change this to have more than one git repository.
+ # Advantage of one is that we do not need much space in terms of storage in git itself,
+ # git gc is pretty good on our input.
+ # But it might be faster. Well, lets test.
+ log "Adjusting the git tree for pdiffs"
+ cd ${dbdir}/git/git/
+
+ # The regex needs the architectures separated with \|
+ garchs=$(dak admin a list|sed -e ':q;N;s/\n/\\|/g;t q')
+
+ # First, get all the files we want to work on. ../dists/ is a symlink to the real dists/ we
+ # want to work with.
+ # Also, we only want contents, packages and sources.
+ for file in $(find ../dists/ -regex ".*/\(Contents-\(${garchs}\)\|\(Packages\|Sources\)\)\.gz"); do
+ log "${file}"
+ basen=${file%%.gz};
+ basen=${basen##../};
+ dir=${basen%/*};
+ mkdir -p "$dir";
+ zcat "$file" > "$basen";
+ done
+
+ # Second, add all there is into git
+ cd dists
+ git add .
+ # Maybe we want to make this the same for tag and commit? But well, shouldn't matter
+ COMD=$(date -Is)
+ TAGD=$(date +%Y-%m-%d-%H-%M)
+ git commit -m "Commit of ${COMD}"
+ git tag "${TAGD}"
+}