# Create the postgres dump files
function pgdump_pre() {
log "Creating pre-daily-cron-job backup of projectb database..."
- pg_dump projectb > $base/backup/dump_$(date +%Y.%m.%d-%H:%M:%S)
+ pg_dump projectb > $base/backup/dump_pre_$(date +%Y.%m.%d-%H:%M:%S)
}
function pgdump_post() {
function dakcleanup() {
log "Cleanup old packages/files"
- dak clean-suites
+ dak clean-suites -m 10000
dak clean-queues
}
apt-ftparchive generate apt.conf.buildd
}
+function buildd_dir() {
+ # Rebuild the buildd dir to avoid long periods of 403
+ log "Regenerating the buildd incoming dir"
+ STAMP=$(date "+%Y%m%d%H%M")
+ make_buildd_dir
+}
+
function scripts() {
log "Running various scripts from $scriptsdir"
cd $scriptsdir
$scriptsdir/expire_dumps -d . -p -f "dump_*"
}
+function transitionsclean() {
+ log "Removing out of date transitions..."
+ cd $base
+ dak transitions -c -a
+}
+
function reports() {
# Send a report on NEW/BYHAND packages
log "Nagging ftpteam about NEW/BYHAND packages"
function merkel2() {
# Push dak@merkel so it syncs the projectb there. Returns immediately, the sync runs detached
- log "Trigger merkels projectb sync"
+ log "Trigger merkel/flotows projectb sync"
ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_projectb dak@merkel.debian.org sleep 1
+ # Also trigger flotow, the ftpmaster test box
+ ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_flotow_projectb dak@flotow.debconf.org sleep 1
+}
+
+function merkel3() {
+ # Push dak@merkel to tell it to sync the dd accessible parts. Returns immediately, the sync runs detached
+ log "Trigger merkels dd accessible parts sync"
+ ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_ddaccess dak@merkel.debian.org sleep 1
}
function runparts() {
cd $configdir
$scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
R --slave --vanilla < $base/misc/ftpstats.R
+ dak stats arch-space > $webdir/arch-space
+ dak stats pkg-nums > $webdir/pkg-nums
}
function aptftpcleanup() {
function compress() {
log "Compress old psql backups"
cd $base/backup/
- find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' -mtime +1 |
+ find -maxdepth 1 -mindepth 1 -type f -name 'dump_pre_*' -mtime +2 -print0 | xargs -0 --no-run-if-empty rm
+
+ find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' -mmin 720 |
+ while read dumpname; do
+ echo "Compressing $dumpname"
+ bzip2 -9v "$dumpname"
+ done
+ find -maxdepth 1 -mindepth 1 -type f -name "dumpall_*" \! -name '*.bz2' \! -name '*.gz' -mmin 720 |
while read dumpname; do
echo "Compressing $dumpname"
bzip2 -9v "$dumpname"
done
+ finddup -l -d $base/backup
}
function logstats() {
)
stage $GO
+GO=(
+ FUNC="buildd_dir"
+ TIME="buildd_dir"
+ ARGS=""
+ ERR="false"
+)
+stage $GO
+
GO=(
FUNC="cruft"
TIME="cruft"
stage $GO
GO=(
- FUNC="pgdakdev"
- TIME="dak-dev db"
+ FUNC="expire"
+ TIME="expire_dumps"
ARGS=""
- ERR="false"
+ ERR=""
)
stage $GO
GO=(
- FUNC="expire"
- TIME="expire_dumps"
+ FUNC="transitionsclean"
+ TIME="transitionsclean"
ARGS=""
ERR=""
)
)
stage $GO
-ulimit -m 90000 -d 90000 -s 10000 -v 200000
-
GO=(
FUNC="runparts"
TIME="run-parts"
ARGS=""
ERR="false"
)
+stage $GO
rm -f ${LOCK_BRITNEY}
+GO=(
+ FUNC="pgdakdev"
+ TIME="dak-dev db"
+ ARGS=""
+ ERR="false"
+)
+stage $GO
+
GO=(
FUNC="aptftpcleanup"
TIME="apt-ftparchive cleanup"
ARGS=""
- ERR=""
+ ERR="false"
+)
+stage $GO
+
+GO=(
+ FUNC="merkel3"
+ TIME="merkel ddaccessible sync"
+ ARGS=""
+ ERR="false"
)
stage $GO