no longer dump the full database on each dinstall run. instead we just
save a transaction id. DSA is doing the backup dumps in whatever cycle,
and keeping the WAL stuff around, so we can tell them to get us to
whatever point in our db we might need, i.e. the specific transaction
id.
as those are just 9 bytes per file, and not 4 gigabytes, we now also save
where we have been before and after unchecked runs, which brings our backup
nearer to the actual filesystem state than ever.
Signed-off-by: Joerg Jaspert <joerg@debian.org>
Signed-off-by: Joerg Jaspert <joerg@debian.org>
dak cruft-report -s experimental >> $webdir/cruft-report-daily.txt
cat $webdir/cruft-report-daily.txt | mail -a "X-Debian: DAK" -e -s "Debian archive cruft report for $(date +%D)" ftpmaster@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
}
+
+function pg_timestamp() {
+ tsname=${1:-"unknown"}
+ log "Saving postgres transaction id for ${tsname}"
+ psql -tAc 'select txid_current();' > $base/backup/txid_${tsname}_$(date +%Y.%m.%d-%H:%M:%S)
+}
stage $GO &
GO=(
- FUNC="pgdump_pre"
+ FUNC="pg_timestamp"
TIME="pg_dump1"
- ARGS=""
+ ARGS="predinstall"
ERR=""
)
stage $GO
stage $GO &
GO=(
- FUNC="pgdump_post"
+ FUNC="pg_timestamp"
TIME="pg_dump2"
- ARGS=""
+ ARGS="postdinstall"
ERR=""
)
-stage $GO &
+stage $GO
GO=(
FUNC="expire"
stage $GO &
GO=(
- FUNC="compress"
- TIME="compress"
+ FUNC="cleantransactions"
+ TIME=""
ARGS=""
ERR=""
)
-stage $GO &
+stage $GO
GO=(
FUNC="aptftpcleanup"
fi
trap cleanup 0
+
+pg_timestamp preunchecked
do_newstage
do_unchecked
fi
dak contents -l 10000 scan
+pg_timestamp postunchecked
apt-ftparchive -q clean apt.conf
}
-function compress() {
- log "Compress old psql backups"
+function cleantransactions() {
+ log "Cleanup transaction ids older than 3 months"
cd $base/backup/
- find -maxdepth 1 -mindepth 1 -type f -name 'dump_pre_*' -mtime +2 -print0 | xargs -0 --no-run-if-empty rm
-
- find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' \! -name '*.xz' -mmin +720 -print0 \
- | xargs --no-run-if-empty -0 -P4 -n1 xz -9v
-
- find -maxdepth 1 -mindepth 1 -type f -name "dumpall_*" \! -name '*.bz2' \! -name '*.gz' \! -name '*.xz' -mmin +720 \
- | xargs --no-run-if-empty -0 -P4 -n1 xz -9v
- finddup -l -d $base/backup
+ find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -print0 | xargs -0 --no-run-if-empty rm
}
function logstats() {
def parse_file_dates(list):
out = []
# dump_2006.05.02-11:52:01.bz2
- p = re.compile('^\./dump_([0-9]{4})\.([0-9]{2})\.([0-9]{2})-([0-9]{2}):([0-9]{2}):([0-9]{2})(.bz2)?$')
+ p = re.compile('^\./dump_([0-9]{4})\.([0-9]{2})\.([0-9]{2})-([0-9]{2}):([0-9]{2}):([0-9]{2})(.xz)?$')
for file in list:
m = p.search(file)
if m: