fi
}
+# Syncing AppStream/DEP-11 data
+function dep11() {
+    log "Synchronizing AppStream metadata"
+    # First sync their newest data
+    mkdir -p ${scriptdir}/dep11
+    cd ${scriptdir}/dep11
+    rsync -aq --delete --delete-after dep11-sync:/does/not/matter . || true
+
+    # Lets check!
+    # NOTE(review): this uses both ${scriptsdir} and ${scriptdir} -- confirm
+    # both variables are defined in the environment, or unify the spelling.
+    if ${scriptsdir}/dep11-basic-validate.py . ${scriptdir}/dep11/; then
+        # Yay, worked, lets copy around
+        for dir in stretch sid; do
+            if [ -d ${dir}/ ]; then
+                for comp in main contrib non-free; do
+                    cd dists/${dir}/${comp}/dep11
+                    # Quote the exclude pattern so the shell cannot expand it
+                    # against files in the current directory.
+                    rsync -aq --delete --delete-after --exclude "*.tmp" . ${ftpdir}/dists/${dir}/${comp}/dep11/.
+                    cd ${scriptdir}/dep11
+                done
+            fi
+        done
+    else
+        echo "ARRRR, bad guys, wrong files, ARRR"
+        echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" mak@debian.org
+    fi
+}
function cruft() {
log "Checking for cruft in overrides"
dak check-overrides
function autocruft() {
 log "Check for obsolete binary packages"
- suites=${1:-"unstable experimental"}
- # Initially only run with -n and output into a file, to check.
- dstamp=$(date -u +"%Y-%m-%d_%H:%M")
- echo "Report for ${dstamp}" > $webdir/auto-cruft-report_${dstamp}.txt
- for suite in ${suites}; do
- dak auto-decruft -n -s ${suite} >> $webdir/auto-cruft-report_${dstamp}.txt
- done
- cat $webdir/auto-cruft-report_${dstamp}.txt | mail -a "X-Debian: DAK" -e -s "Debian archive AUTOMATIC cruft report for ${dstamp}" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" -c niels@thykier.net ftpmaster@ftp-master.debian.org
+ # Dry-run reporting phase is over: decruft for real now.
+ # Unstable is decrufted unconditionally; experimental only where unstable
+ # already carries a newer version (removal message tagged "NVIU").
+ dak auto-decruft -s unstable
+ dak auto-decruft -s experimental --if-newer-version-in unstable --if-newer-version-in-rm-msg "NVIU"
}
function fingerprints() {
function copyoverrides() {
 log 'Copying override files into public view ...'
- for ofile in ${overridedir}/override.{squeeze,wheezy,jessie,stretch,sid}.{,extra.}{main,contrib,non-free}*; do
- bname=${ofile##*/}
- gzip -9cv --rsyncable ${ofile} > ${indices}/${bname}.gz
- chmod g+w ${indices}/${bname}.gz
- done
+    # Run in a subshell so nullglob does not leak into the rest of the
+    # script; with nullglob an unmatched pattern expands to nothing
+    # instead of being passed through literally to gzip.
+    (
+        shopt -s nullglob
+        for ofile in "${overridedir}"/override.{squeeze,wheezy,jessie,stretch,sid}.{,extra.}{main,contrib,non-free}*; do
+            bname=${ofile##*/}
+            # Quote expansions so filenames survive word-splitting (SC2086).
+            gzip -9cv --rsyncable "${ofile}" > "${indices}/${bname}.gz"
+            chmod g+w "${indices}/${bname}.gz"
+        done
+    )
}
function mkfilesindices() {
while read SHASUM SIZE NAME; do
if ! [ -f "${subdir}/${NAME}" ]; then
bname=$(basename ${NAME})
- if [[ "${bname}" =~ ^(Packages|Sources|Translation-[a-zA-Z_]+)$ ]]; then
+ if [[ "${bname}" =~ ^(Packages|Sources|Contents-[a-zA-Z0-9-]+|Translation-[a-zA-Z_]+)$ ]]; then
# We don't keep unpacked files, don't check for their existance.
# We might want to go and check their unpacked shasum, but right now
# I don't care. I believe it should be enough if all the packed shasums
function cleantransactions() {
 log "Cleanup transaction ids older than 3 months"
 cd $base/backup/
- find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -print0 | xargs -0 --no-run-if-empty rm
+ # find's built-in -delete removes matches itself: no xargs/rm child
+ # processes, and inherently safe for any filename.
+ find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -delete
}
function logstats() {