$scriptsdir/update-pseudopackages.sh
}
-# Process (oldstable)-proposed-updates "NEW" queue
-function punew_do() {
- date -u -R >> REPORT
- dak process-policy $1 | tee -a REPORT | mail -a "X-Debian: DAK" -e -s "NEW changes in $1" debian-release@lists.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
- echo >> REPORT
-}
-function punew() {
- log "Doing automated p-u-new processing"
- cd "${queuedir}/p-u-new"
- punew_do "$1"
-}
-function opunew() {
- log "Doing automated o-p-u-new processing"
- cd "${queuedir}/o-p-u-new"
- punew_do "$1"
-}
-
# The first i18n one, syncing new descriptions
function i18n1() {
log "Synchronizing i18n package descriptions"
for dir in wheezy sid; do
if [ -d dists/${dir}/ ]; then
cd dists/${dir}/main/i18n
- rsync -aq --delete --delete-after . ${ftpdir}/dists/${dir}/main/i18n/.
+ rsync -aq --delete --delete-after --exclude Translation-en.bz2 --exclude Translation-*.diff/ . ${ftpdir}/dists/${dir}/main/i18n/.
fi
cd ${scriptdir}/i18nsync
done
function packages() {
log "Generating Packages and Sources files"
- #cd $configdir
- #dak generate-packages-sources
dak generate-packages-sources2
dak contents generate
}
}
function release() {
+ # XXX: disable once we can remove i18n/Index (#649314)
+ log "Generating i18n/Index"
+ (
+ cd "$ftpdir/dists";
+ for dist in testing unstable experimental proposed-updates testing-proposed-updates; do
+ $scriptsdir/generate-i18n-Index $dist;
+ done
+ )
log "Generating Release files"
dak generate-releases
}
FILENAME=ls-lR
log "Removing any core files ..."
- find -type f -name core -print0 | xargs -0r rm -v
-
- log "Checking permissions on files in the FTP tree ..."
- find -type f \( \! -perm -444 -o -perm +002 \) -ls
- find -type d \( \! -perm -555 -o -perm +002 \) -ls
+ find -type f -name core -print -delete
log "Checking symlinks ..."
symlinks -rd .
log "Creating recursive directory listing ... "
- rm -f .${FILENAME}.new
- TZ=UTC ls -lR > .${FILENAME}.new
-
- if [ -r ${FILENAME}.gz ] ; then
- mv -f ${FILENAME}.gz ${FILENAME}.old.gz
- mv -f .${FILENAME}.new ${FILENAME}
- rm -f ${FILENAME}.patch.gz
- zcat ${FILENAME}.old.gz | diff -u - ${FILENAME} | gzip -9cfn - >${FILENAME}.patch.gz
- rm -f ${FILENAME}.old.gz
- else
- mv -f .${FILENAME}.new ${FILENAME}
- fi
-
- gzip -9cfN ${FILENAME} >${FILENAME}.gz
- rm -f ${FILENAME}
+ rm -f ${FILENAME}.gz
+ TZ=UTC ls -lR | gzip -9c --rsyncable > ${FILENAME}.gz
}
function mkmaintainers() {
function copyoverrides() {
log 'Copying override files into public view ...'
- for ofile in $copyoverrides ; do
- cd $overridedir
- chmod g+w override.$ofile
-
- cd $indices
-
- newofile=override.$ofile.gz
- rm -f .newover-$ofile.gz
- pc="`gzip 2>&1 -9nv <$overridedir/override.$ofile >.newover-$ofile.gz`"
- if ! cmp -s .newover-$ofile.gz $newofile || [ ! -f $newofile ]; then
- log " installing new $newofile $pc"
- mv -f .newover-$ofile.gz $newofile
- chmod g+w $newofile
- else
- rm -f .newover-$ofile.gz
- fi
+ for ofile in ${overridedir}/override.{squeeze,wheezy,sid}.{,extra.}{main,contrib,non-free}*; do
+ bname=${ofile##*/}
+ gzip -9cv --rsyncable ${ofile} > ${indices}/${bname}.gz
+ chmod g+w ${indices}/${bname}.gz
done
}
function mkfilesindices() {
+ set +o pipefail
umask 002
cd $base/ftp/indices/files/components
ARCHLIST=$(tempfile)
log "Querying postgres"
- echo 'SELECT l.path, f.filename, a.arch_string FROM location l JOIN files f ON (f.location = l.id) LEFT OUTER JOIN (binaries b JOIN architecture a ON (b.architecture = a.id)) ON (f.id = b.file)' | psql -At | sed 's/|//;s,^/srv/ftp-master.debian.org/ftp,.,' | sort >$ARCHLIST
+ local query='SELECT l.path, f.filename, a.arch_string FROM location l JOIN files f ON (f.location = l.id) LEFT OUTER JOIN (binaries b JOIN architecture a ON (b.architecture = a.id)) ON (f.id = b.file)'
+ psql -At -c "$query" | sed 's/|//;s,^/srv/ftp-master.debian.org/ftp,.,' | sort >$ARCHLIST
includedirs () {
perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
log "Generating suite lists"
suite_list () {
- printf 'SELECT DISTINCT l.path, f.filename FROM (SELECT sa.source AS source FROM src_associations sa WHERE sa.suite = %d UNION SELECT b.source AS source FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) WHERE ba.suite = %d) s JOIN dsc_files df ON (s.source = df.source) JOIN files f ON (df.file = f.id) JOIN location l ON (f.location = l.id)\n' $1 $1 | psql -F' ' -A -t
+ local query
+ query="$(printf 'SELECT DISTINCT l.path, f.filename FROM (SELECT sa.source AS source FROM src_associations sa WHERE sa.suite = %d UNION SELECT b.source AS source FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) WHERE ba.suite = %d) s JOIN dsc_files df ON (s.source = df.source) JOIN files f ON (df.file = f.id) JOIN location l ON (f.location = l.id)' $1 $1)"
+ psql -F' ' -A -t -c "$query"
- printf 'SELECT l.path, f.filename FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) JOIN files f ON (b.file = f.id) JOIN location l ON (f.location = l.id) WHERE ba.suite = %d\n' $1 | psql -F' ' -A -t
+ query="$(printf 'SELECT l.path, f.filename FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) JOIN files f ON (b.file = f.id) JOIN location l ON (f.location = l.id) WHERE ba.suite = %d' $1)"
+ psql -F' ' -A -t -c "$query"
}
- printf 'SELECT id, suite_name FROM suite\n' | psql -F' ' -At |
+ psql -F' ' -At -c "SELECT id, suite_name FROM suite" |
while read id suite; do
[ -e $base/ftp/dists/$suite ] || continue
(
rm -f $ARCHLIST
log "Done!"
+ set -o pipefail
}
function mkchecksums() {
}
function mirrorpush() {
+ log "Checking the public archive copy"
+ cd ${mirrordir}/dists
+
+ broken=0
+ for release in $(find . -name "InRelease"); do
+ echo "Processing: ${release}"
+ subdir=${release%/InRelease}
+ while read SHASUM SIZE NAME; do
+ if ! [ -f "${subdir}/${NAME}" ]; then
+ bname=$(basename ${NAME})
+ if [[ "${bname}" =~ ^(Packages|Sources|Translation-[a-zA-Z_]+)$ ]]; then
+ # We don't keep unpacked files, don't check for their existance.
+ # We might want to go and check their unpacked shasum, but right now
+ # I don't care. I believe it should be enough if all the packed shasums
+ # match.
+ continue
+ fi
+ broken=$(( broken + 1 ))
+ echo "File ${subdir}/${NAME} is missing"
+ continue
+ fi
+
+ # We do have symlinks in the tree (see the contents files currently).
+ # So we use "readlink -f" to check the size of the target, as thats basically
+ # what gen-releases does
+ fsize=$(stat -c %s $(readlink -f "${subdir}/${NAME}"))
+ if [ ${fsize} -ne ${SIZE} ]; then
+ broken=$(( broken + 1 ))
+ echo "File ${subdir}/${NAME} has size ${fsize}, expected is ${SIZE}"
+ continue
+ fi
+
+ fshasum=$(sha1sum $(readlink -f "${subdir}/${NAME}"))
+ fshasum=${fshasum%% *}
+ if [ "${fshasum}" != "${SHASUM}" ]; then
+ broken=$(( broken + 1 ))
+ echo "File ${subdir}/${NAME} has checksum ${fshasum}, expected is ${SHASUM}"
+ continue
+ fi
+ done < <(sed '1,/SHA1:/d' "${release}" | sed '/SHA256:/,$d')
+ done
+
+ if [ $broken -gt 0 ]; then
+ log_error "Trouble with the public mirror, found ${broken} errors"
+ return 21
+ fi
+
log "Starting the mirrorpush"
date -u > /srv/ftp.debian.org/web/mirrorstart
echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
STAMP=$(date "+%Y%m%d%H%M")
mkdir -p ${scriptdir}/i18n/${STAMP}
cd ${scriptdir}/i18n/${STAMP}
- dak control-suite -l stable > squeeze
- dak control-suite -l testing > wheezy
- dak control-suite -l unstable > sid
+ for suite in stable testing unstable; do
+ codename=$(dak admin s show ${suite}|grep '^Codename')
+ codename=${codename##* }
+ echo "Codename is ${codename}"
+ dak control-suite -l ${suite} >${codename}
+ done
echo "${STAMP}" > timestamp
gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 55BE302B --detach-sign -o timestamp.gpg timestamp
rm -f md5sum
rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
sudo -H -u archvsync /home/archvsync/runmirrors metaftpdo > ~dak/runmirrors-metadata.log 2>&1 &
}
+
+function gitpdiff() {
+ # Might be that we want to change this to have more than one git repository.
+ # Advantage of one is that we do not need much space in terms of storage in git itself,
+ # git gc is pretty good on our input.
+ # But it might be faster. Well, lets test.
+ log "Adjusting the git tree for pdiffs"
+ cd ${dbdir}/git/git/
+
+ # The regex needs the architectures separated with \|
+ garchs=$(dak admin a list|sed -e ':q;N;s/\n/\\|/g;t q')
+
+ # First, get all the files we want to work on. ../dists/ is a symlink to the real dists/ we
+ # want to work with.
+ # Also, we only want contents, packages and sources.
+ # NOTE: interpolate ${garchs} (the variable assigned above — "$archs" was a
+ # typo that expanded empty, so no per-arch Contents file ever matched), and
+ # escape the dot before "gz" so it does not match an arbitrary character.
+ for file in $(find ../dists/ -regex ".*/\(Contents-\(${garchs}\)\|\(Packages\|Sources\)\)\.gz"); do
+ log "${file}"
+ basen=${file%%.gz};
+ basen=${basen##../};
+ dir=${basen%/*};
+ mkdir -p $dir;
+ zcat $file > $basen;
+ done
+
+ # Second, add all there is into git
+ cd dists
+ git add .
+ # Maybe we want to make this the same for tag and commit? But well, shouldn't matter
+ COMD=$(date -Is)
+ TAGD=$(date +%Y-%m-%d-%H-%M)
+ git commit -m "Commit of ${COMD}"
+ git tag "${TAGD}"
+ }