# )
# stage $GO
+# we need to wait for the background processes before the end of dinstall
+wait
+
log "Daily cron scripts successful, all done"
exec > "$logdir/afterdinstall.log" 2>&1
function do_dists () {
cd $configdir
dak generate-filelist
- GZIP='--rsyncable' ; export GZIP
dak generate-packages-sources
}
function packages() {
log "Generating Packages and Sources files"
cd $configdir
- GZIP='--rsyncable' ; export GZIP
apt-ftparchive generate apt.conf
#dak generate-packages-sources
}
mv -f ${FILENAME}.gz ${FILENAME}.old.gz
mv -f .${FILENAME}.new ${FILENAME}
rm -f ${FILENAME}.patch.gz
- zcat ${FILENAME}.old.gz | diff -u - ${FILENAME} | gzip --rsyncable -9cfn - >${FILENAME}.patch.gz
+ zcat ${FILENAME}.old.gz | diff -u - ${FILENAME} | gzip -9cfn - >${FILENAME}.patch.gz
rm -f ${FILENAME}.old.gz
else
mv -f .${FILENAME}.new ${FILENAME}
fi
- gzip --rsyncable -9cfN ${FILENAME} >${FILENAME}.gz
+ gzip -9cfN ${FILENAME} >${FILENAME}.gz
rm -f ${FILENAME}
}
if ! cmp -s .new-maintainers Maintainers || [ ! -f Maintainers ]; then
log "installing Maintainers ... "
mv -f .new-maintainers Maintainers
- gzip --rsyncable -9v <Maintainers >.new-maintainers.gz
+ gzip -9v <Maintainers >.new-maintainers.gz
mv -f .new-maintainers.gz Maintainers.gz
else
rm -f .new-maintainers
if ! cmp -s .new-uploaders Uploaders || [ ! -f Uploaders ]; then
log "installing Uploaders ... "
mv -f .new-uploaders Uploaders
- gzip --rsyncable -9v <Uploaders >.new-uploaders.gz
+ gzip -9v <Uploaders >.new-uploaders.gz
mv -f .new-uploaders.gz Uploaders.gz
else
rm -f .new-uploaders
cd $base/ftp
find ./dists -maxdepth 1 \! -type d
find ./dists \! -type d | grep "/source/"
- ) | sort -u | gzip --rsyncable -9 > source.list.gz
+ ) | sort -u | gzip -9 > source.list.gz
log "Generating arch lists"
cd $base/ftp
find ./dists -maxdepth 1 \! -type d
find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
- ) | sort -u | gzip --rsyncable -9 > arch-$a.list.gz
+ ) | sort -u | gzip -9 > arch-$a.list.gz
done
log "Generating suite lists"
done
)
suite_list $id | tr -d ' ' | sed 's,^/srv/ftp-master.debian.org/ftp,.,'
- ) | sort -u | gzip --rsyncable -9 > suite-${suite}.list.gz
+ ) | sort -u | gzip -9 > suite-${suite}.list.gz
done
log "Finding everything on the ftp site to generate sundries"
(cd $base/ftp/
for dist in sid squeeze; do
- find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip --rsyncable -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
+ find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
done
)
cd "$ftpdir"
${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
- ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n --rsyncable > ${md5list}.gz
+ ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
${bindir}/dsync-flist -q link-dups $dsynclist || true
}
PATH=$masterdir:$PATH
umask 022
unset CDPATH
+GZIP='--rsyncable' ; export GZIP
# Set the database variables
eval $(dak admin config db-shell)
Dir
{
- ArchiveDir "/org/security.debian.org/ftp/";
- OverrideDir "/org/security.debian.org/override/";
- CacheDir "/org/security.debian.org/dak-database/";
+ ArchiveDir "/srv/security-master.debian.org/ftp/";
+ OverrideDir "/srv/security-master.debian.org/override/";
+ CacheDir "/srv/security-master.debian.org/dak-database/";
};
Default
{
- Packages::Compress ". gzip bzip2";
+ Packages::Compress "gzip bzip2";
Sources::Compress "gzip bzip2";
DeLinkLimit 0;
FileMode 0664;
}
-tree "dists/oldstable/updates"
-{
- FileList "/org/security.debian.org/dak-database/dists/oldstable_updates/$(SECTION)_binary-$(ARCH).list";
- SourceFileList "/org/security.debian.org/dak-database/dists/oldstable_updates/$(SECTION)_source.list";
- Sections "main contrib non-free";
- Architectures "alpha amd64 arm hppa i386 ia64 mips mipsel powerpc s390 sparc source";
- BinOverride "override.etch.$(SECTION)";
- ExtraOverride "override.etch.extra.$(SECTION)";
- SrcOverride "override.etch.$(SECTION).src";
- Contents " ";
- Packages::Compress "gzip bzip2";
- Sources::Compress "gzip bzip2";
-};
-
tree "dists/stable/updates"
{
- FileList "/org/security.debian.org/dak-database/dists/stable_updates/$(SECTION)_binary-$(ARCH).list";
- SourceFileList "/org/security.debian.org/dak-database/dists/stable_updates/$(SECTION)_source.list";
+ FileList "/srv/security-master.debian.org/dak-database/dists/stable_updates/$(SECTION)_binary-$(ARCH).list";
+ SourceFileList "/srv/security-master.debian.org/dak-database/dists/stable_updates/$(SECTION)_source.list";
Sections "main contrib non-free";
Architectures "alpha amd64 arm armel hppa i386 ia64 mips mipsel powerpc s390 sparc source";
BinOverride "override.lenny.$(SECTION)";
tree "dists/testing/updates"
{
- FileList "/org/security.debian.org/dak-database/dists/testing_updates/$(SECTION)_binary-$(ARCH).list";
- SourceFileList "/org/security.debian.org/dak-database/dists/testing_updates/$(SECTION)_source.list";
+ FileList "/srv/security-master.debian.org/dak-database/dists/testing_updates/$(SECTION)_binary-$(ARCH).list";
+ SourceFileList "/srv/security-master.debian.org/dak-database/dists/testing_updates/$(SECTION)_source.list";
Sections "main contrib non-free";
- Architectures "alpha amd64 armel hppa i386 ia64 mips mipsel powerpc s390 sparc source";
+ Architectures "alpha amd64 armel hppa i386 ia64 kfreebsd-i386 kfreebsd-amd64 mips mipsel powerpc s390 sparc source";
BinOverride "override.squeeze.$(SECTION)";
ExtraOverride "override.squeeze.extra.$(SECTION)";
SrcOverride "override.squeeze.$(SECTION).src";
Dir
{
- ArchiveDir "/srv/security.debian.org/buildd/";
- OverrideDir "/srv/security.debian.org/override/";
- CacheDir "/srv/security.debian.org/dak-database/";
+ ArchiveDir "/srv/security-master.debian.org/buildd/";
+ OverrideDir "/srv/security-master.debian.org/override/";
+ CacheDir "/srv/security-master.debian.org/dak-database/";
};
Default
FileMode 0664;
}
-bindirectory "etch"
-{
- Packages "etch/Packages";
- Sources "etch/Sources";
- Contents " ";
-
- BinOverride "override.etch.all3";
- SrcOverride "override.etch.all3.src";
- BinCacheDB "packages-accepted-etch.db";
- PathPrefix "";
- Packages::Extensions ".deb .udeb";
-};
-
bindirectory "lenny"
{
Packages "lenny/Packages";
PathPrefix "";
Packages::Extensions ".deb .udeb";
};
-
#
# Executed after cron.unchecked
-ARCHS_oldstable="alpha amd64 arm hppa i386 ia64 mips mipsel powerpc sparc s390"
-ARCHS_stable="alpha amd64 arm armel hppa i386 ia64 mips mipsel powerpc sparc s390"
-ARCHS_testing="alpha amd64 armel hppa i386 ia64 mips mipsel powerpc sparc s390"
-DISTS="oldstable stable testing"
-SSH_SOCKET=~/.ssh/buildd.debian.org.socket
-
set -e
-export SCRIPTVARS=/org/security.debian.org/dak/config/debian-security/vars
+set -u
+
+export SCRIPTVARS=/srv/security-master.debian.org/dak/config/debian-security/vars
. $SCRIPTVARS
+SSH_SOCKET=~/.ssh/buildd.debian.org.socket
+DISTS=$(dak admin s list)
if [ -e $ftpdir/Archive_Maintenance_In_Progress ]; then
- exit 0
+ exit 0
fi
-cd $masterdir
-for d in $DISTS; do
- eval SOURCES_$d=`stat -c "%Y" $base/buildd/$d/Sources.gz`
- eval PACKAGES_$d=`stat -c "%Y" $base/buildd/$d/Packages.gz`
+for dist in $DISTS; do
+ eval SOURCES_$dist=`stat -c "%Y" $base/buildd/$dist/Sources.gz`
+ eval PACKAGES_$dist=`stat -c "%Y" $base/buildd/$dist/Packages.gz`
done
+cd $configdir
apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate apt.conf.buildd
+
+cd ${base}/buildd
+for dist in $DISTS; do
+ rm -f $dist/Release*
+ darchs=$(dak admin s-a list-arch $dist | tr '\n' ' ')
+ codename=$(dak admin s show ${dist} | grep ^Codename | awk '{print $2}')
+ apt-ftparchive -qq -o APT::FTPArchive::Release::Codename="${codename}" -o APT::FTPArchive::Release::Origin="Debian" -o APT::FTPArchive::Release::Label="Debian" -o APT::FTPArchive::Release::Description="buildd $dist security" -o APT::FTPArchive::Release::Architectures="${darchs}" release $dist > Release
+ gpg --secret-keyring ${base}/s3kr1t/dot-gnupg/secring.gpg --keyring ${base}/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 55BE302B --detach-sign -o Release.gpg Release
+ mv Release* $dist/.
+done
+
dists=
-for d in $DISTS; do
- eval NEW_SOURCES_$d=`stat -c "%Y" $base/buildd/$d/Sources.gz`
- eval NEW_PACKAGES_$d=`stat -c "%Y" $base/buildd/$d/Packages.gz`
- old=SOURCES_$d
- new=NEW_$old
- if [ ${!new} -gt ${!old} ]; then
- if [ -z "$dists" ]; then
- dists="$d"
- else
- dists="$dists $d"
- fi
- continue
- fi
- old=PACKAGES_$d
- new=NEW_$old
- if [ ${!new} -gt ${!old} ]; then
- if [ -z "$dists" ]; then
- dists="$d"
- else
- dists="$dists $d"
- fi
- continue
- fi
+
+
+for dist in $DISTS; do
+ eval NEW_SOURCES_$dist=`stat -c "%Y" $base/buildd/$dist/Sources.gz`
+ eval NEW_PACKAGES_$dist=`stat -c "%Y" $base/buildd/$dist/Packages.gz`
+ old=SOURCES_$dist
+ new=NEW_$old
+ if [ ${!new} -gt ${!old} ]; then
+ if [ -z "$dists" ]; then
+ dists="$dist"
+ else
+ dists="$dists $dist"
+ fi
+ continue
+ fi
+ old=PACKAGES_$dist
+ new=NEW_$old
+ if [ ${!new} -gt ${!old} ]; then
+ if [ -z "$dists" ]; then
+ dists="$dist"
+ else
+ dists="$dists $dist"
+ fi
+ continue
+ fi
done
if [ ! -z "$dists" ]; then
- # setup ssh master process
- ssh wbadm@buildd -S $SSH_SOCKET -MN 2> /dev/null &
- SSH_PID=$!
- while [ ! -S $SSH_SOCKET ]; do
- sleep 1
- done
- trap 'kill -TERM $SSH_PID' 0
- for d in $dists; do
- ssh wbadm@buildd -S $SSH_SOCKET trigger.security $d
- done
+ # setup ssh master process
+ ssh wbadm@buildd -S $SSH_SOCKET -MN 2> /dev/null &
+ SSH_PID=$!
+ while [ ! -S $SSH_SOCKET ]; do
+ sleep 1
+ done
+ trap 'kill -TERM $SSH_PID' 0
+ for d in $dists; do
+ ssh wbadm@buildd -S $SSH_SOCKET trigger.security $d
+ done
fi
-
-#!/bin/sh
+#!/bin/bash
#
# Executed daily via cron, out of dak's crontab.
set -e
-export SCRIPTVARS=/org/security.debian.org/dak/config/debian-security/vars
+export SCRIPTVARS=/srv/security-master.debian.org/dak/config/debian-security/vars
. $SCRIPTVARS
################################################################################
# Fix overrides
-rsync --delete -r --include=override\* --exclude=\* --password-file /srv/non-us.debian.org/s3kr1t/rsync-password -ql security-master@ftp-master::indices/ $overridedir
+rsync --delete -r --include=override\* --exclude=\* --password-file /srv/security-master.debian.org/s3kr1t/rsync-password -ql security-master@ftp-master::indices/ $overridedir
cd $overridedir
for file in override*.gz; do
################################################################################
-cd $masterdir
+cd $configdir
+dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
dak clean-queues
+dak clean-queues -i $disembargo
dak clean-suites
-apt-ftparchive -q clean apt.conf
-apt-ftparchive -q clean apt.conf.buildd
symlinks -d -r $ftpdir
-pg_dump obscurity > /org/security.debian.org/dak-backup/dump_$(date +%Y.%m.%d-%H:%M:%S)
+pg_dump obscurity > /srv/security-master.debian.org/dak-backup/dump_$(date +%Y.%m.%d-%H:%M:%S)
+find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
+while read dumpname; do
+ bzip2 -9fv "$dumpname"
+done
-# Vacuum the database
-set +e
-echo "VACUUM; VACUUM ANALYZE;" | psql obscurity 2>&1 | egrep -v "^NOTICE: Skipping \"pg_.*only table or database owner can VACUUM it$|^VACUUM$"
-set -e
+apt-ftparchive -q clean apt.conf
+apt-ftparchive -q clean apt.conf.buildd
################################################################################
-#! /bin/sh
+#! /bin/bash
set -e
-export SCRIPTVARS=/org/security.debian.org/dak/config/debian-security/vars
+set -u
+
+export SCRIPTVARS=/srv/security-master.debian.org/dak/config/debian-security/vars
. $SCRIPTVARS
+# And use one locale, no matter what the caller has set
+export LANG=C
+export LC_ALL=C
+
report=$queuedir/REPORT
reportdis=$queuedir/REPORT.disembargo
timestamp=$(date "+%Y-%m-%d %H:%M")
doanything=false
+dopolicy=false
+
+# So first we should go and see if any process-policy action is done
+dak process-policy embargo | mail -a "X-Debian: DAK" -e -s "Automatically accepted from embargoed" team@security.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
+dak process-policy disembargo | mail -a "X-Debian: DAK" -e -s "Automatically accepted from unembargoed" team@security.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
+
+# Now, if this really did anything, we can then sync it over. Files
+# in newstage mean they are (late) accepts of security stuff, need
+# to sync to ftp-master
+
+cd $newstage
+changes=$(find . -maxdepth 1 -mindepth 1 -type f -name \*.changes | sed -e "s,./,," | xargs)
+if [ -n "$changes" ]; then
+ dopolicy=true
+ echo "$timestamp": ${changes:-"Nothing to do in newstage"} >> $report
+ rsync -a -q $newstage/. /srv/queued/ftpmaster/.
+ dak process-upload -a -d "$newstage" >> $report
+fi
cd $unchecked
changes=$(find . -maxdepth 1 -mindepth 1 -type f -name \*.changes | sed -e "s,./,," | xargs)
if [ -n "$changes" ]; then
- doanything=true
- echo "$timestamp": "$changes" >> $report
- dak process-unchecked -a $changes >> $report
- echo "--" >> $report
+ doanything=true
+ echo "$timestamp": ${changes:-"Nothing to do in unchecked"} >> $report
+ dak process-upload -a -d "$unchecked" >> $report
fi
cd $disembargo
changes=$(find . -maxdepth 1 -mindepth 1 -type f -name \*.changes | sed -e "s,./,," | xargs)
if [ -n "$changes" ]; then
- doanything=true
- echo "$timestamp": "$changes" >> $reportdis
- dak process-unchecked -a $changes >> $reportdis
- echo "--" >> $reportdis
+ doanything=true
+ echo "$timestamp": ${changes:-"Nothing to do in disembargo"} >> $reportdis
+ dak process-upload -a -d "$disembargo" >> $reportdis
fi
if ! $doanything; then
exit 0
fi
-sh $masterdir/cron.buildd
+if [ "x${dopolicy}x" = "xtruex" ]; then
+ # We had something approved from a policy queue, push out new archive
+ dak dominate
+ dak generate-filelist
+ cd $configdir
+ $configdir/map.sh
+ apt-ftparchive generate apt.conf
+ dak generate-releases
+ /srv/security-master.debian.org/dak/config/debian-security/make-mirror.sh
+ sudo -u archvsync -H /home/archvsync/signal_security
+fi
+
+$configdir/cron.buildd
-#!/bin/sh
+#!/bin/bash
#
# Executed weekly via cron, out of dak's crontab.
set -e
-export SCRIPTVARS=/org/security.debian.org/dak/config/debian-security/vars
+export SCRIPTVARS=/srv/security-master.debian.org/dak/config/debian-security/vars
. $SCRIPTVARS
################################################################################
# Weekly generation of release files, then pushing mirrors.
# Used as we have a "Valid-until" field in our release files of 10 days. In case
# we dont have a security update in that time...
-cd $masterdir
+cd $configdir
dak generate-releases
+/srv/security-master.debian.org/dak/config/debian-security/make-mirror.sh
sudo -u archvsync -H /home/archvsync/signal_security
Dinstall
{
GPGKeyring {
- "/org/keyring.debian.org/keyrings/debian-keyring.gpg";
+ "/srv/keyring.debian.org/keyrings/debian-keyring.gpg";
};
- SigningKeyring "/org/non-us.debian.org/s3kr1t/dot-gnupg/secring.gpg";
- SigningPubKeyring "/org/non-us.debian.org/s3kr1t/dot-gnupg/pubring.gpg";
+ // was non-us.d.o path before
+ SigningKeyring "/srv/security-master.debian.org/s3kr1t/dot-gnupg/secring.gpg";
+ SigningPubKeyring "/srv/security-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg";
SigningKeyIds "55BE302B";
SendmailCommand "/usr/sbin/sendmail -odq -oi -t";
MyEmailAddress "Debian Installer <installer@ftp-master.debian.org>";
MyDistribution "Debian"; // Used in emails
BugServer "bugs.debian.org";
PackagesServer "packages.debian.org";
- LockFile "/org/security.debian.org/dak/lock";
+ LockFile "/org/security-master.debian.org/dak/lock";
Bcc "archive@ftp-master.debian.org";
// GroupOverrideFilename "override.group-maint";
FutureTimeTravelGrace 28800; // 8 hours
BXANotify "false";
QueueBuildSuites
{
- oldstable;
stable;
testing;
};
Process-New
{
- AcceptedLockFile "/org/security.debian.org/lock/unchecked.lock";
+ DinstallLockFile "/srv/security-master.debian.org/lock/processnew.lock";
LockDir "/srv/security-master.debian.org/lock/new/";
};
{
Directories
{
- // byhand;
- // new;
+ byhand;
+ new;
unembargoed;
+ embargoed;
+ };
+};
+
+Import-Keyring
+{
+ /srv/keyring.debian.org/keyrings/debian-maintainers.gpg
+ {
+ Debian-Maintainer "true";
+ };
+};
+
+Import-LDAP-Fingerprints
+{
+ LDAPDn "ou=users,dc=debian,dc=org";
+ LDAPServer "db.debian.org";
+ ExtraKeyrings
+ {
+ "/srv/keyring.debian.org/keyrings/removed-keys.pgp";
+ "/srv/keyring.debian.org/keyrings/removed-keys.gpg";
+ "/srv/keyring.debian.org/keyrings/extra-keys.pgp";
+ };
+ KeyServer "wwwkeys.eu.pgp.net";
+};
+
+Check-Overrides
+{
+ OverrideSuites
+ {
+ Stable
+ {
+ Process "0";
+ };
+
+ Testing
+ {
+ Process "0";
+ };
+
};
};
};
MyEmailAddress "Debian Archive Maintenance <ftpmaster@ftp-master.debian.org>";
- LogFile "/org/security.debian.org/dak-log/removals.txt";
+ LogFile "/srv/security-master.debian.org/dak-log/removals.txt";
};
Init-Archive
{
- ExportDir "/org/security.debian.org/dak/import-archive-files/";
+ ExportDir "/srv/security-master.debian.org/dak/import-archive-files/";
};
Clean-Suites
// Priority determines which suite is used for the Maintainers file
// as generated by 'dak make-maintainers' (highest wins).
- OldStable
- {
- Components
- {
- updates/main;
- updates/contrib;
- updates/non-free;
- };
- Announce "dak@security.debian.org";
- Version "";
- Origin "Debian";
- Label "Debian-Security";
- Description "Debian 4.0 Security Updates";
- ValidTime 864000; // 10 days
- CodeName "etch";
- OverrideCodeName "etch";
- CopyDotDak "/org/security.debian.org/queue/done/";
- };
-
Stable
{
Components
ValidTime 864000; // 10 days
CodeName "lenny";
OverrideCodeName "lenny";
- CopyDotDak "/org/security.debian.org/queue/done/";
+ CopyDotDak "/srv/security-master.debian.org/queue/done/";
};
Testing
ValidTime 864000; // 10 days
CodeName "squeeze";
OverrideCodeName "squeeze";
- CopyDotDak "/org/security.debian.org/queue/done/";
+ CopyDotDak "/srv/security-master.debian.org/queue/done/";
};
};
Dir
{
- Root "/org/security.debian.org/ftp/";
- Pool "/org/security.debian.org/ftp/pool/";
- Dak "/org/security.debian.org/dak/";
- Templates "/org/security.debian.org/dak/templates/";
+ Root "/srv/security-master.debian.org/ftp/";
+ Pool "/srv/security-master.debian.org/ftp/pool/";
+ Dak "/srv/security-master.debian.org/dak/";
+ Templates "/srv/security-master.debian.org/dak/templates/";
PoolRoot "pool/";
- Override "/org/security.debian.org/override/";
- Lock "/org/security.debian.org/lock/";
- Lists "/org/security.debian.org/dak-database/dists/";
- Log "/org/security.debian.org/dak-log/";
- Morgue "/org/security.debian.org/morgue/";
+ Override "/srv/security-master.debian.org/override/";
+ Lock "/srv/security-master.debian.org/lock/";
+ Cache "/srv/security-master.debian.org/database/";
+ Lists "/srv/security-master.debian.org/dak-database/dists/";
+ Log "/srv/security-master.debian.org/dak-log/";
+ Morgue "/srv/security-master.debian.org/morgue/";
MorgueReject "reject";
- Override "/org/security.debian.org/scripts/override/";
- QueueBuild "/org/security.debian.org/buildd/";
- Upload "/srv/queued/UploadQueue/";
+ Override "/srv/security-master.debian.org/scripts/override/";
+ QueueBuild "/srv/security-master.debian.org/buildd/";
+ Upload "/srv/queued/ftpmaster/";
Queue
{
- Accepted "/org/security.debian.org/queue/accepted/";
- Byhand "/org/security.debian.org/queue/byhand/";
- Done "/org/security.debian.org/queue/done/";
- Holding "/org/security.debian.org/queue/holding/";
- New "/org/security.debian.org/queue/new/";
- Reject "/org/security.debian.org/queue/reject/";
- Unchecked "/org/security.debian.org/queue/unchecked/";
+ Byhand "/srv/security-master.debian.org/queue/byhand/";
+ Done "/srv/security-master.debian.org/queue/done/";
+ Holding "/srv/security-master.debian.org/queue/holding/";
+ New "/srv/security-master.debian.org/queue/new/";
+ Reject "/srv/security-master.debian.org/queue/reject/";
+ Unchecked "/srv/security-master.debian.org/queue/unchecked/";
+ Newstage "/srv/security-master.debian.org/queue/newstage/";
+
ProposedUpdates "/does/not/exist/"; // XXX fixme
OldProposedUpdates "/does/not/exist/"; // XXX fixme
- Embargoed "/org/security.debian.org/queue/embargoed/";
- Unembargoed "/org/security.debian.org/queue/unembargoed/";
- Disembargo "/org/security.debian.org/queue/unchecked-disembargo/";
+ Embargoed "/srv/security-master.debian.org/queue/embargoed/";
+ Unembargoed "/srv/security-master.debian.org/queue/unembargoed/";
+ Disembargo "/srv/security-master.debian.org/queue/unchecked-disembargo/";
};
};
s390 "IBM S/390";
sparc "Sun SPARC/UltraSPARC";
amd64 "AMD x86_64 (AMD64)";
+ kfreebsd-i386 "GNU/kFreeBSD i386";
+ kfreebsd-amd64 "GNU/kFreeBSD amd64";
};
Location
{
- /org/security.debian.org/ftp/pool/
+ /srv/security-master.debian.org/ftp/pool/
{
Archive "security";
Suites
--- /dev/null
+Config
+{
+ chopin.debian.org
+ {
+ DatabaseHostname "security";
+ DakConfig "/org/security-master.debian.org/dak/config/debian-security/dak.conf";
+ AptConfig "/org/security-master.debian.org/dak/config/debian-security/apt.conf";
+ }
+}
\ No newline at end of file
--- /dev/null
+#!/bin/bash
+
+set -e
+
+LANG=C
+LC_ALL=C
+
+echo "Regenerating \"public\" mirror/ hardlink fun"
+date -u > /srv/security-master.debian.org/ftp/project/trace/security-master.debian.org
+echo "Using dak v1" >> /srv/security-master.debian.org/ftp/project/trace/security-master.debian.org
+echo "Running on host: $(hostname -f)" >> /srv/security-master.debian.org/ftp/project/trace/security-master.debian.org
+cd /srv/security.debian.org/archive/debian-security/
+rsync -aH --link-dest /srv/security-master.debian.org/ftp/ --exclude Archive_Maintenance_In_Progress --delete --delete-after --ignore-errors /srv/security-master.debian.org/ftp/. .
#!/bin/bash
-dak make-pkg-file-mapping | bzip2 -9 > /org/security.debian.org/ftp/indices/package-file.map.bz2
+dak make-pkg-file-mapping | bzip2 -9 > /srv/security-master.debian.org/ftp/indices/package-file.map.bz2
# locations used by many scripts
-base=/org/security.debian.org
+base=/srv/security-master.debian.org
+public=/srv/security.debian.org
+bindir=$base/bin
ftpdir=$base/ftp/
-masterdir=$base/dak/config/debian-security/
+masterdir=$base/dak/
+configdir=$masterdir/config/debian-security/
+webdir=$masterdir/web
+indices=$ftpdir/indices
+archs=$(dak admin a list | tr '\n' ' ')
+
+scriptdir=$base/scripts
+scriptsdir=$masterdir/scripts/debian/
+dbdir=$base/dak-database/
+lockdir=$base/lock
+stagedir=$lockdir/stages
overridedir=$base/override
+extoverridedir=$scriptdir/external-overrides
+logdir=$base/log/cron/
+
queuedir=$base/queue/
unchecked=$queuedir/unchecked/
+newstage=$queuedir/newstage/
disembargo=$queuedir/unchecked-disembargo/
-accepted=$queuedir/accepted/
done=$queuedir/done/
+mirrordir=$base/mirror/
+exportdir=$base/export/
+exportpublic=$public/rsync/export/
+
uploadhost=ftp-master.debian.org
uploaddir=/pub/UploadQueue/
+ftpgroup=debadmin
+
components="main non-free contrib"
-suites="oldstable stable testing"
+suites=$(dak admin s list)
override_types="deb dsc udeb"
+TMPDIR=${base}/tmp
+
PATH=$masterdir:$PATH
umask 022
+unset CDPATH
+
+# Set the database variables
+eval $(dak admin config db-shell)
+PATH=$masterdir:$PATH
+umask 022
clean_debbugs
+# Generate list of override disparities
+dak override-disparity | gzip -9 > ${webdir}/override-disparity.gz
+
################################################################################
function do_dists () {
cd $configdir
dak generate-filelist
- GZIP='--rsyncable' ; export GZIP
dak generate-packages-sources >/dev/null
}
function packages() {
log "Generating Packages and Sources files"
cd $configdir
- GZIP='--rsyncable' ; export GZIP
#apt-ftparchive generate apt.conf
dak generate-packages-sources
}
mv -f ${FILENAME}.gz ${FILENAME}.old.gz
mv -f .${FILENAME}.new ${FILENAME}
rm -f ${FILENAME}.patch.gz
- zcat ${FILENAME}.old.gz | diff -u - ${FILENAME} | gzip --rsyncable -9cfn - >${FILENAME}.patch.gz
+ zcat ${FILENAME}.old.gz | diff -u - ${FILENAME} | gzip -9cfn - >${FILENAME}.patch.gz
rm -f ${FILENAME}.old.gz
else
mv -f .${FILENAME}.new ${FILENAME}
fi
- gzip --rsyncable -9cfN ${FILENAME} >${FILENAME}.gz
+ gzip -9cfN ${FILENAME} >${FILENAME}.gz
rm -f ${FILENAME}
}
if ! cmp -s .new-maintainers Maintainers || [ ! -f Maintainers ]; then
log "installing Maintainers ... "
mv -f .new-maintainers Maintainers
- gzip --rsyncable -9v <Maintainers >.new-maintainers.gz
+ gzip -9v <Maintainers >.new-maintainers.gz
mv -f .new-maintainers.gz Maintainers.gz
else
rm -f .new-maintainers
if ! cmp -s .new-uploaders Uploaders || [ ! -f Uploaders ]; then
log "installing Uploaders ... "
mv -f .new-uploaders Uploaders
- gzip --rsyncable -9v <Uploaders >.new-uploaders.gz
+ gzip -9v <Uploaders >.new-uploaders.gz
mv -f .new-uploaders.gz Uploaders.gz
else
rm -f .new-uploaders
cd $base/ftp
find ./dists -maxdepth 1 \! -type d
find ./dists \! -type d | grep "/source/"
- ) | sort -u | gzip --rsyncable -9 > source.list.gz
+ ) | sort -u | gzip -9 > source.list.gz
log "Generating arch lists"
cd $base/ftp
find ./dists -maxdepth 1 \! -type d
find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
- ) | sort -u | gzip --rsyncable -9 > arch-$a.list.gz
+ ) | sort -u | gzip -9 > arch-$a.list.gz
done
log "Generating suite lists"
done
)
suite_list $id | tr -d ' ' | sed 's,^/srv/ftp-master.debian.org/ftp,.,'
- ) | sort -u | gzip --rsyncable -9 > suite-${suite}.list.gz
+ ) | sort -u | gzip -9 > suite-${suite}.list.gz
done
log "Finding everything on the ftp site to generate sundries"
(cd $base/ftp/
for dist in sid squeeze; do
- find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip --rsyncable -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
+ find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
done
)
cd "$ftpdir"
${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
- ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n --rsyncable > ${md5list}.gz
+ ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
${bindir}/dsync-flist -q link-dups $dsynclist || true
}
PATH=$masterdir:$PATH
umask 022
unset CDPATH
+GZIP='--rsyncable' ; export GZIP
# Set the database variables
eval $(dak admin config db-shell)
"Generate changelog between two suites"),
("copy-installer",
"Copies the installer from one suite to another"),
+ ("override-disparity",
+ "Generate a list of override disparities"),
]
return functionality
for q in c.fetchall():
queues[q[0]] = q[1]
- if q[1] in ['accepted', 'buildd']:
+ if q[1] in ['accepted', 'buildd', 'embargoed', 'unembargoed']:
# Move to build_queue_table
c.execute("""INSERT INTO build_queue (queue_name, path, copy_files)
VALUES ('%s', '%s', '%s')""" % (q[1], q[2], q[3]))
AND s.file = f.id AND f.location = l.id AND l.component = c.id
AND s.maintainer = m.id %s
""" % (comparison_operator, con_suites), {'package': package})
- ql.extend(q.fetchall())
+ if not Options["Architecture"] or con_architectures:
+ ql.extend(q.fetchall())
+ else:
+ ql = q.fetchall()
d = {}
highver = {}
for i in ql:
#!/usr/bin/env python
-""" Wrapper for Debian Security team """
-# Copyright (C) 2006 Anthony Towns <ajt@debian.org>
+"""
+Do whatever is needed to get a security upload released
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2010 Joerg Jaspert <joerg@debian.org>
+@license: GNU General Public License version 2 or later
+"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# General Public License for more details.
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
-# USA
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+################################################################################
+
################################################################################
-import apt_pkg, os, sys, pwd, time, commands
+import os
+import sys
+import time
+import apt_pkg
+import commands
from daklib import queue
from daklib import daklog
from daklib import utils
-from daklib.dbconn import DBConn, get_build_queue, get_suite_architectures
+from daklib.dbconn import *
from daklib.regexes import re_taint_free
+from daklib.config import Config
-Cnf = None
Options = None
-Upload = None
Logger = None
-
-advisory = None
+Queue = None
changes = []
-srcverarches = {}
-
-def init():
- global Cnf, Upload, Options, Logger
- Cnf = utils.get_conf()
- Cnf["Dinstall::Options::No-Mail"] = "y"
- Arguments = [('h', "help", "Security-Install::Options::Help"),
- ('a', "automatic", "Security-Install::Options::Automatic"),
- ('n', "no-action", "Security-Install::Options::No-Action"),
- ('s', "sudo", "Security-Install::Options::Sudo"),
- (' ', "no-upload", "Security-Install::Options::No-Upload"),
- ('u', "fg-upload", "Security-Install::Options::Foreground-Upload"),
- (' ', "drop-advisory", "Security-Install::Options::Drop-Advisory"),
- ('A', "approve", "Security-Install::Options::Approve"),
- ('R', "reject", "Security-Install::Options::Reject"),
- ('D', "disembargo", "Security-Install::Options::Disembargo") ]
+def usage():
+ print """Usage: dak security-install [OPTIONS] changesfiles
+Do whatever there is to do for a security release
- for i in Arguments:
- Cnf[i[2]] = ""
+ -h, --help show this help and exit
+ -n, --no-action don't commit changes
+ -s, --sudo don't bother, used internally
- arguments = apt_pkg.ParseCommandLine(Cnf, Arguments, sys.argv)
+"""
+ sys.exit()
- Options = Cnf.SubTree("Security-Install::Options")
-
- username = utils.getusername()
- if username != "dak":
- print "Non-dak user: %s" % username
- Options["Sudo"] = "y"
-
- if Options["Help"]:
- print "help yourself"
- sys.exit(0)
-
- if len(arguments) == 0:
- utils.fubar("Process what?")
-
- Upload = queue.Upload(Cnf)
- if Options["No-Action"]:
- Options["Sudo"] = ""
- if not Options["Sudo"] and not Options["No-Action"]:
- Logger = Upload.Logger = daklog.Logger(Cnf, "new-security-install")
-
- return arguments
-
-def quit():
- if Logger:
- Logger.close()
- sys.exit(0)
-
-def load_args(arguments):
- global advisory, changes
-
- adv_ids = {}
- if not arguments[0].endswith(".changes"):
- adv_ids [arguments[0]] = 1
- arguments = arguments[1:]
-
- null_adv_changes = []
-
- changesfiles = {}
- for a in arguments:
- if "/" in a:
- utils.fubar("can only deal with files in the current directory")
- if not a.endswith(".changes"):
- utils.fubar("not a .changes file: %s" % (a))
- Upload.init_vars()
- Upload.pkg.changes_file = a
- Upload.update_vars()
- if "adv id" in Upload.pkg.changes:
- changesfiles[a] = 1
- adv_ids[Upload.pkg.changes["adv id"]] = 1
- else:
- null_adv_changes.append(a)
-
- adv_ids = adv_ids.keys()
- if len(adv_ids) > 1:
- utils.fubar("multiple advisories selected: %s" % (", ".join(adv_ids)))
- if adv_ids == []:
- advisory = None
- else:
- advisory = adv_ids[0]
-
- changes = changesfiles.keys()
- return null_adv_changes
-
-def load_adv_changes():
- global srcverarches, changes
-
- for c in os.listdir("."):
- if not c.endswith(".changes"): continue
- Upload.init_vars()
- Upload.pkg.changes_file = c
- Upload.update_vars()
- if "adv id" not in Upload.pkg.changes:
- continue
- if Upload.pkg.changes["adv id"] != advisory:
- continue
-
- if c not in changes: changes.append(c)
- srcver = "%s %s" % (Upload.pkg.changes["source"],
- Upload.pkg.changes["version"])
- srcverarches.setdefault(srcver, {})
- for arch in Upload.pkg.changes["architecture"].keys():
- srcverarches[srcver][arch] = 1
-
-def advisory_info():
- if advisory != None:
- print "Advisory: %s" % (advisory)
- print "Changes:"
- for c in changes:
- print " %s" % (c)
-
- print "Packages:"
- svs = srcverarches.keys()
- svs.sort()
- for sv in svs:
- as_ = srcverarches[sv].keys()
- as_.sort()
- print " %s (%s)" % (sv, ", ".join(as_))
-
-def prompt(opts, default):
- p = ""
- v = {}
- for o in opts:
- v[o[0].upper()] = o
- if o[0] == default:
- p += ", [%s]%s" % (o[0], o[1:])
- else:
- p += ", " + o
- p = p[2:] + "? "
- a = None
-
- if Options["Automatic"]:
- a = default
-
- while a not in v:
- a = utils.our_raw_input(p) + default
- a = a[:1].upper()
-
- return v[a]
-
-def add_changes(extras):
- for c in extras:
- changes.append(c)
- Upload.init_vars()
- Upload.pkg.changes_file = c
- Upload.update_vars()
- srcver = "%s %s" % (Upload.pkg.changes["source"], Upload.pkg.changes["version"])
- srcverarches.setdefault(srcver, {})
- for arch in Upload.pkg.changes["architecture"].keys():
- srcverarches[srcver][arch] = 1
- Upload.pkg.changes["adv id"] = advisory
- Upload.dump_vars(os.getcwd())
-
-def yes_no(prompt):
- if Options["Automatic"]: return True
- while 1:
- answer = utils.our_raw_input(prompt + " ").lower()
- if answer in "yn":
- return answer == "y"
- print "Invalid answer; please try again."
-
-def do_upload():
- if Options["No-Upload"]:
- print "Not uploading as requested"
- elif Options["Foreground-Upload"]:
- actually_upload(changes)
- else:
- child = os.fork()
- if child == 0:
- actually_upload(changes)
- os._exit(0)
- print "Uploading in the background"
-
-def actually_upload(changes_files):
- file_list = ""
- suites = {}
- component_mapping = {}
- for component in Cnf.SubTree("Security-Install::ComponentMappings").List():
- component_mapping[component] = Cnf["Security-Install::ComponentMappings::%s" % (component)]
- uploads = {}; # uploads[uri] = file_list
- changesfiles = {}; # changesfiles[uri] = file_list
- package_list = {} # package_list[source_name][version]
- changes_files.sort(utils.changes_compare)
- for changes_file in changes_files:
- changes_file = utils.validate_changes_file_arg(changes_file)
- # Reset variables
- components = {}
- upload_uris = {}
- file_list = []
- Upload.init_vars()
- # Parse the .dak file for the .changes file
- Upload.pkg.changes_file = changes_file
- Upload.update_vars()
- files = Upload.pkg.files
- changes = Upload.pkg.changes
- dsc = Upload.pkg.dsc
- # Build the file list for this .changes file
- for file in files.keys():
- poolname = os.path.join(Cnf["Dir::Root"], Cnf["Dir::PoolRoot"],
- utils.poolify(changes["source"], files[file]["component"]),
- file)
- file_list.append(poolname)
- orig_component = files[file].get("original component", files[file]["component"])
- components[orig_component] = ""
- # Determine the upload uri for this .changes file
- for component in components.keys():
- upload_uri = component_mapping.get(component)
- if upload_uri:
- upload_uris[upload_uri] = ""
- num_upload_uris = len(upload_uris.keys())
- if num_upload_uris == 0:
- utils.fubar("%s: No valid upload URI found from components (%s)."
- % (changes_file, ", ".join(components.keys())))
- elif num_upload_uris > 1:
- utils.fubar("%s: more than one upload URI (%s) from components (%s)."
- % (changes_file, ", ".join(upload_uris.keys()),
- ", ".join(components.keys())))
- upload_uri = upload_uris.keys()[0]
- # Update the file list for the upload uri
- if not uploads.has_key(upload_uri):
- uploads[upload_uri] = []
- uploads[upload_uri].extend(file_list)
- # Update the changes list for the upload uri
- if not changesfiles.has_key(upload_uri):
- changesfiles[upload_uri] = []
- changesfiles[upload_uri].append(changes_file)
- # Remember the suites and source name/version
- for suite in changes["distribution"].keys():
- suites[suite] = ""
- # Remember the source name and version
- if changes["architecture"].has_key("source") and \
- changes["distribution"].has_key("testing"):
- if not package_list.has_key(dsc["source"]):
- package_list[dsc["source"]] = {}
- package_list[dsc["source"]][dsc["version"]] = ""
-
- for uri in uploads.keys():
- uploads[uri].extend(changesfiles[uri])
- (host, path) = uri.split(":")
- # file_list = " ".join(uploads[uri])
- print "Moving files to UploadQueue"
- for filename in uploads[uri]:
- utils.copy(filename, Cnf["Dir::Upload"])
- # .changes files have already been moved to queue/done by p-a
- if not filename.endswith('.changes'):
- remove_from_buildd(suites, filename)
- #spawn("lftp -c 'open %s; cd %s; put %s'" % (host, path, file_list))
-
- if not Options["No-Action"]:
- filename = "%s/testing-processed" % (Cnf["Dir::Log"])
- file = utils.open_file(filename, 'a')
- for source in package_list.keys():
- for version in package_list[source].keys():
- file.write(" ".join([source, version])+'\n')
- file.close()
-
-def remove_from_buildd(suites, filename):
- """Check the buildd dir for each suite and remove the file if needed"""
- builddbase = Cnf["Dir::QueueBuild"]
- filebase = os.path.basename(filename)
- for s in suites:
- try:
- os.unlink(os.path.join(builddbase, s, filebase))
- except OSError, e:
- pass
- # About no value printing this warning - it only confuses the security team,
- # yet makes no difference otherwise.
- #utils.warn("Problem removing %s from buildd queue %s [%s]" % (filebase, s, str(e)))
-
-
-def generate_advisory(template):
- global changes, advisory
-
- adv_packages = []
- updated_pkgs = {}; # updated_pkgs[distro][arch][file] = {path,md5,size}
-
- for arg in changes:
- arg = utils.validate_changes_file_arg(arg)
- Upload.pkg.changes_file = arg
- Upload.init_vars()
- Upload.update_vars()
-
- src = Upload.pkg.changes["source"]
- src_ver = "%s (%s)" % (src, Upload.pkg.changes["version"])
- if src_ver not in adv_packages:
- adv_packages.append(src_ver)
-
- suites = Upload.pkg.changes["distribution"].keys()
- for suite in suites:
- if not updated_pkgs.has_key(suite):
- updated_pkgs[suite] = {}
-
- files = Upload.pkg.files
- for file in files.keys():
- arch = files[file]["architecture"]
- md5 = files[file]["md5sum"]
- size = files[file]["size"]
- poolname = Cnf["Dir::PoolRoot"] + \
- utils.poolify(src, files[file]["component"])
- if arch == "source" and file.endswith(".dsc"):
- dscpoolname = poolname
- for suite in suites:
- if not updated_pkgs[suite].has_key(arch):
- updated_pkgs[suite][arch] = {}
- updated_pkgs[suite][arch][file] = {
- "md5": md5, "size": size, "poolname": poolname }
-
- dsc_files = Upload.pkg.dsc_files
- for file in dsc_files.keys():
- arch = "source"
- if not dsc_files[file].has_key("files id"):
- continue
-
- # otherwise, it's already in the pool and needs to be
- # listed specially
- md5 = dsc_files[file]["md5sum"]
- size = dsc_files[file]["size"]
- for suite in suites:
- if not updated_pkgs[suite].has_key(arch):
- updated_pkgs[suite][arch] = {}
- updated_pkgs[suite][arch][file] = {
- "md5": md5, "size": size, "poolname": dscpoolname }
-
- if os.environ.has_key("SUDO_UID"):
- whoami = long(os.environ["SUDO_UID"])
- else:
- whoami = os.getuid()
- whoamifull = pwd.getpwuid(whoami)
- username = whoamifull[4].split(",")[0]
-
- Subst = {
- "__ADVISORY__": advisory,
- "__WHOAMI__": username,
- "__DATE__": time.strftime("%B %d, %Y", time.gmtime(time.time())),
- "__PACKAGE__": ", ".join(adv_packages),
- "__DAK_ADDRESS__": Cnf["Dinstall::MyEmailAddress"]
- }
-
- if Cnf.has_key("Dinstall::Bcc"):
- Subst["__BCC__"] = "Bcc: %s" % (Cnf["Dinstall::Bcc"])
-
- adv = ""
- archive = Cnf["Archive::%s::PrimaryMirror" % (utils.where_am_i())]
- for suite in updated_pkgs.keys():
- ver = Cnf["Suite::%s::Version" % suite]
- if ver != "": ver += " "
- suite_header = "%s %s(%s)" % (Cnf["Dinstall::MyDistribution"],
- ver, suite)
- adv += "%s\n%s\n\n" % (suite_header, "-"*len(suite_header))
-
- arches = [x.arch_name for x in get_suite_architectures(suite)]
- if "source" in arches:
- arches.remove("source")
- if "all" in arches:
- arches.remove("all")
- arches.sort()
-
- adv += "%s updates are available for %s.\n\n" % (
- suite.capitalize(), utils.join_with_commas_and(arches))
-
- for a in ["source", "all"] + arches:
- if not updated_pkgs[suite].has_key(a):
- continue
-
- if a == "source":
- adv += "Source archives:\n\n"
- elif a == "all":
- adv += "Architecture independent packages:\n\n"
- else:
- adv += "%s architecture (%s)\n\n" % (a,
- Cnf["Architectures::%s" % a])
-
- for file in updated_pkgs[suite][a].keys():
- adv += " http://%s/%s%s\n" % (
- archive, updated_pkgs[suite][a][file]["poolname"], file)
- adv += " Size/MD5 checksum: %8s %s\n" % (
- updated_pkgs[suite][a][file]["size"],
- updated_pkgs[suite][a][file]["md5"])
- adv += "\n"
- adv = adv.rstrip()
-
- Subst["__ADVISORY_TEXT__"] = adv
-
- adv = utils.TemplateSubst(Subst, template)
- return adv
def spawn(command):
if not re_taint_free.match(command):
if (result != 0):
utils.fubar("Invocation of '%s' failed:\n%s\n" % (command, output), result)
-
##################### ! ! ! N O T E ! ! ! #####################
#
# These functions will be reinvoked by semi-priveleged users, be careful not
def sudo(arg, fn, exit):
if Options["Sudo"]:
- if advisory == None:
- utils.fubar("Must set advisory name")
os.spawnl(os.P_WAIT, "/usr/bin/sudo", "/usr/bin/sudo", "-u", "dak", "-H",
- "/usr/local/bin/dak", "new-security-install", "-"+arg, "--", advisory)
+ "/usr/local/bin/dak", "new-security-install", "-"+arg)
else:
fn()
if exit:
def do_Approve(): sudo("A", _do_Approve, True)
def _do_Approve():
- # 1. dump advisory in drafts
- draft = "/org/security.debian.org/advisories/drafts/%s" % (advisory)
- print "Advisory in %s" % (draft)
- if not Options["No-Action"]:
- adv_file = "./advisory.%s" % (advisory)
- if not os.path.exists(adv_file):
- adv_file = Cnf["Dir::Templates"]+"/security-install.advisory"
- adv_fd = os.open(draft, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0664)
- os.write(adv_fd, generate_advisory(adv_file))
- os.close(adv_fd)
- adv_fd = None
-
- # 2. run dak process-accepted on changes
- print "Accepting packages..."
- spawn("dak process-accepted -pa %s" % (" ".join(changes)))
-
- # 3. run dak make-suite-file-list / apt-ftparchve / dak generate-releases
- print "Updating file lists for apt-ftparchive..."
- spawn("dak make-suite-file-list")
+ # 1. use process-policy to go through the COMMENTS dir
+ spawn("dak process-policy embargo")
+ spawn("dak process-policy disembargo")
+ newstage=get_policy_queue('newstage')
+
+ # 2. sync the stuff to ftpmaster
+ print "Sync stuff for upload to ftpmaster"
+ spawn("rsync -a -q %s/. /srv/queued/ftpmaster/." % (newstage.path))
+
+ # 3. Now run process-upload in the newstage dir
+ print "Now put it into the security archive"
+ spawn("dak process-upload -a -d %s" % (newstage.path))
+
+ # 4. Run all the steps that are needed to publish the changed archive
+ print "Domination"
+ spawn("dak dominate")
+ print "Generating filelist for apt-ftparchive"
spawn("dak generate-filelist")
- print "Updating Packages and Sources files..."
- spawn("/org/security.debian.org/dak/config/debian-security/map.sh")
+ print "Updating Packages and Sources files... This may take a while, be patient"
+ spawn("/srv/security-master.debian.org/dak/config/debian-security/map.sh")
spawn("apt-ftparchive generate %s" % (utils.which_apt_conf_file()))
print "Updating Release files..."
spawn("dak generate-releases")
print "Triggering security mirrors..."
+ spawn("/srv/security-master.debian.org/dak/config/debian-security/make-mirror.sh")
spawn("sudo -u archvsync -H /home/archvsync/signal_security")
- # 4. chdir to done - do upload
- if not Options["No-Action"]:
- os.chdir(Cnf["Dir::Queue::Done"])
- do_upload()
+########################################################################
+########################################################################
-def do_Disembargo(): sudo("D", _do_Disembargo, True)
-def _do_Disembargo():
- if os.getcwd() != Cnf["Dir::Queue::Embargoed"].rstrip("/"):
- utils.fubar("Can only disembargo from %s" % Cnf["Dir::Queue::Embargoed"])
+def main():
+ global Options, Logger, Queue, changes
+ cnf = Config()
- session = DBConn().session()
+ Arguments = [('h', "Help", "Security::Options::Help"),
+ ('n', "No-Action", "Security::Options::No-Action"),
+ ('c', 'Changesfile', "Security::Options::Changesfile"),
+ ('s', "Sudo", "Security::Options::Sudo"),
+ ('A', "Approve", "Security::Options::Approve")
+ ]
- dest = Cnf["Dir::Queue::Unembargoed"]
- emb_q = get_build_queue("embargoed", session)
- une_q = get_build_queue("unembargoed", session)
-
- for c in changes:
- print "Disembargoing %s" % (c)
-
- Upload.init_vars()
- Upload.pkg.changes_file = c
- Upload.update_vars()
-
- if "source" in Upload.pkg.changes["architecture"].keys():
- print "Adding %s %s to disembargo table" % (Upload.pkg.changes["source"], Upload.pkg.changes["version"])
- session.execute("INSERT INTO disembargo (package, version) VALUES (:package, :version)",
- {'package': Upload.pkg.changes["source"], 'version': Upload.pkg.changes["version"]})
-
- files = {}
- for suite in Upload.pkg.changes["distribution"].keys():
- if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
- continue
- dest_dir = Cnf["Dir::QueueBuild"]
- if Cnf.FindB("Dinstall::SecurityQueueBuild"):
- dest_dir = os.path.join(dest_dir, suite)
- for file in Upload.pkg.files.keys():
- files[os.path.join(dest_dir, file)] = 1
-
- files = files.keys()
- for f in files:
- session.execute("UPDATE queue_build SET queue = :unembargoed WHERE filename = :filename AND queue = :embargoed",
- {'unembargoed': une_q.queue_id, 'filename': f, 'embargoed': emb_q.queue_id})
- session.commit()
-
- for file in Upload.pkg.files.keys():
- utils.copy(file, os.path.join(dest, file))
- os.unlink(file)
-
- for c in changes:
- utils.copy(c, os.path.join(dest, c))
- os.unlink(c)
- k = c[:-8] + ".dak"
- utils.copy(k, os.path.join(dest, k))
- os.unlink(k)
-
- session.commit()
-
-def do_Reject(): sudo("R", _do_Reject, True)
-def _do_Reject():
- global changes
+ for i in ["Help", "No-Action", "Changesfile", "Sudo", "Approve"]:
+ if not cnf.has_key("Security::Options::%s" % (i)):
+ cnf["Security::Options::%s" % (i)] = ""
- session = DBConn().session()
+ changes_files = apt_pkg.ParseCommandLine(cnf.Cnf, Arguments, sys.argv)
- for c in changes:
- print "Rejecting %s..." % (c)
- Upload.init_vars()
- Upload.pkg.changes_file = c
- Upload.update_vars()
- files = {}
- for suite in Upload.pkg.changes["distribution"].keys():
- if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
- continue
- dest_dir = Cnf["Dir::QueueBuild"]
- if Cnf.FindB("Dinstall::SecurityQueueBuild"):
- dest_dir = os.path.join(dest_dir, suite)
- for file in Upload.pkg.files.keys():
- files[os.path.join(dest_dir, file)] = 1
-
- files = files.keys()
-
- aborted = Upload.do_reject()
- if not aborted:
- os.unlink(c[:-8]+".dak")
- for f in files:
- session.execute("DELETE FROM queue_build WHERE filename = :filename",
- {'filename': f})
- os.unlink(f)
-
- print "Updating buildd information..."
- spawn("/org/security.debian.org/dak/config/debian-security/cron.buildd")
-
- adv_file = "./advisory.%s" % (advisory)
- if os.path.exists(adv_file):
- os.unlink(adv_file)
-
- session.commit()
-
-def do_DropAdvisory():
- for c in changes:
- Upload.init_vars()
- Upload.pkg.changes_file = c
- Upload.update_vars()
- del Upload.pkg.changes["adv id"]
- Upload.dump_vars(os.getcwd())
- quit()
-
-def do_Edit():
- adv_file = "./advisory.%s" % (advisory)
- if not os.path.exists(adv_file):
- utils.copy(Cnf["Dir::Templates"]+"/security-install.advisory", adv_file)
- editor = os.environ.get("EDITOR", "vi")
- result = os.system("%s %s" % (editor, adv_file))
- if result != 0:
- utils.fubar("%s invocation failed for %s." % (editor, adv_file))
-
-def do_Show():
- adv_file = "./advisory.%s" % (advisory)
- if not os.path.exists(adv_file):
- adv_file = Cnf["Dir::Templates"]+"/security-install.advisory"
- print "====\n%s\n====" % (generate_advisory(adv_file))
-
-def do_Quit():
- quit()
+ Options = cnf.SubTree("Security::Options")
+ if Options['Help']:
+ usage()
-def main():
- global changes
-
- args = init()
- extras = load_args(args)
- if advisory:
- load_adv_changes()
- if extras:
- if not advisory:
- changes = extras
- else:
- if srcverarches == {}:
- if not yes_no("Create new advisory %s?" % (advisory)):
- print "Not doing anything, then"
- quit()
- else:
- advisory_info()
- doextras = []
- for c in extras:
- if yes_no("Add %s to %s?" % (c, advisory)):
- doextras.append(c)
- extras = doextras
- add_changes(extras)
-
- if not advisory:
- utils.fubar("Must specify an advisory id")
-
- if not changes:
- utils.fubar("No changes specified")
+ changesfiles={}
+ for a in changes_files:
+ if not a.endswith(".changes"):
+ utils.fubar("not a .changes file: %s" % (a))
+ changesfiles[a]=1
+ changes = changesfiles.keys()
+
+ username = utils.getusername()
+ if username != "dak":
+ print "Non-dak user: %s" % username
+ Options["Sudo"] = "y"
+
+ if Options["No-Action"]:
+ Options["Sudo"] = ""
+
+ if not Options["Sudo"] and not Options["No-Action"]:
+ Logger = daklog.Logger(cnf.Cnf, "security-install")
+
+ session = DBConn().session()
+ # If we call ourselves to approve, we do just that and exit
if Options["Approve"]:
- advisory_info()
do_Approve()
- elif Options["Reject"]:
- advisory_info()
- do_Reject()
- elif Options["Disembargo"]:
- advisory_info()
- do_Disembargo()
- elif Options["Drop-Advisory"]:
- advisory_info()
- do_DropAdvisory()
- else:
- while 1:
- default = "Q"
- opts = ["Approve", "Edit advisory"]
- if os.path.exists("./advisory.%s" % advisory):
- default = "A"
- else:
- default = "E"
- if os.getcwd() == Cnf["Dir::Queue::Embargoed"].rstrip("/"):
- opts.append("Disembargo")
- opts += ["Show advisory", "Reject", "Quit"]
-
- advisory_info()
- what = prompt(opts, default)
-
- if what == "Quit":
- do_Quit()
- elif what == "Approve":
- do_Approve()
- elif what == "Edit advisory":
- do_Edit()
- elif what == "Show advisory":
- do_Show()
- elif what == "Disembargo":
- do_Disembargo()
- elif what == "Reject":
- do_Reject()
- else:
- utils.fubar("Impossible answer '%s', wtf?" % (what))
+ sys.exit()
+
+ if len(changes) == 0:
+ utils.fubar("Need changes files as arguments")
+
+ # Yes, we could do this inside do_Approve too. But this way we see who exactly
+ # called it (ownership of the file)
+ dbchange=get_dbchange(os.path.basename(changes[0]), session)
+ # strip epoch from version
+ version=dbchange.version
+ version=version[(version.find(':')+1):]
+ acceptfilename="%s/COMMENTS/ACCEPT.%s_%s" % (os.path.dirname(os.path.abspath(changes[0])), dbchange.source, version)
+ if Options["No-Action"]:
+ print "Would create %s now and then go on to accept this package, but No-Action is set" % (acceptfilename)
+ sys.exit(0)
+ accept_file = file(acceptfilename, "w")
+ accept_file.write("OK\n")
+ accept_file.close()
+ do_Approve()
-################################################################################
if __name__ == '__main__':
main()
-
-################################################################################
Make microchanges or microqueries of the binary overrides
-h, --help show this help and exit
+ -c, --check check override compliance
-d, --done=BUG# send priority/section change as closure to bug#
-n, --no-action don't do anything
-s, --suite specify the suite to use
"""
sys.exit(exit_code)
+def check_override_compliance(package, priority, suite, cnf, session):
+ print "Checking compliance with related overrides..."
+
+ depends = set()
+ rdepends = set()
+ components = cnf.ValueList("Suite::%s::Components" % suite)
+ arches = set([x.arch_string for x in get_suite_architectures(suite)])
+ arches -= set(["source", "all"])
+ for arch in arches:
+ for component in components:
+ Packages = utils.get_packages_from_ftp(cnf['Dir::Root'], suite, component, arch)
+ while Packages.Step():
+ package_name = Packages.Section.Find("Package")
+ dep_list = Packages.Section.Find("Depends")
+ if dep_list:
+ if package_name == package:
+ for d in apt_pkg.ParseDepends(dep_list):
+ for i in d:
+ depends.add(i[0])
+ else:
+ for d in apt_pkg.ParseDepends(dep_list):
+ for i in d:
+ if i[0] == package:
+ rdepends.add(package_name)
+
+ query = """SELECT o.package, p.level, p.priority
+ FROM override o
+ JOIN suite s ON s.id = o.suite
+ JOIN priority p ON p.id = o.priority
+ WHERE s.suite_name = '%s'
+ AND o.package in ('%s')""" \
+ % (suite, "', '".join(depends.union(rdepends)))
+ packages = session.execute(query)
+
+ excuses = []
+ for p in packages:
+ if p[0] == package or not p[1]:
+ continue
+ if p[0] in depends:
+ if priority.level < p[1]:
+ excuses.append("%s would have priority %s, its dependency %s has priority %s" \
+ % (package, priority.priority, p[0], p[2]))
+ if p[0] in rdepends:
+ if priority.level > p[1]:
+ excuses.append("%s would have priority %s, its reverse dependency %s has priority %s" \
+ % (package, priority.priority, p[0], p[2]))
+
+ if excuses:
+ for ex in excuses:
+ print ex
+ else:
+ print "Proposed override change complies with Debian Policy"
+
def main ():
cnf = Config()
Arguments = [('h',"help","Override::Options::Help"),
+ ('c',"check","Override::Options::Check"),
('d',"done","Override::Options::Done", "HasArg"),
('n',"no-action","Override::Options::No-Action"),
('s',"suite","Override::Options::Suite", "HasArg"),
]
- for i in ["help", "no-action"]:
+ for i in ["help", "check", "no-action"]:
if not cnf.has_key("Override::Options::%s" % (i)):
cnf["Override::Options::%s" % (i)] = ""
if not cnf.has_key("Override::Options::Suite"):
if oldpriority == 'source' and newpriority != 'source':
utils.fubar("Trying to change priority of a source-only package")
+ if Options["Check"] and newpriority != oldpriority:
+ check_override_compliance(package, p, suite, cnf, session)
+
# If we're in no-action mode
if Options["No-Action"]:
if newpriority != oldpriority:
--- /dev/null
+#!/usr/bin/env python
+
+"""
+Generate a list of override disparities
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2010 Luca Falavigna <dktrkranz@debian.org>
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+################################################################################
+
+# <adsb> Yay bugzilla *sigh*
+# <phil> :)
+# <Ganneff> quick, replace the bts with it
+# * jcristau replaces dak with soyuz
+# <adsb> and expects Ganneff to look after it?
+# <jcristau> nah, elmo can do that
+# * jcristau hides
+
+################################################################################
+
+import os
+import sys
+import apt_pkg
+import yaml
+
+from daklib.config import Config
+from daklib.dbconn import *
+from daklib import utils
+
+################################################################################
+
+def usage (exit_code=0):
+ print """Generate a list of override disparities
+
+ Usage:
+ dak override-disparity [-f <file>] [ -p <package> ] [ -s <suite> ]
+
+Options:
+
+ -h, --help show this help and exit
+ -f, --file store output into given file
+ -p, --package limit check on given package only
+ -s, --suite choose suite to look for (default: unstable)"""
+
+ sys.exit(exit_code)
+
+def main():
+ cnf = Config()
+ Arguments = [('h','help','Override-Disparity::Options::Help'),
+ ('f','file','Override-Disparity::Options::File','HasArg'),
+ ('s','suite','Override-Disparity::Options::Suite','HasArg'),
+ ('p','package','Override-Disparity::Options::Package','HasArg')]
+
+ for i in ['help', 'package']:
+ if not cnf.has_key('Override-Disparity::Options::%s' % (i)):
+ cnf['Override-Disparity::Options::%s' % (i)] = ''
+ if not cnf.has_key('Override-Disparity::Options::Suite'):
+ cnf['Override-Disparity::Options::Suite'] = 'unstable'
+
+ apt_pkg.ParseCommandLine(cnf.Cnf, Arguments, sys.argv)
+ Options = cnf.SubTree('Override-Disparity::Options')
+
+ if Options['help']:
+ usage()
+
+ depends = {}
+ session = DBConn().session()
+ suite = Options['suite']
+ components = cnf.ValueList('Suite::%s::Components' % suite)
+ arches = set([x.arch_string for x in get_suite_architectures(suite)])
+ arches -= set(['source', 'all'])
+ for arch in arches:
+ for component in components:
+ Packages = utils.get_packages_from_ftp(cnf['Dir::Root'], suite, component, arch)
+ while Packages.Step():
+ package = Packages.Section.Find('Package')
+ dep_list = Packages.Section.Find('Depends')
+ if Options['package'] and package != Options['package']:
+ continue
+ if dep_list:
+ for d in apt_pkg.ParseDepends(dep_list):
+ for i in d:
+ if not depends.has_key(package):
+ depends[package] = set()
+ depends[package].add(i[0])
+
+ priorities = {}
+ query = """SELECT DISTINCT o.package, p.level, p.priority, m.name
+ FROM override o
+ JOIN suite s ON s.id = o.suite
+ JOIN priority p ON p.id = o.priority
+ JOIN binaries b ON b.package = o.package
+ JOIN maintainer m ON m.id = b.maintainer
+ JOIN bin_associations ba ON ba.bin = b.id
+ WHERE s.suite_name = '%s'
+ AND ba.suite = s.id
+ AND p.level <> 0""" % suite
+ packages = session.execute(query)
+
+ out = {}
+ if Options.has_key('file'):
+ outfile = file(os.path.expanduser(Options['file']), 'w')
+ else:
+ outfile = sys.stdout
+ for p in packages:
+ priorities[p[0]] = [p[1], p[2], p[3], True]
+ for d in sorted(depends.keys()):
+ for p in depends[d]:
+ if priorities.has_key(d) and priorities.has_key(p):
+ if priorities[d][0] < priorities[p][0]:
+ if priorities[d][3]:
+ if not out.has_key(d):
+ out[d] = {}
+ out[d]['priority'] = priorities[d][1]
+ out[d]['maintainer'] = priorities[d][2]
+ out[d]['priority'] = priorities[d][1]
+ priorities[d][3] = False
+ if not out[d].has_key('dependency'):
+ out[d]['dependency'] = {}
+ out[d]['dependency'][p] = priorities[p][1]
+ yaml.dump(out, outfile, default_flow_style=False)
+ if Options.has_key('file'):
+ outfile.close()
+
+if __name__ == '__main__':
+ main()
for comm in [ x for x in os.listdir(dir) if x.startswith(opref) ]:
lines = open("%s/%s" % (dir, comm)).readlines()
if len(lines) == 0 or lines[0] != line + "\n": continue
- changes_files = [ x for x in os.listdir(".") if x.startswith(comm[7:]+"_")
+ changes_files = [ x for x in os.listdir(".") if x.startswith(comm[len(opref):]+"_")
and x.endswith(".changes") ]
changes_files = sort_changes(changes_files, session)
for f in changes_files:
# The comments stuff relies on being in the right directory
os.chdir(pq.path)
do_comments(commentsdir, pq, "ACCEPT.", "ACCEPTED.", "OK", comment_accept, session)
+ do_comments(commentsdir, pq, "ACCEPTED.", "ACCEPTED.", "OK", comment_accept, session)
do_comments(commentsdir, pq, "REJECT.", "REJECTED.", "NOTOK", comment_reject, session)
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import os
+from shutil import copyfile
from daklib import utils
from daklib.dbconn import *
suite = get_suite(suite_name, session)
for q in suite.copy_queues:
for f in u.pkg.files.keys():
- os.symlink(os.path.join(polq.path, f), os.path.join(q.path, f))
+ copyfile(os.path.join(polq.path, f), os.path.join(q.path, f))
#
#################################################################################
#
suite = get_suite(suite_name, session)
for q in suite.copy_queues:
for f in u.pkg.files.keys():
- os.symlink(os.path.join(polq.path, f), os.path.join(q.path, f))
+ copyfile(os.path.join(polq.path, f), os.path.join(q.path, f))
################################################################################
bugs.append(bug_no)
wnpp[source] = bugs
return wnpp
+
+################################################################################
+
+def get_packages_from_ftp(root, suite, component, architecture):
+ """
+ Returns an object containing apt_pkg-parseable data collected by
+ aggregating Packages.gz files gathered for each architecture.
+
+ @type root: string
+ @param root: path to ftp archive root directory
+
+ @type suite: string
+ @param suite: suite to extract files from
+
+ @type component: string
+ @param component: component to extract files from
+
+ @type architecture: string
+ @param architecture: architecture to extract files from
+
+ @rtype: TagFile
+ @return: apt_pkg class containing package data
+
+ """
+ filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (root, suite, component, architecture)
+ (fd, temp_file) = temp_filename()
+ (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_file))
+ if (result != 0):
+ fubar("Gunzip invocation failed!\n%s\n" % (output), result)
+ filename = "%s/dists/%s/%s/debian-installer/binary-%s/Packages.gz" % (root, suite, component, architecture)
+ if os.path.exists(filename):
+ (result, output) = commands.getstatusoutput("gunzip -c %s >> %s" % (filename, temp_file))
+ if (result != 0):
+ fubar("Gunzip invocation failed!\n%s\n" % (output), result)
+ packages = open_file(temp_file)
+ Packages = apt_pkg.ParseTagFile(packages)
+ os.unlink(temp_file)
+ return Packages
o To generate indices files:
- * dak dominate - removes obsolete packages from suites
+ * dak dominate - removes obsolete packages from suites
* dak generate-filelist - generates file lists for apt-ftparchive
* dak generate-releases - generates Release
$ssh_key_file = "";
# the incoming dir we live in
-$incoming = "/srv/queued/UploadQueue";
+$incoming = "/srv/queued/ftpmaster";
# the delayed incoming directories
$incoming_delayed = "/srv/queued/UploadQueue/DELAYED/%d-day";
# mail address of maintainer
$maintainer_mail = "ftpmaster\@debian.org";
+# to override the TO address of ALL outgoing mail, set this value.
+$overridemail = "dak\@security.debian.org";
+
# logfile rotating:
# -----------------
my $subject = shift;
my $text = shift;
+# security is special
+ $addr = 'team@security.debian.org';
+
my $package =
keys %main::packages ? join( ' ', keys %main::packages ) : "";