From: Joerg Jaspert
Date: Sat, 26 Mar 2011 11:08:41 +0000 (+0100)
Subject: Merge remote-tracking branch 'ansgar/p-s-from-db' into merge
X-Git-Url: https://git.decadent.org.uk/gitweb/?p=dak.git;a=commitdiff_plain;h=e1156b3b857f5496a299e621d291cff0ba957d23;hp=706f80ddedb47b244c8bb819f973eeea6ba6b9f3

Merge remote-tracking branch 'ansgar/p-s-from-db' into merge

* ansgar/p-s-from-db:
  Add order column to metadata_keys
  generate-packages-sources2: various bugs fixed
  sort generated Packages and Sources
  generate Packages/Sources directly from database

Signed-off-by: Joerg Jaspert
---

diff --git a/config/backports/cron.hourly b/config/backports/cron.hourly
index 24b8f90f..fcab3b35 100755
--- a/config/backports/cron.hourly
+++ b/config/backports/cron.hourly
@@ -25,3 +25,19 @@ dak show-new > /dev/null
 $base/dak/tools/removals.pl $configdir/removalsrss.rc > $webdir/removals.rss
 
 #$scriptsdir/generate-di
+
+# do the buildd key updates
+BUILDDFUN=$(mktemp -p "${TMPDIR}" BUILDDFUN.XXXXXX)
+exec >> "${BUILDDFUN}" 2>&1
+#${scriptsdir}/buildd-remove-keys
+#${scriptsdir}/buildd-add-keys
+#${scriptsdir}/buildd-prepare-dir
+for keyring in $(dak admin k list-binary); do
+    dak import-keyring --generate-users "%s" ${keyring}
+done
+exec >>/dev/null 2>&1
+
+DATE=$(date -Is)
+cat "${BUILDDFUN}" | mail -a "X-Debian: DAK" -e -s "[$(hostname -s)] Buildd key changes ${DATE}" buildd-keys@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
+
+rm -f "${BUILDDFUN}"
diff --git a/config/debian-security/cron.hourly b/config/debian-security/cron.hourly
new file mode 100755
index 00000000..ddbf09fc
--- /dev/null
+++ b/config/debian-security/cron.hourly
@@ -0,0 +1,43 @@
+#! /bin/bash
+#
+# Executed hourly via cron, out of dak's crontab.
+
+set -e
+set -u
+
+export SCRIPTVARS=/srv/security-master.debian.org/dak/config/debian-security/vars
+. $SCRIPTVARS
+
+dak import-users-from-passwd
+# dak queue-report -n > $webdir/new.html
+# dak queue-report -8 -d new,byhand,proposedupdates,oldproposedupdates -r $webdir/stat
+# dak show-deferred -r $webdir/stat > ${webdir}/deferred.html
+# dak graph -n new,byhand,proposedupdates,oldproposedupdates,deferred -r $webdir/stat -i $webdir/stat -x $scriptsdir/rrd-release-freeze-dates
+# dak show-new > /dev/null
+
+# cd $webdir
+# cat removals-20*.txt > removals-full.txt
+# cat removals.txt >> removals-full.txt
+# cat removals-20*.822 > removals-full.822
+# cat removals.822 >> removals-full.822
+
+#$base/dak/tools/queue_rss.py -q $queuedir/new -o $webdir/rss/ -d $base/misc -l $base/log/
+#$base/dak/tools/removals.pl $configdir/removalsrss.rc > $webdir/removals.rss
+
+#$scriptsdir/generate-di
+
+# do the buildd key updates
+BUILDDFUN=$(mktemp -p "${TMPDIR}" BUILDDFUN.XXXXXX)
+exec >> "${BUILDDFUN}" 2>&1
+#${scriptsdir}/buildd-remove-keys
+#${scriptsdir}/buildd-add-keys
+#${scriptsdir}/buildd-prepare-dir
+for keyring in $(dak admin k list-binary); do
+    dak import-keyring --generate-users "%s" ${keyring}
+done
+exec >>/dev/null 2>&1
+
+DATE=$(date -Is)
+cat "${BUILDDFUN}" | mail -a "X-Debian: DAK" -e -s "[$(hostname -s)] Buildd key changes ${DATE}" buildd-keys@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
+
+rm -f "${BUILDDFUN}"
diff --git a/config/debian/cron.hourly b/config/debian/cron.hourly
index c20f4d1d..aef24c09 100755
--- a/config/debian/cron.hourly
+++ b/config/debian/cron.hourly
@@ -30,7 +30,26 @@ cat removals.822 >> removals-full.822
 
 $base/dak/tools/queue_rss.py -q $queuedir/new -o $webdir/rss/ -d $base/misc -l $base/log/
 $base/dak/tools/removals.pl $configdir/removalsrss.rc > $webdir/rss/removals.rss
+
 # Tell ries to sync its tree
 ssh -o Batchmode=yes -o ConnectTimeout=30 -o SetupTimeout=30 -2 -i ${base}/s3kr1t/pushddmirror dak@ries.debian.org sync
 
 $scriptsdir/generate-di
+
+
+# do the buildd key updates
+BUILDDFUN=$(mktemp -p "${TMPDIR}" BUILDDFUN.XXXXXX)
+exec >> "${BUILDDFUN}" 2>&1
+${scriptsdir}/buildd-remove-keys
+${scriptsdir}/buildd-add-keys
+${scriptsdir}/buildd-prepare-dir
+
+for keyring in $(dak admin k list-binary); do
+    dak import-keyring --generate-users "%s" ${keyring}
+done
+exec >>/dev/null 2>&1
+
+DATE=$(date -Is)
+cat "${BUILDDFUN}" | mail -a "X-Debian: DAK" -e -s "[$(hostname -s)] Buildd key changes ${DATE}" buildd-keys@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
+
+rm -f "${BUILDDFUN}"
diff --git a/config/homedir/syncdd.sh b/config/homedir/syncdd.sh
index 652c29a0..9260e764 100755
--- a/config/homedir/syncdd.sh
+++ b/config/homedir/syncdd.sh
@@ -75,31 +75,33 @@ trap cleanup EXIT TERM HUP INT QUIT
 
 if lockfile -r3 ${HOME}/sync.lock; then
     cd $base/
     rsync -aH -B8192 \
-        --exclude backup/*.xz \
-        --exclude backup/dump* \
+          --exclude backup/*.xz \
+          --exclude backup/dump* \
+          --exclude database/*.db \
         ${EXTRA} \
-        --exclude mirror \
-        --exclude morgue/ \
-        --exclude=lost+found/ \
-        --exclude .da-backup.trace \
-        --delete \
-        --delete-after \
-        --timeout 3600 \
-        -e 'ssh -o ConnectTimeout=30 -o SetupTimeout=30' \
-        ftpmaster-sync:/srv/ftp-master.debian.org/ .
+          --exclude mirror \
+          --exclude morgue/ \
+          --exclude=lost+found/ \
+          --exclude .da-backup.trace \
+          --exclude lock/stages/ \
+          --delete \
+          --delete-after \
+          --timeout 3600 \
+          -e 'ssh -o ConnectTimeout=30 -o SetupTimeout=30' \
+          ftpmaster-sync:/srv/ftp-master.debian.org/ .
 
     cd $public/
     rsync -aH -B8192 \
-        --exclude mirror \
-        --exclude rsync/ \
-        --exclude=lost+found/ \
-        --exclude .da-backup.trace \
-        --exclude web-users/ \
-        --delete \
-        --delete-after \
-        --timeout 3600 \
-        -e 'ssh -o ConnectTimeout=30 -o SetupTimeout=30' \
-        ftpmaster-sync2:/srv/ftp.debian.org/ .
+          --exclude mirror \
+          --exclude rsync/ \
+          --exclude=lost+found/ \
+          --exclude .da-backup.trace \
+          --exclude web-users/ \
+          --delete \
+          --delete-after \
+          --timeout 3600 \
+          -e 'ssh -o ConnectTimeout=30 -o SetupTimeout=30' \
+          ftpmaster-sync2:/srv/ftp.debian.org/ .
 else
     echo "Couldn't get the lock, not syncing"
diff --git a/dak/admin.py b/dak/admin.py
index d159651e..1dc7e7bc 100755
--- a/dak/admin.py
+++ b/dak/admin.py
@@ -61,6 +61,11 @@ Perform administrative work on the dak database.
      c db-shell            show db config in a usable form for psql
      c NAME                show option NAME as set in configuration table
 
+  keyring / k:
+     k list-all            list all keyrings
+     k list-binary         list all keyrings with a NULL source acl
+     k list-source         list all keyrings with a non NULL source acl
+
   architecture / a:
      a list                show a list of architectures
      a rm ARCH             remove an architecture (will only work if
@@ -469,6 +474,35 @@ dispatch['c'] = show_config
 
 ################################################################################
 
+def show_keyring(command):
+    args = [str(x) for x in command]
+    cnf = utils.get_conf()
+
+    die_arglen(args, 2, "E: keyring needs at least a command")
+
+    mode = args[1].lower()
+
+    d = DBConn()
+
+    q = d.session().query(Keyring).filter(Keyring.active == True)
+
+    if mode == 'list-all':
+        pass
+    elif mode == 'list-binary':
+        q = q.filter(Keyring.default_source_acl_id == None)
+    elif mode == 'list-source':
+        q = q.filter(Keyring.default_source_acl_id != None)
+    else:
+        die("E: keyring command unknown")
+
+    for k in q.all():
+        print k.keyring_name
+
+dispatch['keyring'] = show_keyring
+dispatch['k'] = show_keyring
+
+################################################################################
+
 def main():
     """Perform administrative work on the dak database"""
     global dryrun
diff --git a/dak/dakdb/update55.py b/dak/dakdb/update55.py
new file mode 100755
index 00000000..3328e85c
--- /dev/null
+++ b/dak/dakdb/update55.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+# coding=utf8
+
+"""
+Drop unused view bin_assoc_by_arch.
+
+@contact: Debian FTP Master
+@copyright: 2011 Torsten Werner
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+################################################################################
+
+import psycopg2
+from daklib.dak_exceptions import DBUpdateError
+from socket import gethostname;
+
+################################################################################
+def do_update(self):
+    """
+    Drop unused view bin_assoc_by_arch.
+    """
+    print __doc__
+    try:
+        c = self.db.cursor()
+
+        c.execute("""
+            DROP VIEW bin_assoc_by_arch""")
+
+        c.execute("UPDATE config SET value = '55' WHERE name = 'db_revision'")
+        self.db.commit()
+
+    except psycopg2.ProgrammingError, msg:
+        self.db.rollback()
+        raise DBUpdateError, 'Unable to apply sick update 55, rollback issued. Error message : %s' % (str(msg))
diff --git a/dak/generate_filelist.py b/dak/generate_filelist.py
index 1f4d6654..2a6d218b 100755
--- a/dak/generate_filelist.py
+++ b/dak/generate_filelist.py
@@ -5,6 +5,7 @@ Generate file lists for apt-ftparchive.
 
 @contact: Debian FTP Master
 @copyright: 2009  Torsten Werner
+@copyright: 2011  Ansgar Burchardt
 @license: GNU General Public License version 2 or later
 """
 
@@ -37,8 +38,8 @@ Generate file lists for apt-ftparchive.
 
 from daklib.dbconn import *
 from daklib.config import Config
-from daklib.threadpool import ThreadPool
-from daklib import utils
+from daklib import utils, daklog
+from multiprocessing import Pool
 import apt_pkg, os, stat, sys
 
 from daklib.lists import getSources, getBinaries, getArchAll
@@ -64,37 +65,48 @@ def listPath(suite, component, architecture = None, type = None,
         file.truncate()
     return (file, timestamp)
 
-def writeSourceList(args):
-    (suite, component, incremental_mode) = args
+def writeSourceList(suite_id, component_id, incremental_mode):
+    session = DBConn().session()
+    suite = Suite.get(suite_id, session)
+    component = Component.get(component_id, session)
     (file, timestamp) = listPath(suite, component,
             incremental_mode = incremental_mode)
-    session = DBConn().session()
+
     for _, filename in getSources(suite, component, session, timestamp):
         file.write(filename + '\n')
     session.close()
     file.close()
+    return "sources list for %s %s" % (suite.suite_name, component.component_name)
 
-def writeAllList(args):
-    (suite, component, architecture, type, incremental_mode) = args
+def writeAllList(suite_id, component_id, architecture_id, type, incremental_mode):
+    session = DBConn().session()
+    suite = Suite.get(suite_id, session)
+    component = Component.get(component_id, session)
+    architecture = Architecture.get(architecture_id, session)
     (file, timestamp) = listPath(suite, component, architecture, type,
             incremental_mode)
-    session = DBConn().session()
+
     for _, filename in getArchAll(suite, component, architecture, type,
             session, timestamp):
         file.write(filename + '\n')
     session.close()
     file.close()
+    return "all list for %s %s (arch=%s, type=%s)" % (suite.suite_name, component.component_name, architecture.arch_string, type)
 
-def writeBinaryList(args):
-    (suite, component, architecture, type, incremental_mode) = args
+def writeBinaryList(suite_id, component_id, architecture_id, type, incremental_mode):
+    session = DBConn().session()
+    suite = Suite.get(suite_id, session)
+    component = Component.get(component_id, session)
+    architecture = Architecture.get(architecture_id, session)
     (file, timestamp) = listPath(suite, component, architecture, type,
             incremental_mode)
-    session = DBConn().session()
+
     for _, filename in getBinaries(suite, component, architecture, type,
             session, timestamp):
         file.write(filename + '\n')
     session.close()
     file.close()
+    return "binary list for %s %s (arch=%s, type=%s)" % (suite.suite_name, component.component_name, architecture.arch_string, type)
 
 def usage():
     print """Usage: dak generate_filelist [OPTIONS]
@@ -114,6 +126,7 @@ Incremental mode appends only newer files to existing lists."""
 
 def main():
     cnf = Config()
+    Logger = daklog.Logger(cnf, 'generate-filelist')
     Arguments = [('h', "help",         "Filelist::Options::Help"),
                  ('s', "suite",        "Filelist::Options::Suite", "HasArg"),
                  ('c', "component",    "Filelist::Options::Component", "HasArg"),
@@ -140,36 +153,44 @@ def main():
     Options = cnf.SubTree("Filelist::Options")
     if Options['Help']:
         usage()
-    threadpool = ThreadPool()
+    pool = Pool()
     query_suites = query_suites. \
         filter(Suite.suite_name.in_(utils.split_args(Options['Suite'])))
     query_components = query_components. \
         filter(Component.component_name.in_(utils.split_args(Options['Component'])))
     query_architectures = query_architectures. \
         filter(Architecture.arch_string.in_(utils.split_args(Options['Architecture'])))
+
+    def log(message):
+        Logger.log([message])
+
     for suite in query_suites:
+        suite_id = suite.suite_id
         for component in query_components:
+            component_id = component.component_id
             for architecture in query_architectures:
+                architecture_id = architecture.arch_id
                 if architecture not in suite.architectures:
                     pass
                 elif architecture.arch_string == 'source':
-                    threadpool.queueTask(writeSourceList,
-                        (suite, component, Options['Incremental']))
+                    pool.apply_async(writeSourceList,
+                        (suite_id, component_id, Options['Incremental']), callback=log)
                 elif architecture.arch_string == 'all':
-                    threadpool.queueTask(writeAllList,
-                        (suite, component, architecture, 'deb',
-                         Options['Incremental']))
-                    threadpool.queueTask(writeAllList,
-                        (suite, component, architecture, 'udeb',
-                         Options['Incremental']))
+                    pool.apply_async(writeAllList,
+                        (suite_id, component_id, architecture_id, 'deb',
+                         Options['Incremental']), callback=log)
+                    pool.apply_async(writeAllList,
+                        (suite_id, component_id, architecture_id, 'udeb',
+                         Options['Incremental']), callback=log)
                 else: # arch any
-                    threadpool.queueTask(writeBinaryList,
-                        (suite, component, architecture, 'deb',
-                         Options['Incremental']))
-                    threadpool.queueTask(writeBinaryList,
-                        (suite, component, architecture, 'udeb',
-                         Options['Incremental']))
-    threadpool.joinAll()
+                    pool.apply_async(writeBinaryList,
+                        (suite_id, component_id, architecture_id, 'deb',
+                         Options['Incremental']), callback=log)
+                    pool.apply_async(writeBinaryList,
+                        (suite_id, component_id, architecture_id, 'udeb',
+                         Options['Incremental']), callback=log)
+    pool.close()
+    pool.join()
     # this script doesn't change the database
     session.close()
diff --git a/dak/generate_releases.py b/dak/generate_releases.py
index e67bd91f..b21f30a5 100755
--- a/dak/generate_releases.py
+++ b/dak/generate_releases.py
@@ -138,9 +138,9 @@ class ReleaseWriter(object):
                     ('Codename',    'codename') )
 
     # A "Sub" Release file has slightly different fields
-    subattribs = ( ('Origin',      'origin'),
+    subattribs = ( ('Archive',     'suite_name'),
+                   ('Origin',      'origin'),
                    ('Label',       'label'),
-                   ('Archive',     'suite_name'),
                    ('Version',     'version') )
 
     # Boolean stuff. If we find it true in database, write out "yes" into the release file
@@ -182,12 +182,12 @@ class ReleaseWriter(object):
             out.write("Description: %s\n" % suite.description)
 
         for comp in components:
-            for dirpath, dirnames, filenames in os.walk("%sdists/%s/%s" % (cnf["Dir::Root"], suite.suite_name, comp), topdown=True):
+            for dirpath, dirnames, filenames in os.walk("%sdists/%s/%s%s" % (cnf["Dir::Root"], suite.suite_name, suite_suffix, comp), topdown=True):
                 if not re_gensubrelease.match(dirpath):
                     continue
                 subfile = os.path.join(dirpath, "Release")
-                subrel = open(subfile, "w")
+                subrel = open(subfile + '.new', "w")
 
                 for key, dbfield in subattribs:
                     if getattr(suite, dbfield) is not None:
@@ -198,8 +198,18 @@ class ReleaseWriter(object):
                         subrel.write("%s: yes\n" % (key))
 
                 subrel.write("Component: %s%s\n" % (suite_suffix, comp))
+
+                # Urgh, but until we have all the suite/component/arch stuff in the DB,
+                # this'll have to do
+                arch = os.path.split(dirpath)[-1]
+                if arch.startswith('binary-'):
+                    arch = arch[7:]
+
+                subrel.write("Architecture: %s\n" % (arch))
                 subrel.close()
 
+                os.rename(subfile + '.new', subfile)
+
         # Now that we have done the groundwork, we want to get off and add the files with
         # their checksums to the main Release file
         oldcwd = os.getcwd()
diff --git a/dak/update_db.py b/dak/update_db.py
index 88ff20f5..e09f3ad4 100755
--- a/dak/update_db.py
+++ b/dak/update_db.py
@@ -46,7 +46,7 @@ from daklib.daklog import Logger
 ################################################################################
 
 Cnf = None
-required_database_schema = 54
+required_database_schema = 55
 
 ################################################################################
 
diff --git a/daklib/config.py b/daklib/config.py
index 9993ec3a..ed8cf1d0 100755
--- a/daklib/config.py
+++ b/daklib/config.py
@@ -39,12 +39,9 @@ default_config = "/etc/dak/dak.conf" #: default dak config, defines host propert
 # suppress some deprecation warnings in squeeze related to apt_pkg
 # module
 import warnings
-warnings.filterwarnings('ignore', \
-    "Attribute '.*' of the 'apt_pkg\.Configuration' object is deprecated, use '.*' instead\.", \
-    DeprecationWarning)
-warnings.filterwarnings('ignore', \
-    "apt_pkg\.newConfiguration\(\) is deprecated\. Use apt_pkg\.Configuration\(\) instead\.", \
-    DeprecationWarning)
+warnings.filterwarnings('ignore', ".*apt_pkg.* is deprecated.*", DeprecationWarning)
+
+################################################################################
 
 def which_conf_file():
     return os.getenv("DAK_CONFIG", default_config)
diff --git a/daklib/contents.py b/daklib/contents.py
index f3077aab..449fb88e 100755
--- a/daklib/contents.py
+++ b/daklib/contents.py
@@ -285,7 +285,7 @@ def binary_helper(suite_id, arch_id, overridetype_id, component_id = None):
     This function is called in a new subprocess and multiprocessing wants a top
     level function.
     '''
-    session = DBConn().session()
+    session = DBConn().session(work_mem = 1000)
     suite = Suite.get(suite_id, session)
     architecture = Architecture.get(arch_id, session)
     overridetype = OverrideType.get(overridetype_id, session)
@@ -304,7 +304,7 @@ def source_helper(suite_id, component_id):
     This function is called in a new subprocess and multiprocessing wants a top
     level function.
     '''
-    session = DBConn().session()
+    session = DBConn().session(work_mem = 1000)
     suite = Suite.get(suite_id, session)
     component = Component.get(component_id, session)
     log_message = [suite.suite_name, 'source', component.component_name]
diff --git a/daklib/dbconn.py b/daklib/dbconn.py
index ae5a9e0d..6948cf6c 100755
--- a/daklib/dbconn.py
+++ b/daklib/dbconn.py
@@ -3219,7 +3219,6 @@ class DBConn(object):
             'almost_obsolete_all_associations',
             'almost_obsolete_src_associations',
             'any_associations_source',
-            'bin_assoc_by_arch',
             'bin_associations_binaries',
             'binaries_suite_arch',
             'binfiles_suite_component_arch',
@@ -3591,12 +3590,21 @@ class DBConn(object):
         self.__setupmappers()
         self.pid = os.getpid()
 
-    def session(self):
+    def session(self, work_mem = 0):
+        '''
+        Returns a new session object. If a work_mem parameter is provided a new
+        transaction is started and the work_mem parameter is set for this
+        transaction. The work_mem parameter is measured in MB. A default value
+        will be used if the parameter is not set.
+        '''
         # reinitialize DBConn in new processes
         if self.pid != os.getpid():
             clear_mappers()
             self.__createconn()
-        return self.db_smaker()
+        session = self.db_smaker()
+        if work_mem > 0:
+            session.execute("SET LOCAL work_mem TO '%d MB'" % work_mem)
+        return session
 
 __all__.append('DBConn')
diff --git a/daklib/queue.py b/daklib/queue.py
index b652f844..ef781f19 100755
--- a/daklib/queue.py
+++ b/daklib/queue.py
@@ -56,16 +56,6 @@ from textutils import fix_maintainer
 from lintian import parse_lintian_output, generate_reject_messages
 from contents import UnpackedSource
 
-# suppress some deprecation warnings in squeeze related to apt_pkg
-# module
-import warnings
-warnings.filterwarnings('ignore', \
-    "apt_pkg.ParseSection\(\) is deprecated. Please see apt_pkg\.TagSection\(\) for the replacement\.", \
-    DeprecationWarning)
-warnings.filterwarnings('ignore', \
-    "Attribute 'Find' of the 'apt_pkg\.TagSection' object is deprecated, use 'find' instead\.", \
-    DeprecationWarning)
-
 ###############################################################################
 
 def get_type(f, session):
diff --git a/scripts/debian/buildd-add-keys b/scripts/debian/buildd-add-keys
index ddb56a42..1283838f 100755
--- a/scripts/debian/buildd-add-keys
+++ b/scripts/debian/buildd-add-keys
@@ -51,12 +51,12 @@ function cleanup() {
     done
     exit $ERRVAL
 }
-trap cleanup ERR EXIT TERM HUP INT QUIT
 
 base="${base}/scripts/builddkeyrings"
 INCOMING="${base}/incoming"
 ERRORS="${base}/errors"
 ADMINS="${base}/adminkeys.gpg"
+STAMPFILE="${base}/updatedkeyring"
 
 # Default options for our gpg calls
 DEFGPGOPT="--no-default-keyring --batch --no-tty --no-options --exit-on-status-write-error --no-greeting"
@@ -66,16 +66,27 @@ if ! [ -d "${INCOMING}" ]; then
    exit 1
 fi
 
+cd "${INCOMING}"
+KEYS=$(find . -maxdepth 1 -mindepth 1 -type f -name \*.key | sed -e "s,./,," | xargs)
+if [ -z "${KEYS}" ]; then
+    exit 0
+fi
+
+trap cleanup ERR EXIT TERM HUP INT QUIT
+
+# Tell prepare-dir that there is an update and it can run
+touch "${STAMPFILE}"
+
 # Whenever something goes wrong, its put in there.
 mkdir -p "${ERRORS}"
 
 # We process all new files in our incoming directory
-for file in $(ls -1 ${INCOMING}/*.key); do
+for file in ${KEYS}; do
     file=${file##*/}
     # First we want to see if we recognize the filename. The buildd people have
     # to follow a certain schema:
-    # architecture_builddname.YEAR-MONTH-DAY_HOUR:MINUTE.key
-    if [[ $file =~ (.*)_(.*).([0-9]{4}-[0-9]{2}-[0-9]{2}_[0-9]{2}:[0-9]{2}).key ]]; then
+    # architecture_builddname.YEAR-MONTH-DAY_HOURMINUTE.key
+    if [[ $file =~ (.*)_(.*).([0-9]{4}-[0-9]{2}-[0-9]{2}_[0-9]{2}[0-9]{2}).key ]]; then
         ARCH=${BASH_REMATCH[1]}
         BUILDD=${BASH_REMATCH[2]}
         # Right now timestamp is unused
@@ -222,7 +233,7 @@ for file in $(ls -1 ${INCOMING}/*.key); do
 
     # We need to check for the amount of keys
     ARCHKEYRING="${base}/${ARCH}/keyring.gpg"
-    KEYNO=$(gpg ${DEFGPGOPT} --keyring "${ARCHKEYRING}" --with-colons --list-keys "buildd_${ARCH}-${BUILDD}@buildd.debian.org" | grep -c '^pub:')
+    KEYNO=$(gpg ${DEFGPGOPT} --keyring "${ARCHKEYRING}" --with-colons --list-keys "buildd_${ARCH}-${BUILDD}@buildd.debian.org" | grep -c '^pub:' || /bin/true )
     if [ ${KEYNO} -gt 2 ]; then
         DATE=$(date -Is)
         mv "${INCOMING}/${file}" "${ERRORS}/toomany.${file}.${DATE}"
diff --git a/scripts/debian/buildd-prepare-dir b/scripts/debian/buildd-prepare-dir
index e0f6053a..df4b098e 100755
--- a/scripts/debian/buildd-prepare-dir
+++ b/scripts/debian/buildd-prepare-dir
@@ -28,8 +28,6 @@ set -E
 export SCRIPTVARS=/srv/ftp-master.debian.org/dak/config/debian/vars
 . $SCRIPTVARS
 
-umask 027
-
 # And use one locale, no matter what the caller has set
 export LANG=C
 export LC_ALL=C
@@ -40,23 +38,28 @@ PROGRAM="buildd-prepare-dir"
 
 # should be relative to the general base dir later
 COPYTARGET="${base}/keyrings"
+GNUPGHOME="${base}/s3kr1t/dot-gnupg"
 base="${base}/scripts/builddkeyrings"
 TARGET="${base}/keyrings"
 REMOVED="${base}/removed-buildd-keys.gpg"
+STAMPFILE="${base}/updatedkeyring"
 
-mkdir -p "${TARGET}/keyrings"
+mkdir -p "${TARGET}/buildd-keyrings"
 
-for arch in $archs; do
-    if [ -f ${base}/${arch}/keyring.gpg ]; then
-        cp -al ${base}/${arch}/keyring.gpg ${TARGET}/keyrings/buildd-${arch}-keyring.gpg
-        chmod 0644 ${TARGET}/keyrings/buildd-${arch}-keyring.gpg
-    fi
-done
+if [ -f "${STAMPFILE}" ]; then
+    rm -f "${STAMPFILE}"
+    for arch in $archs; do
+        if [ -f ${base}/${arch}/keyring.gpg ]; then
+            cp -afl ${base}/${arch}/keyring.gpg ${TARGET}/buildd-keyrings/buildd-${arch}-keyring.gpg
+            chmod 0644 ${TARGET}/buildd-keyrings/buildd-${arch}-keyring.gpg
+        fi
+    done
 
-cd ${TARGET}
-sha512sum keyrings/* > sha512sums
+    cd ${TARGET}
+    sha512sum buildd-keyrings/* > sha512sums
 
-rm -f ${TARGET}/sha512sums.txt
-SIGNINGKEY=$(dak admin c signingkeyids)
-gpg --no-options --batch --no-tty --armour --default-key ${SIGNINKEY} --clearsign -o "${TARGET}/sha512sums.txt" "${TARGET}/sha512sums"
-rm -f ${TARGET}/sha512sums
+    rm -f ${TARGET}/sha512sums.txt
+    SIGNINGKEY=B1326A8D
+    GNUPGHOME=${GNUPGHOME} gpg --no-options --batch --no-tty --armour --default-key ${SIGNINGKEY} --clearsign -o "${TARGET}/sha512sums.txt" "${TARGET}/sha512sums"
+    rm -f ${TARGET}/sha512sums
+fi
diff --git a/scripts/debian/buildd-remove-keys b/scripts/debian/buildd-remove-keys
index c07ff04b..3591785d 100755
--- a/scripts/debian/buildd-remove-keys
+++ b/scripts/debian/buildd-remove-keys
@@ -51,13 +51,13 @@ function cleanup() {
     done
     exit $ERRVAL
 }
-trap cleanup ERR EXIT TERM HUP INT QUIT
 
 base="${base}/scripts/builddkeyrings"
 INCOMING="${base}/incoming"
 ERRORS="${base}/errors"
 ADMINS="${base}/adminkeys.gpg"
 REMOVED="${base}/removed-buildd-keys.gpg"
+STAMPFILE="${base}/updatedkeyring"
 
 # Default options for our gpg calls
 DEFGPGOPT="--no-default-keyring --batch --no-tty --no-options --exit-on-status-write-error --no-greeting"
@@ -67,16 +67,27 @@ if ! [ -d "${INCOMING}" ]; then
    exit 1
 fi
 
+cd "${INCOMING}"
+KEYS=$(find . -maxdepth 1 -mindepth 1 -type f -name \*.del | sed -e "s,./,," | xargs)
+if [ -z "${KEYS}" ]; then
+    exit 0
+fi
+
+trap cleanup ERR EXIT TERM HUP INT QUIT
+
+# Tell prepare-dir that there is an update and it can run
+touch "${STAMPFILE}"
+
 # Whenever something goes wrong, its put in there.
 mkdir -p "${ERRORS}"
 
 # We process all new files in our incoming directory
-for file in $(ls -1 ${INCOMING}/*.del ); do
+for file in ${KEYS}; do
     file=${file##*/}
     # First we want to see if we recognize the filename. The buildd people have
     # to follow a certain schema:
-    # architecture_builddname.YEAR-MONTH-DAY_HOUR:MINUTE.del
-    if [[ $file =~ (.*)_(.*).([0-9]{4}-[0-9]{2}-[0-9]{2}_[0-9]{2}:[0-9]{2}).del ]]; then
+    # architecture_builddname.YEAR-MONTH-DAY_HOURMINUTE.del
+    if [[ $file =~ (.*)_(.*).([0-9]{4}-[0-9]{2}-[0-9]{2}_[0-9]{2}[0-9]{2}).del ]]; then
         ARCH=${BASH_REMATCH[1]}
         BUILDD=${BASH_REMATCH[2]}
         # Right now timestamp is unused
@@ -172,7 +183,7 @@ for file in $(ls -1 ${INCOMING}/*.del ); do
     ARCHKEYRING="${base}/${ARCH}/keyring.gpg"
 
     # Is the key in there?
-    KEYNO=$(gpg ${DEFGPGOPT} --keyring "${ARCHKEYRING}" --with-colons --list-keys ${KEYID} | grep -c '^pub:')
+    KEYNO=$(gpg ${DEFGPGOPT} --keyring "${ARCHKEYRING}" --with-colons --list-keys ${KEYID} | grep -c '^pub:' || /bin/true )
     if [ $KEYNO -eq 1 ]; then
         # Right, exactly one there, lets get rid of it
diff --git a/scripts/debian/ddtp-i18n-check.sh b/scripts/debian/ddtp-i18n-check.sh
index cefb68e6..f894f1cc 100755
--- a/scripts/debian/ddtp-i18n-check.sh
+++ b/scripts/debian/ddtp-i18n-check.sh
@@ -1,8 +1,8 @@
 #!/bin/bash
 #
-# $Id: ddtp_i18n_check.sh 1670 2009-03-31 20:57:49Z nekral-guest $
+# $Id: ddtp_i18n_check.sh 2535 2011-02-19 14:20:52Z nekral-guest $
 #
-# Copyright (C) 2008, Felipe Augusto van de Wiel
+# Copyright (C) 2008, 2011 Felipe Augusto van de Wiel
 # Copyright (C) 2008, 2009 Nicolas François
 #
 # This program is free software; you can redistribute it and/or modify
@@ -25,6 +25,10 @@ DEBUG=0
 # files.
 DRY_RUN=0
 
+# When GEN_IDX=1, we create the Index files. There is a runtime option
+# to not create/generate the Index file.
+GEN_IDX=1
+
 dists_parent_dir=""
 # If no argument indicates the PACKAGES_LISTS_DIR then use '.'
 PACKAGES_LISTS_DIR=""
@@ -35,6 +39,7 @@ usage () {
     echo "    --debug      Debug mode: do not stop after the first error" >&2
     echo "    --dry-run    Do not generate the compressed version of the " >&2
    echo "                 Translation files">&2
+    echo "    --no-index   Do not generate the Index files" >&2
     exit 1
 }
 
@@ -47,6 +52,9 @@ for opt; do
     "--dry-run")
         DRY_RUN=1
         ;;
+    "--no-index")
+        GEN_IDX=0
+        ;;
     "-*")
         usage
         ;;
@@ -80,21 +88,21 @@ elif [ ! -d "$PACKAGES_LISTS_DIR" ]; then
     usage
 fi
 
-#STABLE="lenny"
+#STABLE="squeeze"
 TESTING="wheezy"
 UNSTABLE="sid"
 
 # Original SHA256SUMS, generated by i18n.debian.net
-SHA256SUMS="SHA256SUMS"
+CHECKSUMS="SHA256SUMS"
 
 # DAK Timestamp
 TIMESTAMP="timestamp"
 
 # These special files must exist on the top of dists_parent_dir
-SPECIAL_FILES="$SHA256SUMS $TIMESTAMP $TIMESTAMP.gpg"
+SPECIAL_FILES="$CHECKSUMS $TIMESTAMP $TIMESTAMP.gpg"
 
 # Temporary working directory. We need a full path to reduce the
-# complexity of checking SHA256SUMS and cleaning/removing TMPDIR
+# complexity of checking CHECKSUMS and cleaning/removing TMPDIR
 TEMP_WORK_DIR=$(mktemp -d -t ddtp_dinstall_tmpdir.XXXXXX)
 cd "$TEMP_WORK_DIR"
 TMP_WORK_DIR=$(pwd)
@@ -105,6 +113,7 @@ unset TEMP_WORK_DIR
 trap_exit () {
     rm -rf "$TMP_WORK_DIR"
     rm -f "$dists_parent_dir"/dists/*/main/i18n/Translation-*.bz2
+    rm -f "$dists_parent_dir"/dists/*/main/i18n/Index
     exit 1
 }
 trap trap_exit EXIT HUP INT QUIT TERM
@@ -315,18 +324,18 @@ for sf in $SPECIAL_FILES; do
     fi
 done
 
-# Comparing SHA256SUMS
-# We don use -c because a file could exist in the directory tree and not in
-# the SHA256SUMS, so we sort the existing SHA256SUMS and we create a new one
+# Comparing CHECKSUMS
+# We don't use -c because a file could exist in the directory tree and not in
+# the CHECKSUMS, so we sort the existing CHECKSUMS and we create a new one
 # already sorted, if cmp fails then files are different and we don't want to
 # continue.
 cd "$dists_parent_dir"
-find dists -type f -print0 |xargs --null sha256sum > "$TMP_WORK_DIR/$SHA256SUMS.new"
-sort "$SHA256SUMS" > "$TMP_WORK_DIR/$SHA256SUMS.sorted"
-sort "$TMP_WORK_DIR/$SHA256SUMS.new" > "$TMP_WORK_DIR/$SHA256SUMS.new.sorted"
-if ! cmp --quiet "$TMP_WORK_DIR/$SHA256SUMS.sorted" "$TMP_WORK_DIR/$SHA256SUMS.new.sorted"; then
-    echo "Failed to compare the SHA256SUMS, they are not identical!" >&2
-    diff -au "$TMP_WORK_DIR/$SHA256SUMS.sorted" "$TMP_WORK_DIR/$SHA256SUMS.new.sorted" >&2
+find dists -type f -print0 |xargs --null sha256sum > "$TMP_WORK_DIR/$CHECKSUMS.new"
+sort "$CHECKSUMS" > "$TMP_WORK_DIR/$CHECKSUMS.sorted"
+sort "$TMP_WORK_DIR/$CHECKSUMS.new" > "$TMP_WORK_DIR/$CHECKSUMS.new.sorted"
+if ! cmp --quiet "$TMP_WORK_DIR/$CHECKSUMS.sorted" "$TMP_WORK_DIR/$CHECKSUMS.new.sorted"; then
+    echo "Failed to compare the $CHECKSUMS, they are not identical!" >&2
+    diff -au "$TMP_WORK_DIR/$CHECKSUMS.sorted" "$TMP_WORK_DIR/$CHECKSUMS.new.sorted" >&2
     exit 1
 fi
 cd "$OLDPWD"
@@ -346,6 +355,14 @@ while read f; do
         if ! is_dirname_okay "$f"; then
             echo "Wrong directory name: $f" >&2
             exit 1
+        else
+            # If the directory name is OK, and if it's name is i18n
+            # and GEN_IDX is enabled, we generate the header of the
+            # Index file
+            if [ "$(basename $f)" = "i18n" -a "$GEN_IDX" = "1" ];
+            then
+                echo "SHA1:" > "$f/Index"
+            fi
         fi
     elif [ -f "$f" ]; then
         # If $f is in $SPECIAL_FILES, we skip to the next loop because
@@ -393,6 +410,17 @@ while read f; do
             # Now generate the compressed files
             bzip2 "$f"
         fi
+
+        # Create Index
+        if [ "$GEN_IDX" = "1" ]; then
+            fbz=${f}.bz2
+            IDX=$(dirname $f)
+            tf_name=$(basename $fbz)
+            tf_sha1=$(sha1sum $fbz)
+            tf_size=$(du $fbz)
+            printf ' %s % 7s %s\n' "${tf_sha1% *}" \
+                "${tf_size% *}" "${tf_name}" >> "$IDX/Index"
+        fi
     else
         echo "Neither a file or directory: $f" >&2
         exit 1
diff --git a/tools/debianqueued-0.9/config b/tools/debianqueued-0.9/config
index 16c482a0..e4d3caec 100644
--- a/tools/debianqueued-0.9/config
+++ b/tools/debianqueued-0.9/config
@@ -64,7 +64,22 @@ $statusdelay = 30;
 
 # names of the keyring files
 @keyrings = ( "/srv/keyring.debian.org/keyrings/debian-keyring.gpg",
-              "/srv/keyring.debian.org/keyrings/debian-maintainers.gpg" );
+              "/srv/keyring.debian.org/keyrings/debian-maintainers.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-alpha-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-amd64-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armel-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-arm-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hppa-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hurd-i386-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-i386-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-ia64-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-kfreebsd-amd64-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-kfreebsd-i386-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mipsel-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mips-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-powerpc-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-sparc-keyring.gpg");
 
 # our log file
 $logfile = "$queued_dir/log";
diff --git a/tools/debianqueued-0.9/config-backports b/tools/debianqueued-0.9/config-backports
index f04e925d..1c9e0c9a 100644
--- a/tools/debianqueued-0.9/config-backports
+++ b/tools/debianqueued-0.9/config-backports
@@ -63,7 +63,22 @@ $statusfile = "$incoming/status";
 $statusdelay = 30;
 
 # names of the keyring files
-@keyrings = ( "/srv/backports-master.debian.org/keyrings/keyring.gpg" );
+@keyrings = ( "/srv/backports-master.debian.org/keyrings/keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-alpha-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-amd64-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armel-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-arm-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hppa-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hurd-i386-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-i386-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-ia64-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-kfreebsd-amd64-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-kfreebsd-i386-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mipsel-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mips-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-powerpc-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-sparc-keyring.gpg");
 
 # our log file
 $logfile = "$queued_dir/log";
diff --git a/tools/debianqueued-0.9/config-security b/tools/debianqueued-0.9/config-security
index fe00f0d4..1806a114 100644
--- a/tools/debianqueued-0.9/config-security
+++ b/tools/debianqueued-0.9/config-security
@@ -63,7 +63,22 @@ $statusfile = "$incoming/status";
 $statusdelay = 30;
 
 # names of the keyring files
-@keyrings = ( "/srv/keyring.debian.org/keyrings/debian-keyring.gpg" );
+@keyrings = ( "/srv/keyring.debian.org/keyrings/debian-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-alpha-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-amd64-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armel-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-arm-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hppa-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hurd-i386-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-i386-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-ia64-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-kfreebsd-amd64-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-kfreebsd-i386-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mipsel-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mips-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-powerpc-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-sparc-keyring.gpg");
 
 # our log file
 $logfile = "$queued_dir/log";
diff --git a/tools/debianqueued-0.9/config-upload b/tools/debianqueued-0.9/config-upload
index 77cc90d7..d6071278 100644
--- a/tools/debianqueued-0.9/config-upload
+++ b/tools/debianqueued-0.9/config-upload
@@ -64,7 +64,22 @@ $statusdelay = 30;
 
 # names of the keyring files
 @keyrings = ( "/srv/keyring.debian.org/keyrings/debian-keyring.gpg",
-              "/srv/keyring.debian.org/keyrings/debian-maintainers.gpg" );
+              "/srv/keyring.debian.org/keyrings/debian-maintainers.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-alpha-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-amd64-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armel-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-arm-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hppa-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hurd-i386-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-i386-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-ia64-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-kfreebsd-amd64-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-kfreebsd-i386-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mipsel-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mips-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-powerpc-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-sparc-keyring.gpg");
 
 # our log file
 $logfile = "$queued_dir/log";