$base/dak/tools/removals.pl $configdir/removalsrss.rc > $webdir/removals.rss
#$scriptsdir/generate-di
+
+# do the buildd key updates
+BUILDDFUN=$(mktemp -p "${TMPDIR}" BUILDDFUN.XXXXXX)
+exec >> "${BUILDDFUN}" 2>&1
+#${scriptsdir}/buildd-remove-keys
+#${scriptsdir}/buildd-add-keys
+#${scriptsdir}/buildd-prepare-dir
+for keyring in $(dak admin k list-binary); do
+ dak import-keyring --generate-users "%s" ${keyring}
+done
+exec >>/dev/null 2>&1
+
+DATE=$(date -Is)
+cat "${BUILDDFUN}" | mail -a "X-Debian: DAK" -e -s "[$(hostname -s)] Buildd key changes ${DATE}" buildd-keys@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
+
+rm -f "${BUILDDFUN}"
--- /dev/null
+#! /bin/bash
+#
+# Executed hourly via cron, out of dak's crontab.
+
+set -e
+set -u
+
+export SCRIPTVARS=/srv/security-master.debian.org/dak/config/debian-security/vars
+. $SCRIPTVARS
+
+dak import-users-from-passwd
+# dak queue-report -n > $webdir/new.html
+# dak queue-report -8 -d new,byhand,proposedupdates,oldproposedupdates -r $webdir/stat
+# dak show-deferred -r $webdir/stat > ${webdir}/deferred.html
+# dak graph -n new,byhand,proposedupdates,oldproposedupdates,deferred -r $webdir/stat -i $webdir/stat -x $scriptsdir/rrd-release-freeze-dates
+# dak show-new > /dev/null
+
+# cd $webdir
+# cat removals-20*.txt > removals-full.txt
+# cat removals.txt >> removals-full.txt
+# cat removals-20*.822 > removals-full.822
+# cat removals.822 >> removals-full.822
+
+#$base/dak/tools/queue_rss.py -q $queuedir/new -o $webdir/rss/ -d $base/misc -l $base/log/
+#$base/dak/tools/removals.pl $configdir/removalsrss.rc > $webdir/removals.rss
+
+#$scriptsdir/generate-di
+
+# do the buildd key updates
+BUILDDFUN=$(mktemp -p "${TMPDIR}" BUILDDFUN.XXXXXX)
+exec >> "${BUILDDFUN}" 2>&1
+#${scriptsdir}/buildd-remove-keys
+#${scriptsdir}/buildd-add-keys
+#${scriptsdir}/buildd-prepare-dir
+for keyring in $(dak admin k list-binary); do
+ dak import-keyring --generate-users "%s" ${keyring}
+done
+exec >>/dev/null 2>&1
+
+DATE=$(date -Is)
+cat "${BUILDDFUN}" | mail -a "X-Debian: DAK" -e -s "[$(hostname -s)] Buildd key changes ${DATE}" buildd-keys@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
+
+rm -f "${BUILDDFUN}"
$base/dak/tools/queue_rss.py -q $queuedir/new -o $webdir/rss/ -d $base/misc -l $base/log/
$base/dak/tools/removals.pl $configdir/removalsrss.rc > $webdir/rss/removals.rss
+
# Tell ries to sync its tree
ssh -o Batchmode=yes -o ConnectTimeout=30 -o SetupTimeout=30 -2 -i ${base}/s3kr1t/pushddmirror dak@ries.debian.org sync
$scriptsdir/generate-di
+
+
+# do the buildd key updates
+BUILDDFUN=$(mktemp -p "${TMPDIR}" BUILDDFUN.XXXXXX)
+exec >> "${BUILDDFUN}" 2>&1
+${scriptsdir}/buildd-remove-keys
+${scriptsdir}/buildd-add-keys
+${scriptsdir}/buildd-prepare-dir
+
+for keyring in $(dak admin k list-binary); do
+ dak import-keyring --generate-users "%s" ${keyring}
+done
+exec >>/dev/null 2>&1
+
+DATE=$(date -Is)
+cat "${BUILDDFUN}" | mail -a "X-Debian: DAK" -e -s "[$(hostname -s)] Buildd key changes ${DATE}" buildd-keys@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
+
+rm -f "${BUILDDFUN}"
if lockfile -r3 ${HOME}/sync.lock; then
cd $base/
rsync -aH -B8192 \
- --exclude backup/*.xz \
- --exclude backup/dump* \
+ --exclude backup/*.xz \
+ --exclude backup/dump* \
+ --exclude database/*.db \
${EXTRA} \
- --exclude mirror \
- --exclude morgue/ \
- --exclude=lost+found/ \
- --exclude .da-backup.trace \
- --delete \
- --delete-after \
- --timeout 3600 \
- -e 'ssh -o ConnectTimeout=30 -o SetupTimeout=30' \
- ftpmaster-sync:/srv/ftp-master.debian.org/ .
+ --exclude mirror \
+ --exclude morgue/ \
+ --exclude=lost+found/ \
+ --exclude .da-backup.trace \
+ --exclude lock/stages/ \
+ --delete \
+ --delete-after \
+ --timeout 3600 \
+ -e 'ssh -o ConnectTimeout=30 -o SetupTimeout=30' \
+ ftpmaster-sync:/srv/ftp-master.debian.org/ .
cd $public/
rsync -aH -B8192 \
- --exclude mirror \
- --exclude rsync/ \
- --exclude=lost+found/ \
- --exclude .da-backup.trace \
- --exclude web-users/ \
- --delete \
- --delete-after \
- --timeout 3600 \
- -e 'ssh -o ConnectTimeout=30 -o SetupTimeout=30' \
- ftpmaster-sync2:/srv/ftp.debian.org/ .
+ --exclude mirror \
+ --exclude rsync/ \
+ --exclude=lost+found/ \
+ --exclude .da-backup.trace \
+ --exclude web-users/ \
+ --delete \
+ --delete-after \
+ --timeout 3600 \
+ -e 'ssh -o ConnectTimeout=30 -o SetupTimeout=30' \
+ ftpmaster-sync2:/srv/ftp.debian.org/ .
else
echo "Couldn't get the lock, not syncing"
c db-shell show db config in a usable form for psql
c NAME show option NAME as set in configuration table
+ keyring / k:
+ k list-all list all keyrings
+ k list-binary list all keyrings with a NULL source acl
+ k list-source list all keyrings with a non NULL source acl
+
architecture / a:
a list show a list of architectures
a rm ARCH remove an architecture (will only work if
################################################################################
+def show_keyring(command):
+ args = [str(x) for x in command]
+ cnf = utils.get_conf()
+
+ die_arglen(args, 2, "E: keyring needs at least a command")
+
+ mode = args[1].lower()
+
+ d = DBConn()
+
+ q = d.session().query(Keyring).filter(Keyring.active == True)
+
+ if mode == 'list-all':
+ pass
+ elif mode == 'list-binary':
+ q = q.filter(Keyring.default_source_acl_id == None)
+ elif mode == 'list-source':
+ q = q.filter(Keyring.default_source_acl_id != None)
+ else:
+ die("E: keyring command unknown")
+
+ for k in q.all():
+ print k.keyring_name
+
+dispatch['keyring'] = show_keyring
+dispatch['k'] = show_keyring
+
+################################################################################
+
def main():
"""Perform administrative work on the dak database"""
global dryrun
--- /dev/null
+#!/usr/bin/env python
+# coding=utf8
+
+"""
+Drop unused view bin_assoc_by_arch.
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2011 Torsten Werner <twerner@debian.org>
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+################################################################################
+
+import psycopg2
+from daklib.dak_exceptions import DBUpdateError
+from socket import gethostname;
+
+################################################################################
+def do_update(self):
+ """
+ Drop unused view bin_assoc_by_arch.
+ """
+ print __doc__
+ try:
+ c = self.db.cursor()
+
+ c.execute("""
+ DROP VIEW bin_assoc_by_arch""")
+
+ c.execute("UPDATE config SET value = '55' WHERE name = 'db_revision'")
+ self.db.commit()
+
+ except psycopg2.ProgrammingError, msg:
+ self.db.rollback()
+ raise DBUpdateError, 'Unable to apply sick update 55, rollback issued. Error message : %s' % (str(msg))
@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2009 Torsten Werner <twerner@debian.org>
+@copyright: 2011 Ansgar Burchardt <ansgar@debian.org>
@license: GNU General Public License version 2 or later
"""
from daklib.dbconn import *
from daklib.config import Config
-from daklib.threadpool import ThreadPool
-from daklib import utils
+from daklib import utils, daklog
+from multiprocessing import Pool
import apt_pkg, os, stat, sys
from daklib.lists import getSources, getBinaries, getArchAll
file.truncate()
return (file, timestamp)
-def writeSourceList(args):
- (suite, component, incremental_mode) = args
+def writeSourceList(suite_id, component_id, incremental_mode):
+ session = DBConn().session()
+ suite = Suite.get(suite_id, session)
+ component = Component.get(component_id, session)
(file, timestamp) = listPath(suite, component,
incremental_mode = incremental_mode)
- session = DBConn().session()
+
for _, filename in getSources(suite, component, session, timestamp):
file.write(filename + '\n')
session.close()
file.close()
+ return "sources list for %s %s" % (suite.suite_name, component.component_name)
-def writeAllList(args):
- (suite, component, architecture, type, incremental_mode) = args
+def writeAllList(suite_id, component_id, architecture_id, type, incremental_mode):
+ session = DBConn().session()
+ suite = Suite.get(suite_id, session)
+ component = Component.get(component_id, session)
+ architecture = Architecture.get(architecture_id, session)
(file, timestamp) = listPath(suite, component, architecture, type,
incremental_mode)
- session = DBConn().session()
+
for _, filename in getArchAll(suite, component, architecture, type,
session, timestamp):
file.write(filename + '\n')
session.close()
file.close()
+ return "all list for %s %s (arch=%s, type=%s)" % (suite.suite_name, component.component_name, architecture.arch_string, type)
-def writeBinaryList(args):
- (suite, component, architecture, type, incremental_mode) = args
+def writeBinaryList(suite_id, component_id, architecture_id, type, incremental_mode):
+ session = DBConn().session()
+ suite = Suite.get(suite_id, session)
+ component = Component.get(component_id, session)
+ architecture = Architecture.get(architecture_id, session)
(file, timestamp) = listPath(suite, component, architecture, type,
incremental_mode)
- session = DBConn().session()
+
for _, filename in getBinaries(suite, component, architecture, type,
session, timestamp):
file.write(filename + '\n')
session.close()
file.close()
+ return "binary list for %s %s (arch=%s, type=%s)" % (suite.suite_name, component.component_name, architecture.arch_string, type)
def usage():
print """Usage: dak generate_filelist [OPTIONS]
def main():
cnf = Config()
+ Logger = daklog.Logger(cnf, 'generate-filelist')
Arguments = [('h', "help", "Filelist::Options::Help"),
('s', "suite", "Filelist::Options::Suite", "HasArg"),
('c', "component", "Filelist::Options::Component", "HasArg"),
Options = cnf.SubTree("Filelist::Options")
if Options['Help']:
usage()
- threadpool = ThreadPool()
+ pool = Pool()
query_suites = query_suites. \
filter(Suite.suite_name.in_(utils.split_args(Options['Suite'])))
query_components = query_components. \
filter(Component.component_name.in_(utils.split_args(Options['Component'])))
query_architectures = query_architectures. \
filter(Architecture.arch_string.in_(utils.split_args(Options['Architecture'])))
+
+ def log(message):
+ Logger.log([message])
+
for suite in query_suites:
+ suite_id = suite.suite_id
for component in query_components:
+ component_id = component.component_id
for architecture in query_architectures:
+ architecture_id = architecture.arch_id
if architecture not in suite.architectures:
pass
elif architecture.arch_string == 'source':
- threadpool.queueTask(writeSourceList,
- (suite, component, Options['Incremental']))
+ pool.apply_async(writeSourceList,
+ (suite_id, component_id, Options['Incremental']), callback=log)
elif architecture.arch_string == 'all':
- threadpool.queueTask(writeAllList,
- (suite, component, architecture, 'deb',
- Options['Incremental']))
- threadpool.queueTask(writeAllList,
- (suite, component, architecture, 'udeb',
- Options['Incremental']))
+ pool.apply_async(writeAllList,
+ (suite_id, component_id, architecture_id, 'deb',
+ Options['Incremental']), callback=log)
+ pool.apply_async(writeAllList,
+ (suite_id, component_id, architecture_id, 'udeb',
+ Options['Incremental']), callback=log)
else: # arch any
- threadpool.queueTask(writeBinaryList,
- (suite, component, architecture, 'deb',
- Options['Incremental']))
- threadpool.queueTask(writeBinaryList,
- (suite, component, architecture, 'udeb',
- Options['Incremental']))
- threadpool.joinAll()
+ pool.apply_async(writeBinaryList,
+ (suite_id, component_id, architecture_id, 'deb',
+ Options['Incremental']), callback=log)
+ pool.apply_async(writeBinaryList,
+ (suite_id, component_id, architecture_id, 'udeb',
+ Options['Incremental']), callback=log)
+ pool.close()
+ pool.join()
# this script doesn't change the database
session.close()
('Codename', 'codename') )
# A "Sub" Release file has slightly different fields
- subattribs = ( ('Origin', 'origin'),
+ subattribs = ( ('Archive', 'suite_name'),
+ ('Origin', 'origin'),
('Label', 'label'),
- ('Archive', 'suite_name'),
('Version', 'version') )
# Boolean stuff. If we find it true in database, write out "yes" into the release file
out.write("Description: %s\n" % suite.description)
for comp in components:
- for dirpath, dirnames, filenames in os.walk("%sdists/%s/%s" % (cnf["Dir::Root"], suite.suite_name, comp), topdown=True):
+ for dirpath, dirnames, filenames in os.walk("%sdists/%s/%s%s" % (cnf["Dir::Root"], suite.suite_name, suite_suffix, comp), topdown=True):
if not re_gensubrelease.match(dirpath):
continue
subfile = os.path.join(dirpath, "Release")
- subrel = open(subfile, "w")
+ subrel = open(subfile + '.new', "w")
for key, dbfield in subattribs:
if getattr(suite, dbfield) is not None:
subrel.write("%s: yes\n" % (key))
subrel.write("Component: %s%s\n" % (suite_suffix, comp))
+
+ # Urgh, but until we have all the suite/component/arch stuff in the DB,
+ # this'll have to do
+ arch = os.path.split(dirpath)[-1]
+ if arch.startswith('binary-'):
+ arch = arch[7:]
+
+ subrel.write("Architecture: %s\n" % (arch))
subrel.close()
+ os.rename(subfile + '.new', subfile)
+
# Now that we have done the groundwork, we want to get off and add the files with
# their checksums to the main Release file
oldcwd = os.getcwd()
################################################################################
Cnf = None
-required_database_schema = 54
+required_database_schema = 55
################################################################################
# suppress some deprecation warnings in squeeze related to apt_pkg
# module
import warnings
-warnings.filterwarnings('ignore', \
- "Attribute '.*' of the 'apt_pkg\.Configuration' object is deprecated, use '.*' instead\.", \
- DeprecationWarning)
-warnings.filterwarnings('ignore', \
- "apt_pkg\.newConfiguration\(\) is deprecated\. Use apt_pkg\.Configuration\(\) instead\.", \
- DeprecationWarning)
+warnings.filterwarnings('ignore', ".*apt_pkg.* is deprecated.*", DeprecationWarning)
+
+################################################################################
def which_conf_file():
return os.getenv("DAK_CONFIG", default_config)
This function is called in a new subprocess and multiprocessing wants a top
level function.
'''
- session = DBConn().session()
+ session = DBConn().session(work_mem = 1000)
suite = Suite.get(suite_id, session)
architecture = Architecture.get(arch_id, session)
overridetype = OverrideType.get(overridetype_id, session)
This function is called in a new subprocess and multiprocessing wants a top
level function.
'''
- session = DBConn().session()
+ session = DBConn().session(work_mem = 1000)
suite = Suite.get(suite_id, session)
component = Component.get(component_id, session)
log_message = [suite.suite_name, 'source', component.component_name]
'almost_obsolete_all_associations',
'almost_obsolete_src_associations',
'any_associations_source',
- 'bin_assoc_by_arch',
'bin_associations_binaries',
'binaries_suite_arch',
'binfiles_suite_component_arch',
self.__setupmappers()
self.pid = os.getpid()
- def session(self):
+ def session(self, work_mem = 0):
+ '''
+ Returns a new session object. If a work_mem parameter is provided a new
+ transaction is started and the work_mem parameter is set for this
+ transaction. The work_mem parameter is measured in MB. A default value
+ will be used if the parameter is not set.
+ '''
# reinitialize DBConn in new processes
if self.pid != os.getpid():
clear_mappers()
self.__createconn()
- return self.db_smaker()
+ session = self.db_smaker()
+ if work_mem > 0:
+ session.execute("SET LOCAL work_mem TO '%d MB'" % work_mem)
+ return session
__all__.append('DBConn')
from lintian import parse_lintian_output, generate_reject_messages
from contents import UnpackedSource
-# suppress some deprecation warnings in squeeze related to apt_pkg
-# module
-import warnings
-warnings.filterwarnings('ignore', \
- "apt_pkg.ParseSection\(\) is deprecated. Please see apt_pkg\.TagSection\(\) for the replacement\.", \
- DeprecationWarning)
-warnings.filterwarnings('ignore', \
- "Attribute 'Find' of the 'apt_pkg\.TagSection' object is deprecated, use 'find' instead\.", \
- DeprecationWarning)
-
###############################################################################
def get_type(f, session):
done
exit $ERRVAL
}
-trap cleanup ERR EXIT TERM HUP INT QUIT
base="${base}/scripts/builddkeyrings"
INCOMING="${base}/incoming"
ERRORS="${base}/errors"
ADMINS="${base}/adminkeys.gpg"
+STAMPFILE="${base}/updatedkeyring"
# Default options for our gpg calls
DEFGPGOPT="--no-default-keyring --batch --no-tty --no-options --exit-on-status-write-error --no-greeting"
exit 1
fi
+cd "${INCOMING}"
+KEYS=$(find . -maxdepth 1 -mindepth 1 -type f -name \*.key | sed -e "s,./,," | xargs)
+if [ -z "${KEYS}" ]; then
+ exit 0
+fi
+
+trap cleanup ERR EXIT TERM HUP INT QUIT
+
+# Tell prepare-dir that there is an update and it can run
+touch "${STAMPFILE}"
+
# Whenever something goes wrong, its put in there.
mkdir -p "${ERRORS}"
# We process all new files in our incoming directory
-for file in $(ls -1 ${INCOMING}/*.key); do
+for file in ${KEYS}; do
file=${file##*/}
# First we want to see if we recognize the filename. The buildd people have
# to follow a certain schema:
- # architecture_builddname.YEAR-MONTH-DAY_HOUR:MINUTE.key
- if [[ $file =~ (.*)_(.*).([0-9]{4}-[0-9]{2}-[0-9]{2}_[0-9]{2}:[0-9]{2}).key ]]; then
+ # architecture_builddname.YEAR-MONTH-DAY_HOURMINUTE.key
+ if [[ $file =~ (.*)_(.*).([0-9]{4}-[0-9]{2}-[0-9]{2}_[0-9]{2}[0-9]{2}).key ]]; then
ARCH=${BASH_REMATCH[1]}
BUILDD=${BASH_REMATCH[2]}
# Right now timestamp is unused
# We need to check for the amount of keys
ARCHKEYRING="${base}/${ARCH}/keyring.gpg"
- KEYNO=$(gpg ${DEFGPGOPT} --keyring "${ARCHKEYRING}" --with-colons --list-keys "buildd_${ARCH}-${BUILDD}@buildd.debian.org" | grep -c '^pub:')
+ KEYNO=$(gpg ${DEFGPGOPT} --keyring "${ARCHKEYRING}" --with-colons --list-keys "buildd_${ARCH}-${BUILDD}@buildd.debian.org" | grep -c '^pub:' || /bin/true )
if [ ${KEYNO} -gt 2 ]; then
DATE=$(date -Is)
mv "${INCOMING}/${file}" "${ERRORS}/toomany.${file}.${DATE}"
export SCRIPTVARS=/srv/ftp-master.debian.org/dak/config/debian/vars
. $SCRIPTVARS
-umask 027
-
# And use one locale, no matter what the caller has set
export LANG=C
export LC_ALL=C
# should be relative to the general base dir later
COPYTARGET="${base}/keyrings"
+GNUPGHOME="${base}/s3kr1t/dot-gnupg"
base="${base}/scripts/builddkeyrings"
TARGET="${base}/keyrings"
REMOVED="${base}/removed-buildd-keys.gpg"
+STAMPFILE="${base}/updatedkeyring"
-mkdir -p "${TARGET}/keyrings"
+mkdir -p "${TARGET}/buildd-keyrings"
-for arch in $archs; do
- if [ -f ${base}/${arch}/keyring.gpg ]; then
- cp -al ${base}/${arch}/keyring.gpg ${TARGET}/keyrings/buildd-${arch}-keyring.gpg
- chmod 0644 ${TARGET}/keyrings/buildd-${arch}-keyring.gpg
- fi
-done
+if [ -f "${STAMPFILE}" ]; then
+ rm -f "${STAMPFILE}"
+ for arch in $archs; do
+ if [ -f ${base}/${arch}/keyring.gpg ]; then
+ cp -afl ${base}/${arch}/keyring.gpg ${TARGET}/buildd-keyrings/buildd-${arch}-keyring.gpg
+ chmod 0644 ${TARGET}/buildd-keyrings/buildd-${arch}-keyring.gpg
+ fi
+ done
-cd ${TARGET}
-sha512sum keyrings/* > sha512sums
+ cd ${TARGET}
+ sha512sum buildd-keyrings/* > sha512sums
-rm -f ${TARGET}/sha512sums.txt
-SIGNINGKEY=$(dak admin c signingkeyids)
-gpg --no-options --batch --no-tty --armour --default-key ${SIGNINKEY} --clearsign -o "${TARGET}/sha512sums.txt" "${TARGET}/sha512sums"
-rm -f ${TARGET}/sha512sums
+ rm -f ${TARGET}/sha512sums.txt
+ SIGNINGKEY=B1326A8D
+ GNUPGHOME=${GNUPGHOME} gpg --no-options --batch --no-tty --armour --default-key ${SIGNINGKEY} --clearsign -o "${TARGET}/sha512sums.txt" "${TARGET}/sha512sums"
+ rm -f ${TARGET}/sha512sums
+fi
done
exit $ERRVAL
}
-trap cleanup ERR EXIT TERM HUP INT QUIT
base="${base}/scripts/builddkeyrings"
INCOMING="${base}/incoming"
ERRORS="${base}/errors"
ADMINS="${base}/adminkeys.gpg"
REMOVED="${base}/removed-buildd-keys.gpg"
+STAMPFILE="${base}/updatedkeyring"
# Default options for our gpg calls
DEFGPGOPT="--no-default-keyring --batch --no-tty --no-options --exit-on-status-write-error --no-greeting"
exit 1
fi
+cd "${INCOMING}"
+KEYS=$(find . -maxdepth 1 -mindepth 1 -type f -name \*.del | sed -e "s,./,," | xargs)
+if [ -z "${KEYS}" ]; then
+ exit 0
+fi
+
+trap cleanup ERR EXIT TERM HUP INT QUIT
+
+# Tell prepare-dir that there is an update and it can run
+touch "${STAMPFILE}"
+
# Whenever something goes wrong, its put in there.
mkdir -p "${ERRORS}"
# We process all new files in our incoming directory
-for file in $(ls -1 ${INCOMING}/*.del ); do
+for file in ${KEYS}; do
file=${file##*/}
# First we want to see if we recognize the filename. The buildd people have
# to follow a certain schema:
- # architecture_builddname.YEAR-MONTH-DAY_HOUR:MINUTE.del
- if [[ $file =~ (.*)_(.*).([0-9]{4}-[0-9]{2}-[0-9]{2}_[0-9]{2}:[0-9]{2}).del ]]; then
+ # architecture_builddname.YEAR-MONTH-DAY_HOURMINUTE.del
+ if [[ $file =~ (.*)_(.*).([0-9]{4}-[0-9]{2}-[0-9]{2}_[0-9]{2}[0-9]{2}).del ]]; then
ARCH=${BASH_REMATCH[1]}
BUILDD=${BASH_REMATCH[2]}
# Right now timestamp is unused
ARCHKEYRING="${base}/${ARCH}/keyring.gpg"
# Is the key in there?
- KEYNO=$(gpg ${DEFGPGOPT} --keyring "${ARCHKEYRING}" --with-colons --list-keys ${KEYID} | grep -c '^pub:')
+ KEYNO=$(gpg ${DEFGPGOPT} --keyring "${ARCHKEYRING}" --with-colons --list-keys ${KEYID} | grep -c '^pub:' || /bin/true )
if [ $KEYNO -eq 1 ]; then
# Right, exactly one there, lets get rid of it
#!/bin/bash
#
-# $Id: ddtp_i18n_check.sh 1670 2009-03-31 20:57:49Z nekral-guest $
+# $Id: ddtp_i18n_check.sh 2535 2011-02-19 14:20:52Z nekral-guest $
#
-# Copyright (C) 2008, Felipe Augusto van de Wiel <faw@funlabs.org>
+# Copyright (C) 2008, 2011 Felipe Augusto van de Wiel <faw@funlabs.org>
# Copyright (C) 2008, 2009 Nicolas François <nicolas.francois@centraliens.net>
#
# This program is free software; you can redistribute it and/or modify
# files.
DRY_RUN=0
+# When GEN_IDX=1, we create the Index files. There is a runtime option
+# to not create/generate the Index file.
+GEN_IDX=1
+
dists_parent_dir=""
# If no argument indicates the PACKAGES_LISTS_DIR then use '.'
PACKAGES_LISTS_DIR=""
echo " --debug Debug mode: do not stop after the first error" >&2
echo " --dry-run Do not generate the compressed version of the " >&2
echo " Translation files">&2
+ echo " --no-index Do not generate the Index files" >&2
exit 1
}
"--dry-run")
DRY_RUN=1
;;
+ "--no-index")
+ GEN_IDX=0
+ ;;
"-*")
usage
;;
usage
fi
-#STABLE="lenny"
+#STABLE="squeeze"
TESTING="wheezy"
UNSTABLE="sid"
# Original SHA256SUMS, generated by i18n.debian.net
-SHA256SUMS="SHA256SUMS"
+CHECKSUMS="SHA256SUMS"
# DAK Timestamp
TIMESTAMP="timestamp"
# These special files must exist on the top of dists_parent_dir
-SPECIAL_FILES="$SHA256SUMS $TIMESTAMP $TIMESTAMP.gpg"
+SPECIAL_FILES="$CHECKSUMS $TIMESTAMP $TIMESTAMP.gpg"
# Temporary working directory. We need a full path to reduce the
-# complexity of checking SHA256SUMS and cleaning/removing TMPDIR
+# complexity of checking CHECKSUMS and cleaning/removing TMPDIR
TEMP_WORK_DIR=$(mktemp -d -t ddtp_dinstall_tmpdir.XXXXXX)
cd "$TEMP_WORK_DIR"
TMP_WORK_DIR=$(pwd)
trap_exit () {
rm -rf "$TMP_WORK_DIR"
rm -f "$dists_parent_dir"/dists/*/main/i18n/Translation-*.bz2
+ rm -f "$dists_parent_dir"/dists/*/main/i18n/Index
exit 1
}
trap trap_exit EXIT HUP INT QUIT TERM
fi
done
-# Comparing SHA256SUMS
-# We don use -c because a file could exist in the directory tree and not in
-# the SHA256SUMS, so we sort the existing SHA256SUMS and we create a new one
+# Comparing CHECKSUMS
+# We don't use -c because a file could exist in the directory tree and not in
+# the CHECKSUMS, so we sort the existing CHECKSUMS and we create a new one
# already sorted, if cmp fails then files are different and we don't want to
# continue.
cd "$dists_parent_dir"
-find dists -type f -print0 |xargs --null sha256sum > "$TMP_WORK_DIR/$SHA256SUMS.new"
-sort "$SHA256SUMS" > "$TMP_WORK_DIR/$SHA256SUMS.sorted"
-sort "$TMP_WORK_DIR/$SHA256SUMS.new" > "$TMP_WORK_DIR/$SHA256SUMS.new.sorted"
-if ! cmp --quiet "$TMP_WORK_DIR/$SHA256SUMS.sorted" "$TMP_WORK_DIR/$SHA256SUMS.new.sorted"; then
- echo "Failed to compare the SHA256SUMS, they are not identical!" >&2
- diff -au "$TMP_WORK_DIR/$SHA256SUMS.sorted" "$TMP_WORK_DIR/$SHA256SUMS.new.sorted" >&2
+find dists -type f -print0 |xargs --null sha256sum > "$TMP_WORK_DIR/$CHECKSUMS.new"
+sort "$CHECKSUMS" > "$TMP_WORK_DIR/$CHECKSUMS.sorted"
+sort "$TMP_WORK_DIR/$CHECKSUMS.new" > "$TMP_WORK_DIR/$CHECKSUMS.new.sorted"
+if ! cmp --quiet "$TMP_WORK_DIR/$CHECKSUMS.sorted" "$TMP_WORK_DIR/$CHECKSUMS.new.sorted"; then
+ echo "Failed to compare the $CHECKSUMS, they are not identical!" >&2
+ diff -au "$TMP_WORK_DIR/$CHECKSUMS.sorted" "$TMP_WORK_DIR/$CHECKSUMS.new.sorted" >&2
exit 1
fi
cd "$OLDPWD"
if ! is_dirname_okay "$f"; then
echo "Wrong directory name: $f" >&2
exit 1
+ else
+ # If the directory name is OK, and if its name is i18n
+ # and GEN_IDX is enabled, we generate the header of the
+ # Index file
+ if [ "$(basename $f)" = "i18n" -a "$GEN_IDX" = "1" ];
+ then
+ echo "SHA1:" > "$f/Index"
+ fi
fi
elif [ -f "$f" ]; then
# If $f is in $SPECIAL_FILES, we skip to the next loop because
# Now generate the compressed files
bzip2 "$f"
fi
+
+ # Create Index
+ if [ "$GEN_IDX" = "1" ]; then
+ fbz=${f}.bz2
+ IDX=$(dirname $f)
+ tf_name=$(basename $fbz)
+ tf_sha1=$(sha1sum $fbz)
+ tf_size=$(du $fbz)
+ printf ' %s % 7s %s\n' "${tf_sha1% *}" \
+ "${tf_size% *}" "${tf_name}" >> "$IDX/Index"
+ fi
else
echo "Neither a file or directory: $f" >&2
exit 1
# names of the keyring files
@keyrings = ( "/srv/keyring.debian.org/keyrings/debian-keyring.gpg",
- "/srv/keyring.debian.org/keyrings/debian-maintainers.gpg" );
+ "/srv/keyring.debian.org/keyrings/debian-maintainers.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-alpha-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-amd64-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armel-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-arm-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hppa-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hurd-i386-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-i386-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-ia64-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-kfreebsd-amd64-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-kfreebsd-i386-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mipsel-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mips-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-powerpc-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-sparc-keyring.gpg");
# our log file
$logfile = "$queued_dir/log";
$statusdelay = 30;
# names of the keyring files
-@keyrings = ( "/srv/backports-master.debian.org/keyrings/keyring.gpg" );
+@keyrings = ( "/srv/backports-master.debian.org/keyrings/keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-alpha-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-amd64-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armel-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-arm-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hppa-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hurd-i386-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-i386-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-ia64-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-kfreebsd-amd64-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-kfreebsd-i386-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mipsel-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mips-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-powerpc-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-sparc-keyring.gpg");
# our log file
$logfile = "$queued_dir/log";
$statusdelay = 30;
# names of the keyring files
-@keyrings = ( "/srv/keyring.debian.org/keyrings/debian-keyring.gpg" );
+@keyrings = ( "/srv/keyring.debian.org/keyrings/debian-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-alpha-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-amd64-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armel-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-arm-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hppa-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hurd-i386-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-i386-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-ia64-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-kfreebsd-amd64-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-kfreebsd-i386-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mipsel-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mips-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-powerpc-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-sparc-keyring.gpg");
# our log file
$logfile = "$queued_dir/log";
# names of the keyring files
@keyrings = ( "/srv/keyring.debian.org/keyrings/debian-keyring.gpg",
- "/srv/keyring.debian.org/keyrings/debian-maintainers.gpg" );
+ "/srv/keyring.debian.org/keyrings/debian-maintainers.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-alpha-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-amd64-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armel-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-arm-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hppa-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hurd-i386-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-i386-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-ia64-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-kfreebsd-amd64-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-kfreebsd-i386-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mipsel-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mips-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-powerpc-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-sparc-keyring.gpg");
# our log file
$logfile = "$queued_dir/log";