git.decadent.org.uk Git - dak.git/commitdiff
Merge remote-tracking branch 'ansgar/p-s-from-db' into merge
author Joerg Jaspert <joerg@debian.org>
Sat, 26 Mar 2011 11:08:41 +0000 (12:08 +0100)
committer Joerg Jaspert <joerg@debian.org>
Sat, 26 Mar 2011 11:08:41 +0000 (12:08 +0100)
* ansgar/p-s-from-db:
  Add order column to metadata_keys
  generate-packages-sources2: various bugs fixed
  sort generated Packages and Sources
  generate Packages/Sources directly from database

Signed-off-by: Joerg Jaspert <joerg@debian.org>
21 files changed:
config/backports/cron.hourly
config/debian-security/cron.hourly [new file with mode: 0755]
config/debian/cron.hourly
config/homedir/syncdd.sh
dak/admin.py
dak/dakdb/update55.py [new file with mode: 0755]
dak/generate_filelist.py
dak/generate_releases.py
dak/update_db.py
daklib/config.py
daklib/contents.py
daklib/dbconn.py
daklib/queue.py
scripts/debian/buildd-add-keys
scripts/debian/buildd-prepare-dir
scripts/debian/buildd-remove-keys
scripts/debian/ddtp-i18n-check.sh
tools/debianqueued-0.9/config
tools/debianqueued-0.9/config-backports
tools/debianqueued-0.9/config-security
tools/debianqueued-0.9/config-upload

index 24b8f90f530a1e47008d363aa263e4ff03f6a1f7..fcab3b355cf6f3bfc587144c9fdddbda62b11a80 100755 (executable)
@@ -25,3 +25,19 @@ dak show-new > /dev/null
 $base/dak/tools/removals.pl $configdir/removalsrss.rc > $webdir/removals.rss
 
 #$scriptsdir/generate-di
+
+# do the buildd key updates
+BUILDDFUN=$(mktemp -p "${TMPDIR}" BUILDDFUN.XXXXXX)
+exec >> "${BUILDDFUN}" 2>&1
+#${scriptsdir}/buildd-remove-keys
+#${scriptsdir}/buildd-add-keys
+#${scriptsdir}/buildd-prepare-dir
+for keyring in $(dak admin k list-binary); do
+    dak import-keyring --generate-users "%s" ${keyring}
+done
+exec >>/dev/null 2>&1
+
+DATE=$(date -Is)
+cat "${BUILDDFUN}" | mail -a "X-Debian: DAK" -e -s "[$(hostname -s)] Buildd key changes ${DATE}" buildd-keys@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
+
+rm -f "${BUILDDFUN}"
diff --git a/config/debian-security/cron.hourly b/config/debian-security/cron.hourly
new file mode 100755 (executable)
index 0000000..ddbf09f
--- /dev/null
@@ -0,0 +1,43 @@
+#! /bin/bash
+#
+# Executed hourly via cron, out of dak's crontab.
+
+set -e
+set -u
+
+export SCRIPTVARS=/srv/security-master.debian.org/dak/config/debian-security/vars
+. $SCRIPTVARS
+
+dak import-users-from-passwd
+# dak queue-report -n > $webdir/new.html
+# dak queue-report -8 -d new,byhand,proposedupdates,oldproposedupdates -r $webdir/stat
+# dak show-deferred -r $webdir/stat > ${webdir}/deferred.html
+# dak graph -n new,byhand,proposedupdates,oldproposedupdates,deferred -r $webdir/stat -i $webdir/stat -x $scriptsdir/rrd-release-freeze-dates
+# dak show-new > /dev/null
+
+# cd $webdir
+# cat removals-20*.txt > removals-full.txt
+# cat removals.txt >> removals-full.txt
+# cat removals-20*.822 > removals-full.822
+# cat removals.822 >> removals-full.822
+
+#$base/dak/tools/queue_rss.py -q $queuedir/new -o $webdir/rss/ -d $base/misc -l $base/log/
+#$base/dak/tools/removals.pl $configdir/removalsrss.rc > $webdir/removals.rss
+
+#$scriptsdir/generate-di
+
+# do the buildd key updates
+BUILDDFUN=$(mktemp -p "${TMPDIR}" BUILDDFUN.XXXXXX)
+exec >> "${BUILDDFUN}" 2>&1
+#${scriptsdir}/buildd-remove-keys
+#${scriptsdir}/buildd-add-keys
+#${scriptsdir}/buildd-prepare-dir
+for keyring in $(dak admin k list-binary); do
+    dak import-keyring --generate-users "%s" ${keyring}
+done
+exec >>/dev/null 2>&1
+
+DATE=$(date -Is)
+cat "${BUILDDFUN}" | mail -a "X-Debian: DAK" -e -s "[$(hostname -s)] Buildd key changes ${DATE}" buildd-keys@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
+
+rm -f "${BUILDDFUN}"
index c20f4d1da3add2861129c7904f9cd26d2c4fb81b..aef24c09515a023aa49b7fad2521cd080b6de4f5 100755 (executable)
@@ -30,7 +30,26 @@ cat removals.822 >> removals-full.822
 $base/dak/tools/queue_rss.py -q $queuedir/new -o $webdir/rss/ -d $base/misc -l $base/log/
 $base/dak/tools/removals.pl $configdir/removalsrss.rc > $webdir/rss/removals.rss
 
+
 # Tell ries to sync its tree
 ssh -o Batchmode=yes -o ConnectTimeout=30 -o SetupTimeout=30 -2 -i ${base}/s3kr1t/pushddmirror dak@ries.debian.org sync
 
 $scriptsdir/generate-di
+
+
+# do the buildd key updates
+BUILDDFUN=$(mktemp -p "${TMPDIR}" BUILDDFUN.XXXXXX)
+exec >> "${BUILDDFUN}" 2>&1
+${scriptsdir}/buildd-remove-keys
+${scriptsdir}/buildd-add-keys
+${scriptsdir}/buildd-prepare-dir
+
+for keyring in $(dak admin k list-binary); do
+    dak import-keyring --generate-users "%s" ${keyring}
+done
+exec >>/dev/null 2>&1
+
+DATE=$(date -Is)
+cat "${BUILDDFUN}" | mail -a "X-Debian: DAK" -e -s "[$(hostname -s)] Buildd key changes ${DATE}" buildd-keys@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
+
+rm -f "${BUILDDFUN}"
index 652c29a08accfee7840f66cfd75622925df16af7..9260e76402b32162cabed1fc6158b2e75f9dd104 100755 (executable)
@@ -75,31 +75,33 @@ trap cleanup EXIT TERM HUP INT QUIT
 if lockfile -r3 ${HOME}/sync.lock; then
     cd $base/
     rsync -aH -B8192 \
-           --exclude backup/*.xz \
-           --exclude backup/dump* \
+        --exclude backup/*.xz \
+        --exclude backup/dump* \
+        --exclude database/*.db \
         ${EXTRA} \
-           --exclude mirror \
-           --exclude morgue/ \
-           --exclude=lost+found/ \
-           --exclude .da-backup.trace \
-           --delete \
-           --delete-after \
-           --timeout 3600 \
-           -e 'ssh -o ConnectTimeout=30 -o SetupTimeout=30' \
-           ftpmaster-sync:/srv/ftp-master.debian.org/ .
+        --exclude mirror \
+        --exclude morgue/ \
+        --exclude=lost+found/ \
+        --exclude .da-backup.trace \
+        --exclude lock/stages/ \
+        --delete \
+        --delete-after \
+        --timeout 3600 \
+        -e 'ssh -o ConnectTimeout=30 -o SetupTimeout=30' \
+        ftpmaster-sync:/srv/ftp-master.debian.org/ .
 
     cd $public/
     rsync -aH -B8192 \
-           --exclude mirror \
-           --exclude rsync/ \
-           --exclude=lost+found/ \
-           --exclude .da-backup.trace \
-           --exclude web-users/ \
-           --delete \
-           --delete-after \
-           --timeout 3600 \
-           -e 'ssh -o ConnectTimeout=30 -o SetupTimeout=30' \
-           ftpmaster-sync2:/srv/ftp.debian.org/ .
+        --exclude mirror \
+        --exclude rsync/ \
+        --exclude=lost+found/ \
+        --exclude .da-backup.trace \
+        --exclude web-users/ \
+        --delete \
+        --delete-after \
+        --timeout 3600 \
+        -e 'ssh -o ConnectTimeout=30 -o SetupTimeout=30' \
+        ftpmaster-sync2:/srv/ftp.debian.org/ .
 
 else
     echo "Couldn't get the lock, not syncing"
index d159651e75df850904971df8f7b3705b4bbd6837..1dc7e7bc3a449512245db15a684287800df8e4f3 100755 (executable)
@@ -61,6 +61,11 @@ Perform administrative work on the dak database.
      c db-shell             show db config in a usable form for psql
      c NAME                 show option NAME as set in configuration table
 
+  keyring / k:
+     k list-all             list all keyrings
+     k list-binary          list all keyrings with a NULL source acl
+     k list-source          list all keyrings with a non NULL source acl
+
   architecture / a:
      a list                 show a list of architectures
      a rm ARCH              remove an architecture (will only work if
@@ -469,6 +474,35 @@ dispatch['c'] = show_config
 
 ################################################################################
 
+def show_keyring(command):
+    args = [str(x) for x in command]
+    cnf = utils.get_conf()
+
+    die_arglen(args, 2, "E: keyring needs at least a command")
+
+    mode = args[1].lower()
+
+    d = DBConn()
+
+    q = d.session().query(Keyring).filter(Keyring.active == True)
+
+    if mode == 'list-all':
+        pass
+    elif mode == 'list-binary':
+        q = q.filter(Keyring.default_source_acl_id == None)
+    elif mode == 'list-source':
+        q = q.filter(Keyring.default_source_acl_id != None)
+    else:
+        die("E: keyring command unknown")
+
+    for k in q.all():
+        print k.keyring_name
+
+dispatch['keyring'] = show_keyring
+dispatch['k'] = show_keyring
+
+################################################################################
+
 def main():
     """Perform administrative work on the dak database"""
     global dryrun
diff --git a/dak/dakdb/update55.py b/dak/dakdb/update55.py
new file mode 100755 (executable)
index 0000000..3328e85
--- /dev/null
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+# coding=utf8
+
+"""
+Drop unused view bin_assoc_by_arch.
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2011 Torsten Werner <twerner@debian.org>
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+################################################################################
+
+import psycopg2
+from daklib.dak_exceptions import DBUpdateError
+from socket import gethostname;
+
+################################################################################
+def do_update(self):
+    """
+    Drop unused view bin_assoc_by_arch.
+    """
+    print __doc__
+    try:
+        c = self.db.cursor()
+
+        c.execute("""
+            DROP VIEW bin_assoc_by_arch""")
+
+        c.execute("UPDATE config SET value = '55' WHERE name = 'db_revision'")
+        self.db.commit()
+
+    except psycopg2.ProgrammingError, msg:
+        self.db.rollback()
+        raise DBUpdateError, 'Unable to apply sick update 55, rollback issued. Error message : %s' % (str(msg))
index 1f4d665495ed21497b421f2d8ea5d4c7b6080570..2a6d218badf2e6e12a6d2b9a43151a4400d4906c 100755 (executable)
@@ -5,6 +5,7 @@ Generate file lists for apt-ftparchive.
 
 @contact: Debian FTP Master <ftpmaster@debian.org>
 @copyright: 2009  Torsten Werner <twerner@debian.org>
+@copyright: 2011  Ansgar Burchardt <ansgar@debian.org>
 @license: GNU General Public License version 2 or later
 """
 
@@ -37,8 +38,8 @@ Generate file lists for apt-ftparchive.
 
 from daklib.dbconn import *
 from daklib.config import Config
-from daklib.threadpool import ThreadPool
-from daklib import utils
+from daklib import utils, daklog
+from multiprocessing import Pool
 import apt_pkg, os, stat, sys
 
 from daklib.lists import getSources, getBinaries, getArchAll
@@ -64,37 +65,48 @@ def listPath(suite, component, architecture = None, type = None,
         file.truncate()
     return (file, timestamp)
 
-def writeSourceList(args):
-    (suite, component, incremental_mode) = args
+def writeSourceList(suite_id, component_id, incremental_mode):
+    session = DBConn().session()
+    suite = Suite.get(suite_id, session)
+    component = Component.get(component_id, session)
     (file, timestamp) = listPath(suite, component,
             incremental_mode = incremental_mode)
-    session = DBConn().session()
+
     for _, filename in getSources(suite, component, session, timestamp):
         file.write(filename + '\n')
     session.close()
     file.close()
+    return "sources list for %s %s" % (suite.suite_name, component.component_name)
 
-def writeAllList(args):
-    (suite, component, architecture, type, incremental_mode) = args
+def writeAllList(suite_id, component_id, architecture_id, type, incremental_mode):
+    session = DBConn().session()
+    suite = Suite.get(suite_id, session)
+    component = Component.get(component_id, session)
+    architecture = Architecture.get(architecture_id, session)
     (file, timestamp) = listPath(suite, component, architecture, type,
             incremental_mode)
-    session = DBConn().session()
+
     for _, filename in getArchAll(suite, component, architecture, type,
             session, timestamp):
         file.write(filename + '\n')
     session.close()
     file.close()
+    return "all list for %s %s (arch=%s, type=%s)" % (suite.suite_name, component.component_name, architecture.arch_string, type)
 
-def writeBinaryList(args):
-    (suite, component, architecture, type, incremental_mode) = args
+def writeBinaryList(suite_id, component_id, architecture_id, type, incremental_mode):
+    session = DBConn().session()
+    suite = Suite.get(suite_id, session)
+    component = Component.get(component_id, session)
+    architecture = Architecture.get(architecture_id, session)
     (file, timestamp) = listPath(suite, component, architecture, type,
             incremental_mode)
-    session = DBConn().session()
+
     for _, filename in getBinaries(suite, component, architecture, type,
             session, timestamp):
         file.write(filename + '\n')
     session.close()
     file.close()
+    return "binary list for %s %s (arch=%s, type=%s)" % (suite.suite_name, component.component_name, architecture.arch_string, type)
 
 def usage():
     print """Usage: dak generate_filelist [OPTIONS]
@@ -114,6 +126,7 @@ Incremental mode appends only newer files to existing lists."""
 
 def main():
     cnf = Config()
+    Logger = daklog.Logger(cnf, 'generate-filelist')
     Arguments = [('h', "help",         "Filelist::Options::Help"),
                  ('s', "suite",        "Filelist::Options::Suite", "HasArg"),
                  ('c', "component",    "Filelist::Options::Component", "HasArg"),
@@ -140,36 +153,44 @@ def main():
     Options = cnf.SubTree("Filelist::Options")
     if Options['Help']:
         usage()
-    threadpool = ThreadPool()
+    pool = Pool()
     query_suites = query_suites. \
         filter(Suite.suite_name.in_(utils.split_args(Options['Suite'])))
     query_components = query_components. \
         filter(Component.component_name.in_(utils.split_args(Options['Component'])))
     query_architectures = query_architectures. \
         filter(Architecture.arch_string.in_(utils.split_args(Options['Architecture'])))
+
+    def log(message):
+        Logger.log([message])
+
     for suite in query_suites:
+        suite_id = suite.suite_id
         for component in query_components:
+            component_id = component.component_id
             for architecture in query_architectures:
+                architecture_id = architecture.arch_id
                 if architecture not in suite.architectures:
                     pass
                 elif architecture.arch_string == 'source':
-                    threadpool.queueTask(writeSourceList,
-                        (suite, component, Options['Incremental']))
+                    pool.apply_async(writeSourceList,
+                        (suite_id, component_id, Options['Incremental']), callback=log)
                 elif architecture.arch_string == 'all':
-                    threadpool.queueTask(writeAllList,
-                        (suite, component, architecture, 'deb',
-                            Options['Incremental']))
-                    threadpool.queueTask(writeAllList,
-                        (suite, component, architecture, 'udeb',
-                            Options['Incremental']))
+                    pool.apply_async(writeAllList,
+                        (suite_id, component_id, architecture_id, 'deb',
+                            Options['Incremental']), callback=log)
+                    pool.apply_async(writeAllList,
+                        (suite_id, component_id, architecture_id, 'udeb',
+                            Options['Incremental']), callback=log)
                 else: # arch any
-                    threadpool.queueTask(writeBinaryList,
-                        (suite, component, architecture, 'deb',
-                            Options['Incremental']))
-                    threadpool.queueTask(writeBinaryList,
-                        (suite, component, architecture, 'udeb',
-                            Options['Incremental']))
-    threadpool.joinAll()
+                    pool.apply_async(writeBinaryList,
+                        (suite_id, component_id, architecture_id, 'deb',
+                            Options['Incremental']), callback=log)
+                    pool.apply_async(writeBinaryList,
+                        (suite_id, component_id, architecture_id, 'udeb',
+                            Options['Incremental']), callback=log)
+    pool.close()
+    pool.join()
     # this script doesn't change the database
     session.close()
 
index e67bd91f6d2b3e0ed0078dc93d72d1e0c51002a9..b21f30a5483f11ab5997bb393fbcb603473bd252 100755 (executable)
@@ -138,9 +138,9 @@ class ReleaseWriter(object):
                     ('Codename',    'codename') )
 
         # A "Sub" Release file has slightly different fields
-        subattribs = ( ('Origin',   'origin'),
+        subattribs = ( ('Archive',  'suite_name'),
+                       ('Origin',   'origin'),
                        ('Label',    'label'),
-                       ('Archive',  'suite_name'),
                        ('Version',  'version') )
 
         # Boolean stuff. If we find it true in database, write out "yes" into the release file
@@ -182,12 +182,12 @@ class ReleaseWriter(object):
             out.write("Description: %s\n" % suite.description)
 
         for comp in components:
-            for dirpath, dirnames, filenames in os.walk("%sdists/%s/%s" % (cnf["Dir::Root"], suite.suite_name, comp), topdown=True):
+            for dirpath, dirnames, filenames in os.walk("%sdists/%s/%s%s" % (cnf["Dir::Root"], suite.suite_name, suite_suffix, comp), topdown=True):
                 if not re_gensubrelease.match(dirpath):
                     continue
 
                 subfile = os.path.join(dirpath, "Release")
-                subrel = open(subfile, "w")
+                subrel = open(subfile + '.new', "w")
 
                 for key, dbfield in subattribs:
                     if getattr(suite, dbfield) is not None:
@@ -198,8 +198,18 @@ class ReleaseWriter(object):
                         subrel.write("%s: yes\n" % (key))
 
                 subrel.write("Component: %s%s\n" % (suite_suffix, comp))
+
+                # Urgh, but until we have all the suite/component/arch stuff in the DB,
+                # this'll have to do
+                arch = os.path.split(dirpath)[-1]
+                if arch.startswith('binary-'):
+                    arch = arch[7:]
+
+                subrel.write("Architecture: %s\n" % (arch))
                 subrel.close()
 
+                os.rename(subfile + '.new', subfile)
+
         # Now that we have done the groundwork, we want to get off and add the files with
         # their checksums to the main Release file
         oldcwd = os.getcwd()
index 88ff20f52c1e842bb025da4213349c1d013020bc..e09f3ad4530bb5f6491addbbb54b56548a4a4b31 100755 (executable)
@@ -46,7 +46,7 @@ from daklib.daklog import Logger
 ################################################################################
 
 Cnf = None
-required_database_schema = 54
+required_database_schema = 55
 
 ################################################################################
 
index 9993ec3adbfb3bb2d61c4168d726e72eb7bc5cad..ed8cf1d0d0e7cb57b2ba0ff1fdffdd6922508e81 100755 (executable)
@@ -39,12 +39,9 @@ default_config = "/etc/dak/dak.conf" #: default dak config, defines host propert
 # suppress some deprecation warnings in squeeze related to apt_pkg
 # module
 import warnings
-warnings.filterwarnings('ignore', \
-    "Attribute '.*' of the 'apt_pkg\.Configuration' object is deprecated, use '.*' instead\.", \
-    DeprecationWarning)
-warnings.filterwarnings('ignore', \
-    "apt_pkg\.newConfiguration\(\) is deprecated\. Use apt_pkg\.Configuration\(\) instead\.", \
-    DeprecationWarning)
+warnings.filterwarnings('ignore', ".*apt_pkg.* is deprecated.*", DeprecationWarning)
+
+################################################################################
 
 def which_conf_file():
     return os.getenv("DAK_CONFIG", default_config)
index f3077aab6487eede63f71341d01ede61fec16525..449fb88e126fdc13038c1fd512edf0631bfff060 100755 (executable)
@@ -285,7 +285,7 @@ def binary_helper(suite_id, arch_id, overridetype_id, component_id = None):
     This function is called in a new subprocess and multiprocessing wants a top
     level function.
     '''
-    session = DBConn().session()
+    session = DBConn().session(work_mem = 1000)
     suite = Suite.get(suite_id, session)
     architecture = Architecture.get(arch_id, session)
     overridetype = OverrideType.get(overridetype_id, session)
@@ -304,7 +304,7 @@ def source_helper(suite_id, component_id):
     This function is called in a new subprocess and multiprocessing wants a top
     level function.
     '''
-    session = DBConn().session()
+    session = DBConn().session(work_mem = 1000)
     suite = Suite.get(suite_id, session)
     component = Component.get(component_id, session)
     log_message = [suite.suite_name, 'source', component.component_name]
index ae5a9e0d21b6583712fcc5d7fd3578cb55479876..6948cf6c27902528030c17f0f8f602811a3b79dc 100755 (executable)
@@ -3219,7 +3219,6 @@ class DBConn(object):
             'almost_obsolete_all_associations',
             'almost_obsolete_src_associations',
             'any_associations_source',
-            'bin_assoc_by_arch',
             'bin_associations_binaries',
             'binaries_suite_arch',
             'binfiles_suite_component_arch',
@@ -3591,12 +3590,21 @@ class DBConn(object):
         self.__setupmappers()
         self.pid = os.getpid()
 
-    def session(self):
+    def session(self, work_mem = 0):
+        '''
+        Returns a new session object. If a work_mem parameter is provided a new
+        transaction is started and the work_mem parameter is set for this
+        transaction. The work_mem parameter is measured in MB. A default value
+        will be used if the parameter is not set.
+        '''
         # reinitialize DBConn in new processes
         if self.pid != os.getpid():
             clear_mappers()
             self.__createconn()
-        return self.db_smaker()
+        session = self.db_smaker()
+        if work_mem > 0:
+            session.execute("SET LOCAL work_mem TO '%d MB'" % work_mem)
+        return session
 
 __all__.append('DBConn')
 
index b652f844370f3409c8badd37df2ce86caa3d4998..ef781f19e6c24fcdad659349e6669f6d231b0596 100755 (executable)
@@ -56,16 +56,6 @@ from textutils import fix_maintainer
 from lintian import parse_lintian_output, generate_reject_messages
 from contents import UnpackedSource
 
-# suppress some deprecation warnings in squeeze related to apt_pkg
-# module
-import warnings
-warnings.filterwarnings('ignore', \
-    "apt_pkg.ParseSection\(\) is deprecated. Please see apt_pkg\.TagSection\(\) for the replacement\.", \
-    DeprecationWarning)
-warnings.filterwarnings('ignore', \
-    "Attribute 'Find' of the 'apt_pkg\.TagSection' object is deprecated, use 'find' instead\.", \
-    DeprecationWarning)
-
 ###############################################################################
 
 def get_type(f, session):
index ddb56a42d6c446b97c4bde3192123e5d787e3621..1283838f14ce04c9da5d8e81ad2b3646fc7d88ea 100755 (executable)
@@ -51,12 +51,12 @@ function cleanup() {
     done
     exit $ERRVAL
 }
-trap cleanup ERR EXIT TERM HUP INT QUIT
 
 base="${base}/scripts/builddkeyrings"
 INCOMING="${base}/incoming"
 ERRORS="${base}/errors"
 ADMINS="${base}/adminkeys.gpg"
+STAMPFILE="${base}/updatedkeyring"
 
 # Default options for our gpg calls
 DEFGPGOPT="--no-default-keyring --batch --no-tty --no-options --exit-on-status-write-error --no-greeting"
@@ -66,16 +66,27 @@ if ! [ -d "${INCOMING}" ]; then
     exit 1
 fi
 
+cd "${INCOMING}"
+KEYS=$(find . -maxdepth 1 -mindepth 1 -type f -name \*.key | sed -e "s,./,," | xargs)
+if [ -z "${KEYS}" ]; then
+    exit 0
+fi
+
+trap cleanup ERR EXIT TERM HUP INT QUIT
+
+# Tell prepare-dir that there is an update and it can run
+touch "${STAMPFILE}"
+
 # Whenever something goes wrong, its put in there.
 mkdir -p "${ERRORS}"
 
 # We process all new files in our incoming directory
-for file in $(ls -1 ${INCOMING}/*.key); do
+for file in ${KEYS}; do
     file=${file##*/}
     # First we want to see if we recognize the filename. The buildd people have
     # to follow a certain schema:
-    # architecture_builddname.YEAR-MONTH-DAY_HOUR:MINUTE.key
-    if [[ $file =~ (.*)_(.*).([0-9]{4}-[0-9]{2}-[0-9]{2}_[0-9]{2}:[0-9]{2}).key ]]; then
+    # architecture_builddname.YEAR-MONTH-DAY_HOURMINUTE.key
+    if [[ $file =~ (.*)_(.*).([0-9]{4}-[0-9]{2}-[0-9]{2}_[0-9]{2}[0-9]{2}).key ]]; then
         ARCH=${BASH_REMATCH[1]}
         BUILDD=${BASH_REMATCH[2]}
         # Right now timestamp is unused
@@ -222,7 +233,7 @@ for file in $(ls -1 ${INCOMING}/*.key); do
     # We need to check for the amount of keys
     ARCHKEYRING="${base}/${ARCH}/keyring.gpg"
 
-    KEYNO=$(gpg ${DEFGPGOPT} --keyring "${ARCHKEYRING}" --with-colons --list-keys "buildd_${ARCH}-${BUILDD}@buildd.debian.org" | grep -c '^pub:')
+    KEYNO=$(gpg ${DEFGPGOPT} --keyring "${ARCHKEYRING}" --with-colons --list-keys "buildd_${ARCH}-${BUILDD}@buildd.debian.org" | grep -c '^pub:' || /bin/true )
     if [ ${KEYNO} -gt 2 ]; then
         DATE=$(date -Is)
         mv "${INCOMING}/${file}" "${ERRORS}/toomany.${file}.${DATE}"
index e0f6053a7848732d75388148f79cf002036a42be..df4b098e2ba120aa87722436d5b49acb33eafa95 100755 (executable)
@@ -28,8 +28,6 @@ set -E
 export SCRIPTVARS=/srv/ftp-master.debian.org/dak/config/debian/vars
 . $SCRIPTVARS
 
-umask 027
-
 # And use one locale, no matter what the caller has set
 export LANG=C
 export LC_ALL=C
@@ -40,23 +38,28 @@ PROGRAM="buildd-prepare-dir"
 
 # should be relative to the general base dir later
 COPYTARGET="${base}/keyrings"
+GNUPGHOME="${base}/s3kr1t/dot-gnupg"
 base="${base}/scripts/builddkeyrings"
 TARGET="${base}/keyrings"
 REMOVED="${base}/removed-buildd-keys.gpg"
+STAMPFILE="${base}/updatedkeyring"
 
-mkdir -p "${TARGET}/keyrings"
+mkdir -p "${TARGET}/buildd-keyrings"
 
-for arch in $archs; do
-    if [ -f ${base}/${arch}/keyring.gpg ]; then
-        cp -al ${base}/${arch}/keyring.gpg ${TARGET}/keyrings/buildd-${arch}-keyring.gpg
-        chmod 0644 ${TARGET}/keyrings/buildd-${arch}-keyring.gpg
-    fi
-done
+if [ -f "${STAMPFILE}" ]; then
+    rm -f "${STAMPFILE}"
+    for arch in $archs; do
+        if [ -f ${base}/${arch}/keyring.gpg ]; then
+            cp -afl ${base}/${arch}/keyring.gpg ${TARGET}/buildd-keyrings/buildd-${arch}-keyring.gpg
+            chmod 0644 ${TARGET}/buildd-keyrings/buildd-${arch}-keyring.gpg
+        fi
+    done
 
-cd ${TARGET}
-sha512sum keyrings/* > sha512sums
+    cd ${TARGET}
+    sha512sum buildd-keyrings/* > sha512sums
 
-rm -f ${TARGET}/sha512sums.txt
-SIGNINGKEY=$(dak admin c signingkeyids)
-gpg --no-options  --batch --no-tty --armour --default-key ${SIGNINKEY} --clearsign -o "${TARGET}/sha512sums.txt" "${TARGET}/sha512sums"
-rm -f ${TARGET}/sha512sums
+    rm -f ${TARGET}/sha512sums.txt
+    SIGNINGKEY=B1326A8D
+    GNUPGHOME=${GNUPGHOME} gpg --no-options  --batch --no-tty --armour --default-key ${SIGNINGKEY} --clearsign -o "${TARGET}/sha512sums.txt" "${TARGET}/sha512sums"
+    rm -f ${TARGET}/sha512sums
+fi
index c07ff04b144bba0cba43f272a87de293168c5729..3591785d6c028769b169a9b22f1f81b3b1f5e2a9 100755 (executable)
@@ -51,13 +51,13 @@ function cleanup() {
     done
     exit $ERRVAL
 }
-trap cleanup ERR EXIT TERM HUP INT QUIT
 
 base="${base}/scripts/builddkeyrings"
 INCOMING="${base}/incoming"
 ERRORS="${base}/errors"
 ADMINS="${base}/adminkeys.gpg"
 REMOVED="${base}/removed-buildd-keys.gpg"
+STAMPFILE="${base}/updatedkeyring"
 
 # Default options for our gpg calls
 DEFGPGOPT="--no-default-keyring --batch --no-tty --no-options --exit-on-status-write-error --no-greeting"
@@ -67,16 +67,27 @@ if ! [ -d "${INCOMING}" ]; then
     exit 1
 fi
 
+cd "${INCOMING}"
+KEYS=$(find . -maxdepth 1 -mindepth 1 -type f -name \*.del | sed -e "s,./,," | xargs)
+if [ -z "${KEYS}" ]; then
+    exit 0
+fi
+
+trap cleanup ERR EXIT TERM HUP INT QUIT
+
+# Tell prepare-dir that there is an update and it can run
+touch "${STAMPFILE}"
+
 # Whenever something goes wrong, its put in there.
 mkdir -p "${ERRORS}"
 
 # We process all new files in our incoming directory
-for file in $(ls -1 ${INCOMING}/*.del ); do
+for file in ${KEYS}; do
     file=${file##*/}
     # First we want to see if we recognize the filename. The buildd people have
     # to follow a certain schema:
-    # architecture_builddname.YEAR-MONTH-DAY_HOUR:MINUTE.del
-    if [[ $file =~ (.*)_(.*).([0-9]{4}-[0-9]{2}-[0-9]{2}_[0-9]{2}:[0-9]{2}).del ]]; then
+    # architecture_builddname.YEAR-MONTH-DAY_HOURMINUTE.del
+    if [[ $file =~ (.*)_(.*).([0-9]{4}-[0-9]{2}-[0-9]{2}_[0-9]{2}[0-9]{2}).del ]]; then
         ARCH=${BASH_REMATCH[1]}
         BUILDD=${BASH_REMATCH[2]}
         # Right now timestamp is unused
@@ -172,7 +183,7 @@ for file in $(ls -1 ${INCOMING}/*.del ); do
     ARCHKEYRING="${base}/${ARCH}/keyring.gpg"
 
     # Is the key in there?
-    KEYNO=$(gpg ${DEFGPGOPT} --keyring "${ARCHKEYRING}" --with-colons --list-keys ${KEYID} | grep -c '^pub:')
+    KEYNO=$(gpg ${DEFGPGOPT} --keyring "${ARCHKEYRING}" --with-colons --list-keys ${KEYID} | grep -c '^pub:' || /bin/true )
 
     if [ $KEYNO -eq 1 ]; then
         # Right, exactly one there, lets get rid of it
index cefb68e62bc334507161a4bd31294f88aac63337..f894f1ccb943509c991759432aeb4742fd083e1e 100755 (executable)
@@ -1,8 +1,8 @@
 #!/bin/bash
 #
-# $Id: ddtp_i18n_check.sh 1670 2009-03-31 20:57:49Z nekral-guest $
+# $Id: ddtp_i18n_check.sh 2535 2011-02-19 14:20:52Z nekral-guest $
 # 
-# Copyright (C) 2008, Felipe Augusto van de Wiel <faw@funlabs.org>
+# Copyright (C) 2008, 2011 Felipe Augusto van de Wiel <faw@funlabs.org>
 # Copyright (C) 2008, 2009 Nicolas François <nicolas.francois@centraliens.net>
 #
 # This program is free software; you can redistribute it and/or modify
@@ -25,6 +25,10 @@ DEBUG=0
 # files.
 DRY_RUN=0
 
+# When GEN_IDX=1, we create the Index files.  There is a runtime option
+# to not create/generate the Index file.
+GEN_IDX=1
+
 dists_parent_dir=""
 # If no argument indicates the PACKAGES_LISTS_DIR then use '.'
 PACKAGES_LISTS_DIR=""
@@ -35,6 +39,7 @@ usage () {
        echo "    --debug      Debug mode: do not stop after the first error" >&2
        echo "    --dry-run    Do not generate the compressed version of the " >&2
        echo "                 Translation files">&2
+       echo "    --no-index   Do not generate the Index files" >&2
        exit 1
 }
 
@@ -47,6 +52,9 @@ for opt; do
                "--dry-run")
                        DRY_RUN=1
                        ;;
+               "--no-index")
+                       GEN_IDX=0
+                       ;;
                "-*")
                        usage
                        ;;
@@ -80,21 +88,21 @@ elif [ ! -d "$PACKAGES_LISTS_DIR" ]; then
        usage
 fi
 
-#STABLE="lenny"
+#STABLE="squeeze"
 TESTING="wheezy"
 UNSTABLE="sid"
 
 # Original SHA256SUMS, generated by i18n.debian.net
-SHA256SUMS="SHA256SUMS"
+CHECKSUMS="SHA256SUMS"
 
 # DAK Timestamp
 TIMESTAMP="timestamp"
 
 # These special files must exist on the top of dists_parent_dir
-SPECIAL_FILES="$SHA256SUMS $TIMESTAMP $TIMESTAMP.gpg"
+SPECIAL_FILES="$CHECKSUMS $TIMESTAMP $TIMESTAMP.gpg"
 
 # Temporary working directory. We need a full path to reduce the
-# complexity of checking SHA256SUMS and cleaning/removing TMPDIR
+# complexity of checking CHECKSUMS and cleaning/removing TMPDIR
 TEMP_WORK_DIR=$(mktemp -d -t ddtp_dinstall_tmpdir.XXXXXX)
 cd "$TEMP_WORK_DIR"
 TMP_WORK_DIR=$(pwd)
@@ -105,6 +113,7 @@ unset TEMP_WORK_DIR
 trap_exit () {
        rm -rf "$TMP_WORK_DIR"
        rm -f "$dists_parent_dir"/dists/*/main/i18n/Translation-*.bz2
+       rm -f "$dists_parent_dir"/dists/*/main/i18n/Index
        exit 1
 }
 trap trap_exit EXIT HUP INT QUIT TERM
@@ -315,18 +324,18 @@ for sf in $SPECIAL_FILES; do
        fi
 done
 
-# Comparing SHA256SUMS
-# We don use -c because a file could exist in the directory tree and not in
-# the SHA256SUMS, so we sort the existing SHA256SUMS and we create a new one
+# Comparing CHECKSUMS
+# We don't use -c because a file could exist in the directory tree and not in
+# the CHECKSUMS, so we sort the existing CHECKSUMS and we create a new one
 # already sorted, if cmp fails then files are different and we don't want to
 # continue.
 cd "$dists_parent_dir"
-find dists -type f -print0 |xargs --null sha256sum > "$TMP_WORK_DIR/$SHA256SUMS.new"
-sort "$SHA256SUMS" > "$TMP_WORK_DIR/$SHA256SUMS.sorted"
-sort "$TMP_WORK_DIR/$SHA256SUMS.new" > "$TMP_WORK_DIR/$SHA256SUMS.new.sorted"
-if ! cmp --quiet "$TMP_WORK_DIR/$SHA256SUMS.sorted" "$TMP_WORK_DIR/$SHA256SUMS.new.sorted"; then
-       echo "Failed to compare the SHA256SUMS, they are not identical!" >&2
-       diff -au "$TMP_WORK_DIR/$SHA256SUMS.sorted" "$TMP_WORK_DIR/$SHA256SUMS.new.sorted" >&2
+find dists -type f -print0 |xargs --null sha256sum > "$TMP_WORK_DIR/$CHECKSUMS.new"
+sort "$CHECKSUMS" > "$TMP_WORK_DIR/$CHECKSUMS.sorted"
+sort "$TMP_WORK_DIR/$CHECKSUMS.new" > "$TMP_WORK_DIR/$CHECKSUMS.new.sorted"
+if ! cmp --quiet "$TMP_WORK_DIR/$CHECKSUMS.sorted" "$TMP_WORK_DIR/$CHECKSUMS.new.sorted"; then
+       echo "Failed to compare the $CHECKSUMS, they are not identical!" >&2
+       diff -au "$TMP_WORK_DIR/$CHECKSUMS.sorted" "$TMP_WORK_DIR/$CHECKSUMS.new.sorted" >&2
        exit 1
 fi
 cd "$OLDPWD"
@@ -346,6 +355,14 @@ while read f; do
                if ! is_dirname_okay "$f"; then
                        echo "Wrong directory name: $f" >&2
                        exit 1
+               else
+                       # If the directory name is OK, and if its name is i18n
+                       # and GEN_IDX is enabled, we generate the header of the
+                       # Index file
+                       if [ "$(basename $f)" = "i18n" -a "$GEN_IDX" = "1" ];
+                       then
+                               echo "SHA1:" > "$f/Index"
+                       fi
                fi
        elif [ -f "$f" ]; then
                # If $f is in $SPECIAL_FILES, we skip to the next loop because
@@ -393,6 +410,17 @@ while read f; do
                        # Now generate the compressed files
                        bzip2 "$f"
                fi
+
+               # Create Index
+               if [ "$GEN_IDX" = "1" ]; then
+                       fbz=${f}.bz2
+                       IDX=$(dirname $f)
+                       tf_name=$(basename $fbz)
+                       tf_sha1=$(sha1sum $fbz)
+                       tf_size=$(du $fbz)
+                       printf ' %s % 7s %s\n' "${tf_sha1% *}" \
+                               "${tf_size%     *}" "${tf_name}" >> "$IDX/Index"
+               fi
        else
                echo "Neither a file or directory: $f" >&2
                exit 1
index 16c482a0ef7e26653b80d64618a9c8c4d3f7f687..e4d3caecaa4a77f4c2380274816d55c39ed43e0f 100644 (file)
@@ -64,7 +64,22 @@ $statusdelay = 30;
 
 # names of the keyring files
 @keyrings = ( "/srv/keyring.debian.org/keyrings/debian-keyring.gpg",
-              "/srv/keyring.debian.org/keyrings/debian-maintainers.gpg" );
+              "/srv/keyring.debian.org/keyrings/debian-maintainers.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-alpha-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-amd64-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armel-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-arm-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hppa-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hurd-i386-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-i386-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-ia64-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-kfreebsd-amd64-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-kfreebsd-i386-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mipsel-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mips-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-powerpc-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-sparc-keyring.gpg");
 
 # our log file
 $logfile = "$queued_dir/log";
index f04e925df7bbdd1d64967c5066af1298a1a0bf9a..1c9e0c9a9854a2f10b141c9b3d08e3273a9cb249 100644 (file)
@@ -63,7 +63,22 @@ $statusfile = "$incoming/status";
 $statusdelay = 30;
 
 # names of the keyring files
-@keyrings = ( "/srv/backports-master.debian.org/keyrings/keyring.gpg" );
+@keyrings = ( "/srv/backports-master.debian.org/keyrings/keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-alpha-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-amd64-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armel-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-arm-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hppa-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hurd-i386-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-i386-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-ia64-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-kfreebsd-amd64-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-kfreebsd-i386-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mipsel-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mips-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-powerpc-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-sparc-keyring.gpg");
 
 # our log file
 $logfile = "$queued_dir/log";
index fe00f0d4c18fd1c44e0365d596c75d5ed0da9af8..1806a1143e0c461b4c86c43326e87afed56239aa 100644 (file)
@@ -63,7 +63,22 @@ $statusfile = "$incoming/status";
 $statusdelay = 30;
 
 # names of the keyring files
-@keyrings = ( "/srv/keyring.debian.org/keyrings/debian-keyring.gpg" );
+@keyrings = ( "/srv/keyring.debian.org/keyrings/debian-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-alpha-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-amd64-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armel-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-arm-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hppa-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hurd-i386-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-i386-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-ia64-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-kfreebsd-amd64-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-kfreebsd-i386-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mipsel-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mips-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-powerpc-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-sparc-keyring.gpg");
 
 # our log file
 $logfile = "$queued_dir/log";
index 77cc90d7a264cd0cfea475782ee655dbe73f7990..d6071278402d8311f25748a56861c5dfb8ae980f 100644 (file)
@@ -64,7 +64,22 @@ $statusdelay = 30;
 
 # names of the keyring files
 @keyrings = ( "/srv/keyring.debian.org/keyrings/debian-keyring.gpg",
-              "/srv/keyring.debian.org/keyrings/debian-maintainers.gpg" );
+              "/srv/keyring.debian.org/keyrings/debian-maintainers.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-alpha-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-amd64-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armel-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-arm-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hppa-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hurd-i386-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-i386-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-ia64-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-kfreebsd-amd64-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-kfreebsd-i386-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mipsel-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mips-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-powerpc-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390-keyring.gpg",
+              "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-sparc-keyring.gpg");
 
 # our log file
 $logfile = "$queued_dir/log";