#!/usr/bin/env python
-# Create all the Release files
+"""
+Create all the Release files
-# Copyright (C) 2001, 2002, 2006 Anthony Towns <ajt@debian.org>
+@contact: Debian FTPMaster <ftpmaster@debian.org>
+@copyright: 2011 Joerg Jaspert <joerg@debian.org>
+@copyright: 2011 Mark Hymers <mhy@debian.org>
+@license: GNU General Public License version 2 or later
+
+"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-# ``Bored now''
+################################################################################
+
+# <mhy> I wish they wouldnt leave biscuits out, thats just tempting. Damnit.
################################################################################
-import sys, os, popen2, tempfile, stat, time, pg
+import sys
+import os
+import os.path
+import stat
+import time
+import gzip
+import bz2
+import errno
import apt_pkg
-from daklib import utils
+import subprocess
+from tempfile import mkstemp, mkdtemp
+import commands
+from sqlalchemy.orm import object_session
+
+from daklib import utils, daklog
+from daklib.regexes import re_gensubrelease, re_includeinrelease
from daklib.dak_exceptions import *
+from daklib.dbconn import *
+from daklib.config import Config
+from daklib.dakmultiprocessing import DakProcessPool, PROC_STATUS_SUCCESS
+import daklib.daksubprocess
################################################################################
-
-Cnf = None
-projectB = None
-out = None
-AptCnf = None
+Logger = None #: Our logging object
################################################################################
def usage (exit_code=0):
- print """Usage: dak generate-releases [OPTION]... [SUITE]...
-Generate Release files (for SUITE).
+ """ Usage information"""
- -h, --help show this help and exit
- -a, --apt-conf FILE use FILE instead of default apt.conf
- -f, --force-touch ignore Untouchable directives in dak.conf
+ print """Usage: dak generate-releases [OPTIONS]
+Generate the Release files
-If no SUITE is given Release files are generated for all suites."""
+  -a, --archive=ARCHIVE  process suites in ARCHIVE
+  -s, --suite=SUITE(s)   process this suite
+                         Default: All suites not marked 'untouchable'
+  -f, --force            Allow processing of untouchable suites
+                         CAREFUL: Only to be used at (point) release time!
+  -h, --help             show this help and exit
+  -q, --quiet            Don't output progress
+
+SUITE can be a space-separated list, e.g.
+   --suite=unstable testing
+"""
sys.exit(exit_code)
-################################################################################
-
-def add_tiffani (files, path, indexstem):
- index = "%s.diff/Index" % (indexstem)
- filepath = "%s/%s" % (path, index)
- if os.path.exists(filepath):
- #print "ALERT: there was a tiffani file %s" % (filepath)
- files.append(index)
-
-def compressnames (tree,type,file):
- compress = AptCnf.get("%s::%s::Compress" % (tree,type), AptCnf.get("Default::%s::Compress" % (type), ". gzip"))
- result = []
- cl = compress.split()
- uncompress = ("." not in cl)
- for mode in compress.split():
- if mode == ".":
- result.append(file)
- elif mode == "gzip":
- if uncompress:
- result.append("<zcat/.gz>" + file)
- uncompress = 0
- result.append(file + ".gz")
- elif mode == "bzip2":
- if uncompress:
- result.append("<bzcat/.bz2>" + file)
- uncompress = 0
- result.append(file + ".bz2")
- return result
-
-def create_temp_file (cmd):
- f = tempfile.TemporaryFile()
- r = popen2.popen2(cmd)
- r[1].close()
- r = r[0]
- size = 0
- while 1:
- x = r.readline()
- if not x:
- r.close()
- del x,r
- break
- f.write(x)
- size += len(x)
- f.flush()
- f.seek(0)
- return (size, f)
-
-def print_md5sha_files (tree, files, hashop):
- path = Cnf["Dir::Root"] + tree + "/"
- for name in files:
- try:
- if name[0] == "<":
- j = name.index("/")
- k = name.index(">")
- (cat, ext, name) = (name[1:j], name[j+1:k], name[k+1:])
- (size, file_handle) = create_temp_file("%s %s%s%s" %
- (cat, path, name, ext))
+################################################################################
+
+def sign_release_dir(suite, dirname):
+ cnf = Config()
+
+ if cnf.has_key("Dinstall::SigningKeyring"):
+ keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
+ if cnf.has_key("Dinstall::SigningPubKeyring"):
+ keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
+
+ arguments = "--no-options --batch --no-tty --armour --personal-digest-preferences=SHA256"
+
+ relname = os.path.join(dirname, 'Release')
+
+ dest = os.path.join(dirname, 'Release.gpg')
+ if os.path.exists(dest):
+ os.unlink(dest)
+
+ inlinedest = os.path.join(dirname, 'InRelease')
+ if os.path.exists(inlinedest):
+ os.unlink(inlinedest)
+
+ defkeyid=""
+ for keyid in suite.signingkeys or []:
+ defkeyid += "--local-user %s " % keyid
+
+ os.system("gpg %s %s %s --detach-sign <%s >>%s" %
+ (keyring, defkeyid, arguments, relname, dest))
+ os.system("gpg %s %s %s --clearsign <%s >>%s" %
+ (keyring, defkeyid, arguments, relname, inlinedest))
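+
+        # With a single signing key, the detach-sign call above runs roughly
+        # (paths and key id are illustrative):
+        #   gpg --secret-keyring "/path/secring.gpg" --local-user 0xDEADBEEF \
+        #       --no-options --batch --no-tty --armour \
+        #       --personal-digest-preferences=SHA256 \
+        #       --detach-sign <.../Release >>.../Release.gpg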
+
+class XzFile(object):
+ def __init__(self, filename, mode='r'):
+ self.filename = filename
+ def read(self):
+ cmd = ("xz", "-d")
+ with open(self.filename, 'r') as stdin:
+ process = daklib.daksubprocess.Popen(cmd, stdin=stdin, stdout=subprocess.PIPE)
+ (stdout, stderr) = process.communicate()
+ return stdout
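+
+    # Note: decompression shells out to xz(1) because the Python 2 standard
+    # library has no lzma module (it only arrived in Python 3.3).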
+
+
+class HashFunc(object):
+ def __init__(self, release_field, func, db_name):
+ self.release_field = release_field
+ self.func = func
+ self.db_name = db_name
+
+RELEASE_HASHES = [
+ HashFunc('MD5Sum', apt_pkg.md5sum, 'md5'),
+ HashFunc('SHA1', apt_pkg.sha1sum, 'sha1'),
+ HashFunc('SHA256', apt_pkg.sha256sum, 'sha256'),
+]
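+
+# Each HashFunc pairs a Release field name with the apt_pkg digest helper and
+# the name used in the suite's "checksums" list, e.g.:
+#   RELEASE_HASHES[0].func("foo") == "acbd18db4cc2f85cedef654fccc4a4d8"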
+
+
+class ReleaseWriter(object):
+ def __init__(self, suite):
+ self.suite = suite
+
+ def suite_path(self):
+ """
+ Absolute path to the suite-specific files.
+ """
+ cnf = Config()
+ suite_suffix = cnf.find("Dinstall::SuiteSuffix", "")
+
+ return os.path.join(self.suite.archive.path, 'dists',
+ self.suite.suite_name, suite_suffix)
+
+ def suite_release_path(self):
+ """
+ Absolute path where Release files are physically stored.
+ This should be a path that sorts after the dists/ directory.
+ """
+ # TODO: Eventually always create Release in `zzz-dists` to avoid
+ # special cases. However we don't want to move existing Release files
+ # for released suites.
+ # See `create_release_symlinks` below.
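+        # For a byhash suite this returns e.g.
+        # <archive.path>/zzz-dists/unstable/ (illustrative), which sorts
+        # after <archive.path>/dists/.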
+ if not self.suite.byhash:
+ return self.suite_path()
+
+ cnf = Config()
+ suite_suffix = cnf.find("Dinstall::SuiteSuffix", "")
+
+ return os.path.join(self.suite.archive.path, 'zzz-dists',
+ self.suite.suite_name, suite_suffix)
+
+ def create_release_symlinks(self):
+ """
+ Create symlinks for Release files.
+ This creates the symlinks for Release files in the `suite_path`
+ to the actual files in `suite_release_path`.
+ """
+ # TODO: Eventually always create the links.
+ # See `suite_release_path` above.
+ if not self.suite.byhash:
+ return
+
+ relpath = os.path.relpath(self.suite_release_path(), self.suite_path())
+ for f in ("Release", "Release.gpg", "InRelease"):
+ source = os.path.join(relpath, f)
+ dest = os.path.join(self.suite_path(), f)
+            if not os.path.islink(dest):
+                # dest may not exist yet; only remove a stale regular file
+                if os.path.exists(dest):
+                    os.unlink(dest)
+ elif os.readlink(dest) == source:
+ continue
else:
- size = os.stat(path + name)[stat.ST_SIZE]
- file_handle = utils.open_file(path + name)
- except CantOpenError:
- print "ALERT: Couldn't open " + path + name
- else:
- hash = hashop(file_handle)
- file_handle.close()
- out.write(" %s %8d %s\n" % (hash, size, name))
+                os.unlink(dest)
+ os.symlink(source, dest)
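+
+        # The result, e.g. for suite "unstable" with no suite suffix
+        # (paths illustrative):
+        #   dists/unstable/Release -> ../../zzz-dists/unstable/Release
+        #   dists/unstable/InRelease -> ../../zzz-dists/unstable/InRelease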
-def print_md5_files (tree, files):
- print_md5sha_files (tree, files, apt_pkg.md5sum)
+ def create_output_directories(self):
+ for path in (self.suite_path(), self.suite_release_path()):
+ try:
+ os.makedirs(path)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
-def print_sha1_files (tree, files):
- print_md5sha_files (tree, files, apt_pkg.sha1sum)
+ def generate_release_files(self):
+ """
+ Generate Release files for the given suite
-def print_sha256_files (tree, files):
- print_md5sha_files (tree, files, apt_pkg.sha256sum)
+        """
+
+        suite = self.suite
+        session = object_session(suite)
+
+        architectures = get_suite_architectures(suite.suite_name, skipall=True, skipsrc=True, session=session)
+
+        # attribs is a sequence of (Release field, database attribute) pairs
+        # used to fill in the Release header
+        attribs = ( ('Origin', 'origin'),
+                    ('Label', 'label'),
+                    ('Suite', 'release_suite_output'),
+                    ('Version', 'version'),
+                    ('Codename', 'codename'),
+                    ('Changelogs', 'changelog_url'),
+                  )
+
+        # A "Sub" Release file has slightly different fields
+        subattribs = ( ('Archive', 'suite_name'),
+                       ('Origin', 'origin'),
+                       ('Label', 'label'),
+                       ('Version', 'version') )
+
+        # Boolean fields: if the database value is true, write "<Field>: yes"
+        # into the Release file
+        boolattrs = ( ('NotAutomatic', 'notautomatic'),
+                      ('ButAutomaticUpgrades', 'butautomaticupgrades'),
+                      ('Acquire-By-Hash', 'byhash'),
+                    )
+
+        cnf = Config()
+
+        suite_suffix = cnf.find("Dinstall::SuiteSuffix", "")
+
+        self.create_output_directories()
+        self.create_release_symlinks()
+
+        outfile = os.path.join(self.suite_release_path(), "Release")
+        out = open(outfile + ".new", "w")
+
+        for key, dbfield in attribs:
+            # Hack: skip NULL Version and Changelogs fields, as the old code
+            # did. We should probably just skip any attribute which is None.
+            if key in ("Version", "Changelogs") and getattr(suite, dbfield) is None:
+                continue
+
+            out.write("%s: %s\n" % (key, getattr(suite, dbfield)))
+
+        out.write("Date: %s\n" % (time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime(time.time()))))
+
+        if suite.validtime:
+            validtime = float(suite.validtime)
+            out.write("Valid-Until: %s\n" % (time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime(time.time()+validtime))))
+
+        for key, dbfield in boolattrs:
+            if getattr(suite, dbfield, False):
+                out.write("%s: yes\n" % (key))
+
+        out.write("Architectures: %s\n" % (" ".join([a.arch_string for a in architectures])))
+
+        components = [ c.component_name for c in suite.components ]
+
+        out.write("Components: %s\n" % (" ".join(components)))
+
+        # For exact compatibility with old g-r, write out Description here
+        # instead of with the rest of the DB fields above
+        if getattr(suite, 'description') is not None:
+            out.write("Description: %s\n" % suite.description)
+
+        for comp in components:
+            for dirpath, dirnames, filenames in os.walk(os.path.join(self.suite_path(), comp), topdown=True):
+                if not re_gensubrelease.match(dirpath):
+                    continue
+
+                subfile = os.path.join(dirpath, "Release")
+                subrel = open(subfile + '.new', "w")
+
+                for key, dbfield in subattribs:
+                    if getattr(suite, dbfield) is not None:
+                        subrel.write("%s: %s\n" % (key, getattr(suite, dbfield)))
+
+                for key, dbfield in boolattrs:
+                    if getattr(suite, dbfield, False):
+                        subrel.write("%s: yes\n" % (key))
+
+                subrel.write("Component: %s%s\n" % (suite_suffix, comp))
+
+                # Urgh, but until we have all the suite/component/arch stuff in the DB,
+                # this'll have to do
+                arch = os.path.split(dirpath)[-1]
+                if arch.startswith('binary-'):
+                    arch = arch[7:]
+
+                subrel.write("Architecture: %s\n" % (arch))
+                subrel.close()
+
+                os.rename(subfile + '.new', subfile)
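+
+                # The sub-Release just written contains, e.g. (illustrative):
+                #   Archive: unstable
+                #   Origin: Debian
+                #   Label: Debian
+                #   Component: main
+                #   Architecture: amd64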
+
+        # Now that the groundwork is done, add the files and their checksums
+        # to the main Release file
+        oldcwd = os.getcwd()
+
+        os.chdir(self.suite_path())
+
+        hashes = [x for x in RELEASE_HASHES if x.db_name in suite.checksums]
+
+        fileinfo = {}
+
+        uncompnotseen = {}
+
+        for dirpath, dirnames, filenames in os.walk(".", followlinks=True, topdown=True):
+            for entry in filenames:
+                # Skip things we don't want to include
+                if not re_includeinrelease.match(entry):
+                    continue
+
+                if dirpath == '.' and entry in ["Release", "Release.gpg", "InRelease"]:
+                    continue
+
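+                # os.walk('.') yields "./"-prefixed paths; lstrip('./') strips
+                # the leading "." and "/" characters, which removes that prefix
+                # (assuming no dot-directories in the archive tree)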
+                filename = os.path.join(dirpath.lstrip('./'), entry)
+                fileinfo[filename] = {}
+                contents = open(filename, 'r').read()
+
+                # If we find a file for which we have a compressed version and
+                # haven't yet seen the uncompressed one, store the possibility
+                # for future use
+                if entry.endswith(".gz") and filename[:-3] not in uncompnotseen:
+                    uncompnotseen[filename[:-3]] = (gzip.GzipFile, filename)
+                elif entry.endswith(".bz2") and filename[:-4] not in uncompnotseen:
+                    uncompnotseen[filename[:-4]] = (bz2.BZ2File, filename)
+                elif entry.endswith(".xz") and filename[:-3] not in uncompnotseen:
+                    uncompnotseen[filename[:-3]] = (XzFile, filename)
+
+                fileinfo[filename]['len'] = len(contents)
+
+                for hf in hashes:
+                    fileinfo[filename][hf.release_field] = hf.func(contents)
+
+        for filename, comp in uncompnotseen.items():
+            # If we've already seen the uncompressed file, we don't
+            # need to do anything again
+            if filename in fileinfo:
+                continue
+
+            fileinfo[filename] = {}
+
+            # File handler is comp[0], filename of compressed file is comp[1]
+            contents = comp[0](comp[1], 'r').read()
+
+            fileinfo[filename]['len'] = len(contents)
+
+            for hf in hashes:
+                fileinfo[filename][hf.release_field] = hf.func(contents)
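+        # fileinfo now maps each published index file to its metadata, e.g.
+        #   fileinfo['main/binary-amd64/Packages.gz'] ==
+        #     {'len': 12345, 'MD5Sum': '...', 'SHA1': '...', 'SHA256': '...'}
+        # (hash keys depend on suite.checksums; values illustrative)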
+
+
+        for field in sorted(h.release_field for h in hashes):
+            out.write('%s:\n' % field)
+            for filename in sorted(fileinfo.keys()):
+                out.write(" %s %8d %s\n" % (fileinfo[filename][field], fileinfo[filename]['len'], filename))
+
+        out.close()
+        os.rename(outfile + '.new', outfile)
+
+        if suite.byhash:
+            query = """
+            UPDATE hashfile SET unreferenced = CURRENT_TIMESTAMP
+            WHERE suite_id = :id AND unreferenced IS NULL"""
+            session.execute(query, {'id': suite.suite_id})
+
+            for filename in fileinfo:
+                if not os.path.exists(filename):
+                    # probably an uncompressed index we didn't generate
+                    continue
+
+                for h in hashes:
+                    field = h.release_field
+                    hashfile = os.path.join(os.path.dirname(filename), 'by-hash', field, fileinfo[filename][field])
+                    query = "SELECT 1 FROM hashfile WHERE path = :p AND suite_id = :id"
+                    q = session.execute(
+                        query,
+                        {'p': hashfile, 'id': suite.suite_id})
+                    if q.rowcount:
+                        session.execute('''
+                            UPDATE hashfile SET unreferenced = NULL
+                            WHERE path = :p and suite_id = :id''',
+                            {'p': hashfile, 'id': suite.suite_id})
+                    else:
+                        session.execute('''
+                            INSERT INTO hashfile (path, suite_id)
+                            VALUES (:p, :id)''',
+                            {'p': hashfile, 'id': suite.suite_id})
+            session.commit()
+
+        for filename in fileinfo:
+            if not os.path.exists(filename):
+                # probably an uncompressed index we didn't generate
+                continue
+
+            for h in hashes:
+                field = h.release_field
+                hashfile = os.path.join(os.path.dirname(filename), 'by-hash', field, fileinfo[filename][field])
+                try:
+                    os.makedirs(os.path.dirname(hashfile))
+                except OSError as exc:
+                    if exc.errno != errno.EEXIST:
+                        raise
+                try:
+                    os.link(filename, hashfile)
+                except OSError as exc:
+                    if exc.errno != errno.EEXIST:
+                        raise
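+
+                # e.g. main/binary-amd64/by-hash/SHA256/<hexdigest> is now a
+                # hard link to main/binary-amd64/Packages.gz (illustrative)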
+
+
+        sign_release_dir(suite, os.path.dirname(outfile))
+
+        os.chdir(oldcwd)
+
+        return
-################################################################################
def main ():
- global Cnf, AptCnf, projectB, out
- out = sys.stdout
+ global Logger
+
+ cnf = Config()
- Cnf = utils.get_conf()
+ for i in ["Help", "Suite", "Force", "Quiet"]:
+ if not cnf.has_key("Generate-Releases::Options::%s" % (i)):
+ cnf["Generate-Releases::Options::%s" % (i)] = ""
Arguments = [('h',"help","Generate-Releases::Options::Help"),
- ('a',"apt-conf","Generate-Releases::Options::Apt-Conf", "HasArg"),
- ('f',"force-touch","Generate-Releases::Options::Force-Touch"),
- ]
- for i in [ "help", "apt-conf", "force-touch" ]:
- if not Cnf.has_key("Generate-Releases::Options::%s" % (i)):
- Cnf["Generate-Releases::Options::%s" % (i)] = ""
+ ('a','archive','Generate-Releases::Options::Archive','HasArg'),
+ ('s',"suite","Generate-Releases::Options::Suite"),
+ ('f',"force","Generate-Releases::Options::Force"),
+ ('q',"quiet","Generate-Releases::Options::Quiet"),
+ ('o','option','','ArbItem')]
- suites = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
- Options = Cnf.SubTree("Generate-Releases::Options")
+ suite_names = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
+ Options = cnf.subtree("Generate-Releases::Options")
if Options["Help"]:
usage()
- if not Options["Apt-Conf"]:
- Options["Apt-Conf"] = utils.which_apt_conf_file()
-
- AptCnf = apt_pkg.newConfiguration()
- apt_pkg.ReadConfigFileISC(AptCnf, Options["Apt-Conf"])
+ Logger = daklog.Logger('generate-releases')
+    # Set up a multiprocessing pool with as many workers as we have CPU cores
+    pool = DakProcessPool()
- projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
+ session = DBConn().session()
- if not suites:
- suites = Cnf.SubTree("Suite").List()
+ if Options["Suite"]:
+ suites = []
+ for s in suite_names:
+ suite = get_suite(s.lower(), session)
+ if suite:
+ suites.append(suite)
+ else:
+ print "cannot find suite %s" % s
+ Logger.log(['cannot find suite %s' % s])
+ else:
+ query = session.query(Suite).filter(Suite.untouchable == False)
+ if 'Archive' in Options:
+ query = query.join(Suite.archive).filter(Archive.archive_name==Options['Archive'])
+ suites = query.all()
+
+    for s in suites:
+        if s.untouchable and not Options["Force"]:
+            print "Skipping %s (untouchable)" % s.suite_name
+            continue
- for suite in suites:
- print "Processing: " + suite
- SuiteBlock = Cnf.SubTree("Suite::" + suite)
+ if not Options["Quiet"]:
+ print "Processing %s" % s.suite_name
+ Logger.log(['Processing release file for Suite: %s' % (s.suite_name)])
+ pool.apply_async(generate_helper, (s.suite_id, ))
- if SuiteBlock.has_key("Untouchable") and not Options["Force-Touch"]:
- print "Skipping: " + suite + " (untouchable)"
- continue
+ # No more work will be added to our pool, close it and then wait for all to finish
+ pool.close()
+ pool.join()
- suite = suite.lower()
-
- origin = SuiteBlock["Origin"]
- label = SuiteBlock.get("Label", origin)
- codename = SuiteBlock.get("CodeName", "")
-
- version = ""
- description = ""
-
- q = projectB.query("SELECT version, description FROM suite WHERE suite_name = '%s'" % (suite))
- qs = q.getresult()
- if len(qs) == 1:
- if qs[0][0] != "-": version = qs[0][0]
- if qs[0][1]: description = qs[0][1]
-
- if SuiteBlock.has_key("NotAutomatic"):
- notautomatic = "yes"
- else:
- notautomatic = ""
-
- if SuiteBlock.has_key("Components"):
- components = SuiteBlock.ValueList("Components")
- else:
- components = []
-
- suite_suffix = Cnf.Find("Dinstall::SuiteSuffix")
- if components and suite_suffix:
- longsuite = suite + "/" + suite_suffix
- else:
- longsuite = suite
-
- tree = SuiteBlock.get("Tree", "dists/%s" % (longsuite))
-
- if AptCnf.has_key("tree::%s" % (tree)):
- pass
- elif AptCnf.has_key("bindirectory::%s" % (tree)):
- pass
- else:
- aptcnf_filename = os.path.basename(utils.which_apt_conf_file())
- print "ALERT: suite %s not in %s, nor untouchable!" % (suite, aptcnf_filename)
- continue
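+
+    # overall_status() folds the workers' PROC_STATUS_* results into one exit
+    # code (non-zero when any suite failed; see daklib.dakmultiprocessing)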
+ retcode = pool.overall_status()
- print Cnf["Dir::Root"] + tree + "/Release"
- out = open(Cnf["Dir::Root"] + tree + "/Release", "w")
+ if retcode > 0:
+ # TODO: CENTRAL FUNCTION FOR THIS / IMPROVE LOGGING
+ Logger.log(['Release file generation broken: %s' % (','.join([str(x[1]) for x in pool.results]))])
- out.write("Origin: %s\n" % (origin))
- out.write("Label: %s\n" % (label))
- out.write("Suite: %s\n" % (suite))
- if version != "":
- out.write("Version: %s\n" % (version))
- if codename != "":
- out.write("Codename: %s\n" % (codename))
- out.write("Date: %s\n" % (time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime(time.time()))))
- if notautomatic != "":
- out.write("NotAutomatic: %s\n" % (notautomatic))
- out.write("Architectures: %s\n" % (" ".join(filter(utils.real_arch, SuiteBlock.ValueList("Architectures")))))
- if components:
- out.write("Components: %s\n" % (" ".join(components)))
-
- if description:
- out.write("Description: %s\n" % (description))
-
- files = []
-
- if AptCnf.has_key("tree::%s" % (tree)):
- for sec in AptCnf["tree::%s::Sections" % (tree)].split():
- for arch in AptCnf["tree::%s::Architectures" % (tree)].split():
- if arch == "source":
- filepath = "%s/%s/Sources" % (sec, arch)
- for file in compressnames("tree::%s" % (tree), "Sources", filepath):
- files.append(file)
- add_tiffani(files, Cnf["Dir::Root"] + tree, filepath)
- else:
- disks = "%s/disks-%s" % (sec, arch)
- diskspath = Cnf["Dir::Root"]+tree+"/"+disks
- if os.path.exists(diskspath):
- for dir in os.listdir(diskspath):
- if os.path.exists("%s/%s/md5sum.txt" % (diskspath, dir)):
- files.append("%s/%s/md5sum.txt" % (disks, dir))
-
- filepath = "%s/binary-%s/Packages" % (sec, arch)
- for file in compressnames("tree::%s" % (tree), "Packages", filepath):
- files.append(file)
- add_tiffani(files, Cnf["Dir::Root"] + tree, filepath)
-
- if arch == "source":
- rel = "%s/%s/Release" % (sec, arch)
- else:
- rel = "%s/binary-%s/Release" % (sec, arch)
- relpath = Cnf["Dir::Root"]+tree+"/"+rel
+ Logger.close()
- try:
- if os.access(relpath, os.F_OK):
- if os.stat(relpath).st_nlink > 1:
- os.unlink(relpath)
- release = open(relpath, "w")
- #release = open(longsuite.replace("/","_") + "_" + arch + "_" + sec + "_Release", "w")
- except IOError:
- utils.fubar("Couldn't write to " + relpath)
-
- release.write("Archive: %s\n" % (suite))
- if version != "":
- release.write("Version: %s\n" % (version))
- if suite_suffix:
- release.write("Component: %s/%s\n" % (suite_suffix,sec))
- else:
- release.write("Component: %s\n" % (sec))
- release.write("Origin: %s\n" % (origin))
- release.write("Label: %s\n" % (label))
- if notautomatic != "":
- release.write("NotAutomatic: %s\n" % (notautomatic))
- release.write("Architecture: %s\n" % (arch))
- release.close()
- files.append(rel)
-
- if AptCnf.has_key("tree::%s/main" % (tree)):
- for dis in ["main", "contrib", "non-free"]:
- if not AptCnf.has_key("tree::%s/%s" % (tree, dis)): continue
- sec = AptCnf["tree::%s/%s::Sections" % (tree,dis)].split()[0]
- if sec != "debian-installer":
- print "ALERT: weird non debian-installer section in %s" % (tree)
-
- for arch in AptCnf["tree::%s/%s::Architectures" % (tree,dis)].split():
- if arch != "source": # always true
- for file in compressnames("tree::%s/%s" % (tree,dis),
- "Packages",
- "%s/%s/binary-%s/Packages" % (dis, sec, arch)):
- files.append(file)
- elif AptCnf.has_key("tree::%s::FakeDI" % (tree)):
- usetree = AptCnf["tree::%s::FakeDI" % (tree)]
- sec = AptCnf["tree::%s/main::Sections" % (usetree)].split()[0]
- if sec != "debian-installer":
- print "ALERT: weird non debian-installer section in %s" % (usetree)
-
- for arch in AptCnf["tree::%s/main::Architectures" % (usetree)].split():
- if arch != "source": # always true
- for file in compressnames("tree::%s/main" % (usetree), "Packages", "main/%s/binary-%s/Packages" % (sec, arch)):
- files.append(file)
-
- elif AptCnf.has_key("bindirectory::%s" % (tree)):
- for file in compressnames("bindirectory::%s" % (tree), "Packages", AptCnf["bindirectory::%s::Packages" % (tree)]):
- files.append(file.replace(tree+"/","",1))
- for file in compressnames("bindirectory::%s" % (tree), "Sources", AptCnf["bindirectory::%s::Sources" % (tree)]):
- files.append(file.replace(tree+"/","",1))
- else:
- print "ALERT: no tree/bindirectory for %s" % (tree)
-
- out.write("MD5Sum:\n")
- print_md5_files(tree, files)
- out.write("SHA1:\n")
- print_sha1_files(tree, files)
- out.write("SHA256:\n")
- print_sha256_files(tree, files)
+ sys.exit(retcode)
- out.close()
- if Cnf.has_key("Dinstall::SigningKeyring"):
- keyring = "--secret-keyring \"%s\"" % Cnf["Dinstall::SigningKeyring"]
- if Cnf.has_key("Dinstall::SigningPubKeyring"):
- keyring += " --keyring \"%s\"" % Cnf["Dinstall::SigningPubKeyring"]
-
- arguments = "--no-options --batch --no-tty --armour"
- if Cnf.has_key("Dinstall::SigningKeyIds"):
- signkeyids = Cnf["Dinstall::SigningKeyIds"].split()
- else:
- signkeyids = [""]
+def generate_helper(suite_id):
+ '''
+    This function runs in a worker subprocess; it opens its own database
+    session and generates the Release files for one suite.
+    '''
+ session = DBConn().session()
+ suite = Suite.get(suite_id, session)
- dest = Cnf["Dir::Root"] + tree + "/Release.gpg"
- if os.path.exists(dest):
- os.unlink(dest)
+ # We allow the process handler to catch and deal with any exceptions
+ rw = ReleaseWriter(suite)
+ rw.generate_release_files()
- for keyid in signkeyids:
- if keyid != "": defkeyid = "--default-key %s" % keyid
- else: defkeyid = ""
- os.system("gpg %s %s %s --detach-sign <%s >>%s" %
- (keyring, defkeyid, arguments,
- Cnf["Dir::Root"] + tree + "/Release", dest))
+ return (PROC_STATUS_SUCCESS, 'Release file written for %s' % suite.suite_name)
#######################################################################################