X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=dak%2Fgenerate_releases.py;h=081eb24722d4d2ab5cb6f2eac270a783781c583d;hb=51d8e61bbea0e7aa4913153d7049f263fede0c8f;hp=6a1bf84e2dee455f163c6be24ab6738c72d4fa52;hpb=4f60e9d90d7b65f37de1137ebcfc34e8b75de7e2;p=dak.git

diff --git a/dak/generate_releases.py b/dak/generate_releases.py
index 6a1bf84e..081eb247 100755
--- a/dak/generate_releases.py
+++ b/dak/generate_releases.py
@@ -37,7 +37,9 @@ import stat
 import time
 import gzip
 import bz2
+import errno
 import apt_pkg
+import subprocess
 from tempfile import mkstemp, mkdtemp
 import commands
 from sqlalchemy.orm import object_session
@@ -48,6 +50,7 @@ from daklib.dak_exceptions import *
 from daklib.dbconn import *
 from daklib.config import Config
 from daklib.dakmultiprocessing import DakProcessPool, PROC_STATUS_SUCCESS
+import daklib.daksubprocess
 
 ################################################################################
 Logger = None #: Our logging object
@@ -68,7 +71,7 @@ Generate the Release files
   -h, --help show this help and exit
   -q, --quiet Don't output progress
 
-SUITE can be a space seperated list, e.g.
+SUITE can be a space separated list, e.g.
    --suite=unstable testing
   """
     sys.exit(exit_code)
@@ -83,7 +86,7 @@ def sign_release_dir(suite, dirname):
         if cnf.has_key("Dinstall::SigningPubKeyring"):
             keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
 
-        arguments = "--no-options --batch --no-tty --armour"
+        arguments = "--no-options --batch --no-tty --armour --personal-digest-preferences=SHA256"
 
         relname = os.path.join(dirname, 'Release')
 
@@ -95,25 +98,88 @@ def sign_release_dir(suite, dirname):
         if os.path.exists(inlinedest):
             os.unlink(inlinedest)
 
-        # We can only use one key for inline signing so use the first one in
-        # the array for consistency
-        firstkey = True
-
+        defkeyid=""
         for keyid in suite.signingkeys or []:
-            defkeyid = "--default-key %s" % keyid
-
-            os.system("gpg %s %s %s --detach-sign <%s >>%s" %
-                    (keyring, defkeyid, arguments, relname, dest))
-
-            if firstkey:
-                os.system("gpg %s %s %s --clearsign <%s >>%s" %
-                        (keyring, defkeyid, arguments, relname, inlinedest))
-                firstkey = False
+            defkeyid += "--local-user %s " % keyid
+
+        os.system("gpg %s %s %s --detach-sign <%s >>%s" %
+                (keyring, defkeyid, arguments, relname, dest))
+        os.system("gpg %s %s %s --clearsign <%s >>%s" %
+                (keyring, defkeyid, arguments, relname, inlinedest))
+
+class XzFile(object):
+    def __init__(self, filename, mode='r'):
+        self.filename = filename
+    def read(self):
+        cmd = ("xz", "-d")
+        with open(self.filename, 'r') as stdin:
+            process = daklib.daksubprocess.Popen(cmd, stdin=stdin, stdout=subprocess.PIPE)
+            (stdout, stderr) = process.communicate()
+            return stdout
 
 class ReleaseWriter(object):
     def __init__(self, suite):
         self.suite = suite
 
+    def suite_path(self):
+        """
+        Absolute path to the suite-specific files.
+        """
+        cnf = Config()
+        suite_suffix = cnf.find("Dinstall::SuiteSuffix", "")
+
+        return os.path.join(self.suite.archive.path, 'dists',
+                            self.suite.suite_name, suite_suffix)
+
+    def suite_release_path(self):
+        """
+        Absolute path where Release files are physically stored.
+        This should be a path that sorts after the dists/ directory.
+        """
+        # TODO: Eventually always create Release in `zzz-dists` to avoid
+        # special cases. However we don't want to move existing Release files
+        # for released suites.
+        # See `create_release_symlinks` below.
+        if not self.suite.byhash:
+            return self.suite_path()
+
+        cnf = Config()
+        suite_suffix = cnf.find("Dinstall::SuiteSuffix", "")
+
+        return os.path.join(self.suite.archive.path, 'zzz-dists',
+                            self.suite.suite_name, suite_suffix)
+
+    def create_release_symlinks(self):
+        """
+        Create symlinks for Release files.
+        This creates the symlinks for Release files in the `suite_path`
+        to the actual files in `suite_release_path`.
+        """
+        # TODO: Eventually always create the links.
+        # See `suite_release_path` above.
+        if not self.suite.byhash:
+            return
+
+        relpath = os.path.relpath(self.suite_release_path(), self.suite_path())
+        for f in ("Release", "Release.gpg", "InRelease"):
+            source = os.path.join(relpath, f)
+            dest = os.path.join(self.suite_path(), f)
+            if not os.path.islink(dest):
+                os.unlink(dest)
+            elif os.readlink(dest) == source:
+                continue
+            else:
+                os.unlink(dest)
+            os.symlink(source, dest)
+
+    def create_output_directories(self):
+        for path in (self.suite_path(), self.suite_release_path()):
+            try:
+                os.makedirs(path)
+            except OSError as e:
+                if e.errno != errno.EEXIST:
+                    raise
+
     def generate_release_files(self):
         """
         Generate Release files for the given suite
@@ -131,9 +197,11 @@ class ReleaseWriter(object):
         # fill them in
         attribs = ( ('Origin', 'origin'),
                     ('Label', 'label'),
-                    ('Suite', 'suite_name'),
+                    ('Suite', 'release_suite_output'),
                     ('Version', 'version'),
-                    ('Codename', 'codename') )
+                    ('Codename', 'codename'),
+                    ('Changelogs', 'changelog_url'),
+                  )
 
         # A "Sub" Release file has slightly different fields
         subattribs = ( ('Archive', 'suite_name'),
@@ -143,26 +211,27 @@ class ReleaseWriter(object):
 
         # Boolean stuff. If we find it true in database, write out "yes" into the release file
         boolattrs = ( ('NotAutomatic', 'notautomatic'),
-                      ('ButAutomaticUpgrades', 'butautomaticupgrades') )
+                      ('ButAutomaticUpgrades', 'butautomaticupgrades'),
+                      ('Acquire-By-Hash', 'byhash'),
+                    )
 
         cnf = Config()
 
         suite_suffix = cnf.find("Dinstall::SuiteSuffix", "")
 
-        outfile = os.path.join(suite.archive.path, 'dists', suite.suite_name, suite_suffix, "Release")
+        self.create_output_directories()
+        self.create_release_symlinks()
+
+        outfile = os.path.join(self.suite_release_path(), "Release")
         out = open(outfile + ".new", "w")
 
         for key, dbfield in attribs:
-            if getattr(suite, dbfield) is not None:
-                # TEMPORARY HACK HACK HACK until we change the way we store the suite names etc
-                if key == 'Suite' and getattr(suite, dbfield) == 'squeeze-updates':
-                    out.write("Suite: oldstable-updates\n")
-                elif key == 'Suite' and getattr(suite, dbfield) == 'wheezy-updates':
-                    out.write("Suite: stable-updates\n")
-                elif key == 'Suite' and getattr(suite, dbfield) == 'jessie-updates':
-                    out.write("Suite: testing-updates\n")
-                else:
-                    out.write("%s: %s\n" % (key, getattr(suite, dbfield)))
+            # Hack to skip NULL Version fields as we used to do this
+            # We should probably just always ignore anything which is None
+            if key in ("Version", "Changelogs") and getattr(suite, dbfield) is None:
+                continue
+
+            out.write("%s: %s\n" % (key, getattr(suite, dbfield)))
 
         out.write("Date: %s\n" % (time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime(time.time()))))
 
@@ -176,7 +245,7 @@ class ReleaseWriter(object):
 
         out.write("Architectures: %s\n" % (" ".join([a.arch_string for a in architectures])))
 
-        components = [ c.component_name for c in session.query(Component) ]
+        components = [ c.component_name for c in suite.components ]
 
         out.write("Components: %s\n" % (" ".join(components)))
 
@@ -186,7 +255,7 @@ class ReleaseWriter(object):
         out.write("Description: %s\n" % suite.description)
 
         for comp in components:
-            for dirpath, dirnames, filenames in os.walk(os.path.join(suite.archive.path, "dists", suite.suite_name, suite_suffix, comp), topdown=True):
+            for dirpath, dirnames, filenames in os.walk(os.path.join(self.suite_path(), comp), topdown=True):
                 if not re_gensubrelease.match(dirpath):
                     continue
 
@@ -218,11 +287,10 @@ class ReleaseWriter(object):
         # their checksums to the main Release file
         oldcwd = os.getcwd()
 
-        os.chdir(os.path.join(suite.archive.path, "dists", suite.suite_name, suite_suffix))
+        os.chdir(self.suite_path())
 
-        hashfuncs = { 'MD5Sum' : apt_pkg.md5sum,
-                      'SHA1' : apt_pkg.sha1sum,
-                      'SHA256' : apt_pkg.sha256sum }
+        hashfuncs = dict(zip([x.upper().replace('UM', 'um') for x in suite.checksums],
+                             [getattr(apt_pkg, "%s" % (x)) for x in [x.replace("sum", "") + "sum" for x in suite.checksums]]))
 
         fileinfo = {}
 
@@ -244,10 +312,12 @@ class ReleaseWriter(object):
                 # If we find a file for which we have a compressed version and
                 # haven't yet seen the uncompressed one, store the possibility
                 # for future use
-                if entry.endswith(".gz") and entry[:-3] not in uncompnotseen.keys():
+                if entry.endswith(".gz") and filename[:-3] not in uncompnotseen:
                     uncompnotseen[filename[:-3]] = (gzip.GzipFile, filename)
-                elif entry.endswith(".bz2") and entry[:-4] not in uncompnotseen.keys():
+                elif entry.endswith(".bz2") and filename[:-4] not in uncompnotseen:
                     uncompnotseen[filename[:-4]] = (bz2.BZ2File, filename)
+                elif entry.endswith(".xz") and filename[:-3] not in uncompnotseen:
+                    uncompnotseen[filename[:-3]] = (XzFile, filename)
 
                 fileinfo[filename]['len'] = len(contents)
 
@@ -257,12 +327,7 @@ class ReleaseWriter(object):
         for filename, comp in uncompnotseen.items():
             # If we've already seen the uncompressed file, we don't
             # need to do anything again
-            if filename in fileinfo.keys():
-                continue
-
-            # Skip uncompressed Contents files as they're huge, take ages to
-            # checksum and we checksum the compressed ones anyways
-            if os.path.basename(filename).startswith("Contents"):
+            if filename in fileinfo:
                 continue
 
             fileinfo[filename] = {}
@@ -284,6 +349,47 @@ class ReleaseWriter(object):
         out.close()
         os.rename(outfile + '.new', outfile)
 
+        if suite.byhash:
+            query = """
+                UPDATE hashfile SET unreferenced = CURRENT_TIMESTAMP
+                WHERE suite_id = :id AND unreferenced IS NULL"""
+            session.execute(query, {'id': suite.suite_id})
+
+            for filename in fileinfo:
+                if not os.path.exists(filename):
+                    # probably an uncompressed index we didn't generate
+                    continue
+
+                for h in hashfuncs:
+                    hashfile = os.path.join(os.path.dirname(filename), 'by-hash', h, fileinfo[filename][h])
+                    query = "SELECT 1 FROM hashfile WHERE path = :p AND suite_id = :id"
+                    q = session.execute(
+                            query,
+                            {'p': hashfile, 'id': suite.suite_id})
+                    if q.rowcount:
+                        session.execute('''
+                            UPDATE hashfile SET unreferenced = NULL
+                            WHERE path = :p and suite_id = :id''',
+                            {'p': hashfile, 'id': suite.suite_id})
+                    else:
+                        session.execute('''
+                            INSERT INTO hashfile (path, suite_id)
+                            VALUES (:p, :id)''',
+                            {'p': hashfile, 'id': suite.suite_id})
+
+                    try:
+                        os.makedirs(os.path.dirname(hashfile))
+                    except OSError as exc:
+                        if exc.errno != errno.EEXIST:
+                            raise
+                    try:
+                        os.link(filename, hashfile)
+                    except OSError as exc:
+                        if exc.errno != errno.EEXIST:
+                            raise
+
+            session.commit()
+
         sign_release_dir(suite, os.path.dirname(outfile))
 
         os.chdir(oldcwd)
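
A few notes on the mechanics introduced above. The snippets are illustrative sketches, not code taken from the patch; any name or value that does not appear above is made up.

Signing: the old loop invoked gpg separately for each signing key (and clear-signed only with the first one); the patch instead collects every key into one option string and runs gpg once per output file. A minimal sketch of what that string ends up looking like, with hypothetical key IDs:

# Illustrative sketch, not dak code: made-up key IDs.
signingkeys = ["0xAAAAAAAA", "0xBBBBBBBB"]

defkeyid = ""
for keyid in signingkeys:
    defkeyid += "--local-user %s " % keyid

# defkeyid == "--local-user 0xAAAAAAAA --local-user 0xBBBBBBBB "
# gpg then runs once for Release.gpg (--detach-sign) and once for InRelease
# (--clearsign), each time signing with all keys at once. Roughly the
# clear-signing command, keyring options and I/O redirection omitted:
print("gpg --no-options --batch --no-tty --armour "
      "--personal-digest-preferences=SHA256 %s--clearsign" % defkeyid)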
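
XzFile: the helper only needs to expose a read() method so it can sit in uncompnotseen next to gzip.GzipFile and bz2.BZ2File, and it does so by piping the file through xz -d. The same idea with the stock subprocess module (the patch routes the call through daklib.daksubprocess.Popen, which is assumed here to accept Popen-style arguments):

import subprocess

# Illustrative sketch, not dak code: decompress an .xz index in one shot.
def read_xz(path):
    with open(path, 'rb') as stdin:
        process = subprocess.Popen(("xz", "-d"), stdin=stdin, stdout=subprocess.PIPE)
        stdout, _ = process.communicate()
    return stdout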
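
Checksums: hashfuncs is now derived from the suite's checksums column instead of a hard-coded dict. Assuming that column holds values such as 'md5sum', 'sha1' and 'sha256' (an assumption, not something the patch shows), the zip/getattr expression rebuilds the old mapping, keyed by the field names Release uses:

import apt_pkg

# Illustrative sketch, not dak code: assumed contents of suite.checksums.
checksums = ['md5sum', 'sha1', 'sha256']

hashfuncs = dict(zip([x.upper().replace('UM', 'um') for x in checksums],
                     [getattr(apt_pkg, "%s" % (x)) for x in [x.replace("sum", "") + "sum" for x in checksums]]))

# {'MD5Sum': apt_pkg.md5sum, 'SHA1': apt_pkg.sha1sum, 'SHA256': apt_pkg.sha256sum}
print(sorted(hashfuncs))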
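
By-hash: every index whose checksums end up in Release is additionally hard-linked under by-hash/<Hash>/<digest> next to it, and the hashfile table tracks each link (first marked unreferenced, then cleared again for paths still in use) so stale entries can be cleaned up later. A sketch of the path that os.link() creates, for a hypothetical Packages file:

import hashlib
import os

# Illustrative sketch, not dak code: made-up index path and stand-in contents.
filename = 'main/binary-amd64/Packages.gz'
contents = b'Package: hello\n'
digest = hashlib.sha256(contents).hexdigest()

hashfile = os.path.join(os.path.dirname(filename), 'by-hash', 'SHA256', digest)
print(hashfile)   # main/binary-amd64/by-hash/SHA256/<digest>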
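
Release placement: when byhash is set, Release is written under zzz-dists/<suite>/ (a name chosen to sort after dists/, per the docstring, presumably so the indices land before the Release that references them), and dists/<suite>/Release, Release.gpg and InRelease become relative symlinks pointing into it. A sketch of the relative target, with a made-up archive root and Dinstall::SuiteSuffix left empty:

import os

# Illustrative sketch, not dak code: hypothetical archive layout.
archive_path = '/srv/archive'
suite_path = os.path.join(archive_path, 'dists', 'unstable')
suite_release_path = os.path.join(archive_path, 'zzz-dists', 'unstable')

relpath = os.path.relpath(suite_release_path, suite_path)
print(os.path.join(relpath, 'Release'))
# ../../zzz-dists/unstable/Release  <- target of the dists/unstable/Release symlink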