X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=dak%2Fgenerate_releases.py;h=081eb24722d4d2ab5cb6f2eac270a783781c583d;hb=51d8e61bbea0e7aa4913153d7049f263fede0c8f;hp=c359177235810258910b788ea09a85857ebc9cfe;hpb=61231287ac4be27e02244fbc45b212c3ae968da8;p=dak.git

diff --git a/dak/generate_releases.py b/dak/generate_releases.py
index c3591772..081eb247 100755
--- a/dak/generate_releases.py
+++ b/dak/generate_releases.py
@@ -37,6 +37,7 @@ import stat
 import time
 import gzip
 import bz2
+import errno
 import apt_pkg
 import subprocess
 from tempfile import mkstemp, mkdtemp
@@ -120,6 +121,65 @@ class ReleaseWriter(object):
     def __init__(self, suite):
         self.suite = suite
 
+    def suite_path(self):
+        """
+        Absolute path to the suite-specific files.
+        """
+        cnf = Config()
+        suite_suffix = cnf.find("Dinstall::SuiteSuffix", "")
+
+        return os.path.join(self.suite.archive.path, 'dists',
+                            self.suite.suite_name, suite_suffix)
+
+    def suite_release_path(self):
+        """
+        Absolute path where Release files are physically stored.
+        This should be a path that sorts after the dists/ directory.
+        """
+        # TODO: Eventually always create Release in `zzz-dists` to avoid
+        # special cases. However we don't want to move existing Release files
+        # for released suites.
+        # See `create_release_symlinks` below.
+        if not self.suite.byhash:
+            return self.suite_path()
+
+        cnf = Config()
+        suite_suffix = cnf.find("Dinstall::SuiteSuffix", "")
+
+        return os.path.join(self.suite.archive.path, 'zzz-dists',
+                            self.suite.suite_name, suite_suffix)
+
+    def create_release_symlinks(self):
+        """
+        Create symlinks for Release files.
+        This creates the symlinks for Release files in `suite_path`
+        pointing to the actual files in `suite_release_path`.
+        """
+        # TODO: Eventually always create the links.
+        # See `suite_release_path` above.
+        if not self.suite.byhash:
+            return
+
+        relpath = os.path.relpath(self.suite_release_path(), self.suite_path())
+        for f in ("Release", "Release.gpg", "InRelease"):
+            source = os.path.join(relpath, f)
+            dest = os.path.join(self.suite_path(), f)
+            if not os.path.lexists(dest):
+                pass
+            elif os.path.islink(dest) and os.readlink(dest) == source:
+                continue
+            else:
+                os.unlink(dest)
+            os.symlink(source, dest)
+
+    def create_output_directories(self):
+        for path in (self.suite_path(), self.suite_release_path()):
+            try:
+                os.makedirs(path)
+            except OSError as e:
+                if e.errno != errno.EEXIST:
+                    raise
+
     def generate_release_files(self):
         """
         Generate Release files for the given suite
@@ -151,13 +211,18 @@ class ReleaseWriter(object):
 
         # Boolean stuff. If we find it true in database, write out "yes" into the release file
         boolattrs = ( ('NotAutomatic',         'notautomatic'),
-                      ('ButAutomaticUpgrades', 'butautomaticupgrades') )
+                      ('ButAutomaticUpgrades', 'butautomaticupgrades'),
+                      ('Acquire-By-Hash',      'byhash'),
+                      )
 
         cnf = Config()
 
         suite_suffix = cnf.find("Dinstall::SuiteSuffix", "")
 
-        outfile = os.path.join(suite.archive.path, 'dists', suite.suite_name, suite_suffix, "Release")
+        self.create_output_directories()
+        self.create_release_symlinks()
+
+        outfile = os.path.join(self.suite_release_path(), "Release")
         out = open(outfile + ".new", "w")
 
         for key, dbfield in attribs:
@@ -190,7 +255,7 @@ class ReleaseWriter(object):
         out.write("Description: %s\n" % suite.description)
 
         for comp in components:
-            for dirpath, dirnames, filenames in os.walk(os.path.join(suite.archive.path, "dists", suite.suite_name, suite_suffix, comp), topdown=True):
+            for dirpath, dirnames, filenames in os.walk(os.path.join(self.suite_path(), comp), topdown=True):
                 if not re_gensubrelease.match(dirpath):
                     continue
 
@@ -222,7 +287,7 @@ class ReleaseWriter(object):
         # their checksums to the main Release file
         oldcwd = os.getcwd()
 
-        os.chdir(os.path.join(suite.archive.path, "dists", suite.suite_name, suite_suffix))
+        os.chdir(self.suite_path())
 
         hashfuncs = dict(zip([x.upper().replace('UM', 'um') for x in suite.checksums],
                              [getattr(apt_pkg, "%s" % (x)) for x in [x.replace("sum", "") + "sum" for x in suite.checksums]]))
@@ -284,6 +349,47 @@ class ReleaseWriter(object):
         out.close()
         os.rename(outfile + '.new', outfile)
 
+        if suite.byhash:
+            query = """
+                UPDATE hashfile SET unreferenced = CURRENT_TIMESTAMP
+                WHERE suite_id = :id AND unreferenced IS NULL"""
+            session.execute(query, {'id': suite.suite_id})
+
+            for filename in fileinfo:
+                if not os.path.exists(filename):
+                    # probably an uncompressed index we didn't generate
+                    continue
+
+                for h in hashfuncs:
+                    hashfile = os.path.join(os.path.dirname(filename), 'by-hash', h, fileinfo[filename][h])
+                    query = "SELECT 1 FROM hashfile WHERE path = :p AND suite_id = :id"
+                    q = session.execute(
+                            query,
+                            {'p': hashfile, 'id': suite.suite_id})
+                    if q.rowcount:
+                        session.execute('''
+                            UPDATE hashfile SET unreferenced = NULL
+                            WHERE path = :p and suite_id = :id''',
+                            {'p': hashfile, 'id': suite.suite_id})
+                    else:
+                        session.execute('''
+                            INSERT INTO hashfile (path, suite_id)
+                            VALUES (:p, :id)''',
+                            {'p': hashfile, 'id': suite.suite_id})
+
+                    try:
+                        os.makedirs(os.path.dirname(hashfile))
+                    except OSError as exc:
+                        if exc.errno != errno.EEXIST:
+                            raise
+                    try:
+                        os.link(filename, hashfile)
+                    except OSError as exc:
+                        if exc.errno != errno.EEXIST:
+                            raise
+
+        session.commit()
+
         sign_release_dir(suite, os.path.dirname(outfile))
 
         os.chdir(oldcwd)
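
For context, with `byhash` enabled the patch above does three things: it stores the physical Release, Release.gpg and InRelease files under `zzz-dists/<suite>/`, points symlinks at them from `dists/<suite>/`, and hard-links each generated index into a `by-hash/<hash name>/<digest>` directory next to it (one entry per configured checksum), recording the links in the `hashfile` table so stale entries can later be cleaned up. The following is a minimal standalone sketch of the by-hash layout only; it substitutes hashlib for the apt_pkg checksum helpers used by dak, and the index path in the usage comment is hypothetical.

# Standalone sketch (not dak code): how an index file maps to its by-hash path.
import errno
import hashlib
import os


def by_hash_path(index_path):
    # e.g. dists/unstable/main/binary-amd64/Packages.gz
    #   -> dists/unstable/main/binary-amd64/by-hash/SHA256/<hexdigest>
    with open(index_path, "rb") as fh:
        digest = hashlib.sha256(fh.read()).hexdigest()
    return os.path.join(os.path.dirname(index_path), "by-hash", "SHA256", digest)


def link_by_hash(index_path):
    # Hard-link the index into its by-hash location, tolerating an already
    # existing directory or link, mirroring the errno.EEXIST handling in the
    # patch above.
    target = by_hash_path(index_path)
    try:
        os.makedirs(os.path.dirname(target))
    except OSError as exc:
        if exc.errno != errno.EEXIST:
            raise
    try:
        os.link(index_path, target)
    except OSError as exc:
        if exc.errno != errno.EEXIST:
            raise
    return target


# Hypothetical usage:
#   link_by_hash("dists/unstable/main/binary-amd64/Packages.gz")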