X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=dak%2Fgenerate_releases.py;h=ae3577f3db998d17e7cad6837e58673aa113bcc8;hb=d6f184c345b49d95274f245c78d0a7976dcbc115;hp=081eb24722d4d2ab5cb6f2eac270a783781c583d;hpb=51d8e61bbea0e7aa4913153d7049f263fede0c8f;p=dak.git

diff --git a/dak/generate_releases.py b/dak/generate_releases.py
index 081eb247..ae3577f3 100755
--- a/dak/generate_releases.py
+++ b/dak/generate_releases.py
@@ -117,6 +117,20 @@ class XzFile(object):
             (stdout, stderr) = process.communicate()
             return stdout
 
+
+class HashFunc(object):
+    def __init__(self, release_field, func, db_name):
+        self.release_field = release_field
+        self.func = func
+        self.db_name = db_name
+
+RELEASE_HASHES = [
+    HashFunc('MD5Sum', apt_pkg.md5sum, 'md5sum'),
+    HashFunc('SHA1', apt_pkg.sha1sum, 'sha1'),
+    HashFunc('SHA256', apt_pkg.sha256sum, 'sha256'),
+]
+
+
 class ReleaseWriter(object):
     def __init__(self, suite):
         self.suite = suite
@@ -289,8 +303,7 @@ class ReleaseWriter(object):
 
         os.chdir(self.suite_path())
 
-        hashfuncs = dict(zip([x.upper().replace('UM', 'um') for x in suite.checksums],
-                             [getattr(apt_pkg, "%s" % (x)) for x in [x.replace("sum", "") + "sum" for x in suite.checksums]]))
+        hashes = [x for x in RELEASE_HASHES if x.db_name in suite.checksums]
 
         fileinfo = {}
 
@@ -321,8 +334,8 @@
 
             fileinfo[filename]['len'] = len(contents)
 
-            for hf, func in hashfuncs.items():
-                fileinfo[filename][hf] = func(contents)
+            for hf in hashes:
+                fileinfo[filename][hf.release_field] = hf.func(contents)
 
         for filename, comp in uncompnotseen.items():
             # If we've already seen the uncompressed file, we don't
@@ -337,31 +350,32 @@
 
             fileinfo[filename]['len'] = len(contents)
 
-            for hf, func in hashfuncs.items():
-                fileinfo[filename][hf] = func(contents)
+            for hf in hashes:
+                fileinfo[filename][hf.release_field] = hf.func(contents)
 
 
-        for h in sorted(hashfuncs.keys()):
-            out.write('%s:\n' % h)
+        for field in sorted(h.release_field for h in hashes):
+            out.write('%s:\n' % field)
             for filename in sorted(fileinfo.keys()):
-                out.write(" %s %8d %s\n" % (fileinfo[filename][h], fileinfo[filename]['len'], filename))
+                out.write(" %s %8d %s\n" % (fileinfo[filename][field], fileinfo[filename]['len'], filename))
 
         out.close()
         os.rename(outfile + '.new', outfile)
 
-        if suite.byhash:
-            query = """
-                UPDATE hashfile SET unreferenced = CURRENT_TIMESTAMP
-                WHERE suite_id = :id AND unreferenced IS NULL"""
-            session.execute(query, {'id': suite.suite_id})
+        query = """
+            UPDATE hashfile SET unreferenced = CURRENT_TIMESTAMP
+            WHERE suite_id = :id AND unreferenced IS NULL"""
+        session.execute(query, {'id': suite.suite_id})
 
+        if suite.byhash:
             for filename in fileinfo:
                 if not os.path.exists(filename):
                     # probably an uncompressed index we didn't generate
                     continue
 
-                for h in hashfuncs:
-                    hashfile = os.path.join(os.path.dirname(filename), 'by-hash', h, fileinfo[filename][h])
+                for h in hashes:
+                    field = h.release_field
+                    hashfile = os.path.join(os.path.dirname(filename), 'by-hash', field, fileinfo[filename][field])
                     query = "SELECT 1 FROM hashfile WHERE path = :p AND suite_id = :id"
                     q = session.execute(
                             query,
@@ -376,7 +390,17 @@ class ReleaseWriter(object):
                             INSERT INTO hashfile (path, suite_id)
                             VALUES (:p, :id)''',
                             {'p': hashfile, 'id': suite.suite_id})
 
+        session.commit()
+
+        if suite.byhash:
+            for filename in fileinfo:
+                if not os.path.exists(filename):
+                    # probably an uncompressed index we didn't generate
+                    continue
+                for h in hashes:
+                    field = h.release_field
+                    hashfile = os.path.join(os.path.dirname(filename), 'by-hash', field, fileinfo[filename][field])
                     try:
                         os.makedirs(os.path.dirname(hashfile))
                     except OSError as exc:
@@ -388,7 +412,6 @@ class ReleaseWriter(object):
                         if exc.errno != errno.EEXIST:
                             raise
 
-        session.commit()
 
         sign_release_dir(suite, os.path.dirname(outfile))
 
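
The patch replaces the hashfuncs dict, previously built on the fly from suite.checksums by string munging, with an explicit RELEASE_HASHES table (Release field name, checksum function, database name) that is filtered against suite.checksums. It also splits the by-hash handling into two passes: one that records the generated files in the hashfile table and commits, and a second that creates the on-disk by-hash entries. The following standalone sketch shows the same table-plus-filter pattern in isolation; it is not dak code: hashlib stands in for apt_pkg's checksum helpers so it runs without python-apt, and write_release_hashes, the sample file name and its contents are invented for illustration.

# Standalone sketch (not dak code): mirrors the patch's HashFunc/RELEASE_HASHES
# pattern, using hashlib instead of apt_pkg's md5sum/sha1sum/sha256sum helpers.
import hashlib


class HashFunc(object):
    def __init__(self, release_field, func, db_name):
        self.release_field = release_field  # field name written to the Release file
        self.func = func                    # callable: bytes -> hex digest
        self.db_name = db_name              # name used in the suite's checksums setting


RELEASE_HASHES = [
    HashFunc('MD5Sum', lambda data: hashlib.md5(data).hexdigest(), 'md5sum'),
    HashFunc('SHA1', lambda data: hashlib.sha1(data).hexdigest(), 'sha1'),
    HashFunc('SHA256', lambda data: hashlib.sha256(data).hexdigest(), 'sha256'),
]


def write_release_hashes(out, fileinfo, checksums):
    # Keep only the hashes the suite is configured for, then emit one
    # "<Field>:" section per hash, listing each file's digest, size and name.
    hashes = [h for h in RELEASE_HASHES if h.db_name in checksums]
    for field in sorted(h.release_field for h in hashes):
        out.write('%s:\n' % field)
        for filename in sorted(fileinfo):
            out.write(" %s %8d %s\n" % (fileinfo[filename][field],
                                        fileinfo[filename]['len'], filename))


if __name__ == '__main__':
    import sys
    # Hypothetical index file; in dak the contents come from the generated
    # Packages/Sources indices under the suite directory.
    contents = b'example index contents\n'
    fileinfo = {'main/binary-amd64/Packages.gz': {'len': len(contents)}}
    for h in RELEASE_HASHES:
        fileinfo['main/binary-amd64/Packages.gz'][h.release_field] = h.func(contents)
    write_release_hashes(sys.stdout, fileinfo, ['md5sum', 'sha1', 'sha256'])

Keeping the Release field name, the checksum callable and the configuration key together in one record avoids the string transformations of the old hashfuncs construction and gives the by-hash code a single place to look up the directory name for each hash.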