X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=dak%2Fgenerate_releases.py;h=dc46b24b54a09ce235d7531b5bf9e65b3badfec1;hb=2abbcc53125eb1e9288698ca3a734016b601453c;hp=081eb24722d4d2ab5cb6f2eac270a783781c583d;hpb=51d8e61bbea0e7aa4913153d7049f263fede0c8f;p=dak.git

diff --git a/dak/generate_releases.py b/dak/generate_releases.py
index 081eb247..dc46b24b 100755
--- a/dak/generate_releases.py
+++ b/dak/generate_releases.py
@@ -117,6 +117,20 @@ class XzFile(object):
             (stdout, stderr) = process.communicate()
             return stdout
 
+
+class HashFunc(object):
+    def __init__(self, release_field, func, db_name):
+        self.release_field = release_field
+        self.func = func
+        self.db_name = db_name
+
+RELEASE_HASHES = [
+    HashFunc('MD5Sum', apt_pkg.md5sum, 'md5'),
+    HashFunc('SHA1', apt_pkg.sha1sum, 'sha1'),
+    HashFunc('SHA256', apt_pkg.sha256sum, 'sha256'),
+]
+
+
 class ReleaseWriter(object):
     def __init__(self, suite):
         self.suite = suite
@@ -289,8 +303,7 @@ class ReleaseWriter(object):
 
         os.chdir(self.suite_path())
 
-        hashfuncs = dict(zip([x.upper().replace('UM', 'um') for x in suite.checksums],
-                             [getattr(apt_pkg, "%s" % (x)) for x in [x.replace("sum", "") + "sum" for x in suite.checksums]]))
+        hashes = [x for x in RELEASE_HASHES if x.db_name in suite.checksums]
 
         fileinfo = {}
 
@@ -321,8 +334,8 @@ class ReleaseWriter(object):
 
             fileinfo[filename]['len'] = len(contents)
 
-            for hf, func in hashfuncs.items():
-                fileinfo[filename][hf] = func(contents)
+            for hf in hashes:
+                fileinfo[filename][hf.release_field] = hf.func(contents)
 
         for filename, comp in uncompnotseen.items():
             # If we've already seen the uncompressed file, we don't
@@ -337,46 +350,69 @@ class ReleaseWriter(object):
 
             fileinfo[filename]['len'] = len(contents)
 
-            for hf, func in hashfuncs.items():
-                fileinfo[filename][hf] = func(contents)
+            for hf in hashes:
+                fileinfo[filename][hf.release_field] = hf.func(contents)
 
-        for h in sorted(hashfuncs.keys()):
-            out.write('%s:\n' % h)
+        for field in sorted(h.release_field for h in hashes):
+            out.write('%s:\n' % field)
             for filename in sorted(fileinfo.keys()):
-                out.write(" %s %8d %s\n" % (fileinfo[filename][h], fileinfo[filename]['len'], filename))
+                out.write(" %s %8d %s\n" % (fileinfo[filename][field], fileinfo[filename]['len'], filename))
 
         out.close()
         os.rename(outfile + '.new', outfile)
 
+        # Mark all by-hash files as obsolete. We will undo that for the ones
+        # we still reference later.
+        query = """
+            UPDATE hashfile SET unreferenced = CURRENT_TIMESTAMP
+            WHERE suite_id = :id AND unreferenced IS NULL"""
+        session.execute(query, {'id': suite.suite_id})
+
         if suite.byhash:
-            query = """
-                UPDATE hashfile SET unreferenced = CURRENT_TIMESTAMP
-                WHERE suite_id = :id AND unreferenced IS NULL"""
-            session.execute(query, {'id': suite.suite_id})
+            query = "SELECT path FROM hashfile WHERE suite_id = :id"
+            q = session.execute(query, {'id': suite.suite_id})
+            known_hashfiles = set(row[0] for row in q)
+            updated = []
+            new = []
 
+            # Update the hashfile table with new or updated files
             for filename in fileinfo:
                 if not os.path.exists(filename):
                     # probably an uncompressed index we didn't generate
                     continue
-
-                for h in hashfuncs:
-                    hashfile = os.path.join(os.path.dirname(filename), 'by-hash', h, fileinfo[filename][h])
-                    query = "SELECT 1 FROM hashfile WHERE path = :p AND suite_id = :id"
-                    q = session.execute(
-                            query,
-                            {'p': hashfile, 'id': suite.suite_id})
-                    if q.rowcount:
-                        session.execute('''
-                            UPDATE hashfile SET unreferenced = NULL
-                            WHERE path = :p and suite_id = :id''',
-                            {'p': hashfile, 'id': suite.suite_id})
+                byhashdir = os.path.join(os.path.dirname(filename), 'by-hash')
+                for h in hashes:
+                    field = h.release_field
+                    hashfile = os.path.join(byhashdir, field, fileinfo[filename][field])
+                    if hashfile in known_hashfiles:
+                        updated.append(hashfile)
                     else:
-                        session.execute('''
-                            INSERT INTO hashfile (path, suite_id)
-                            VALUES (:p, :id)''',
-                            {'p': hashfile, 'id': suite.suite_id})
+                        new.append(hashfile)
+
+            if updated:
+                session.execute("""
+                    UPDATE hashfile SET unreferenced = NULL
+                    WHERE path = ANY(:p) AND suite_id = :id""",
+                    {'p': updated, 'id': suite.suite_id})
+            if new:
+                session.execute("""
+                    INSERT INTO hashfile (path, suite_id)
+                    VALUES (:p, :id)""",
+                    [{'p': hashfile, 'id': suite.suite_id} for hashfile in new])
+
+        session.commit()
+
+        if suite.byhash:
+            # Create hardlinks in by-hash directories
+            for filename in fileinfo:
+                if not os.path.exists(filename):
+                    # probably an uncompressed index we didn't generate
+                    continue
+                for h in hashes:
+                    field = h.release_field
+                    hashfile = os.path.join(os.path.dirname(filename), 'by-hash', field, fileinfo[filename][field])
                     try:
                         os.makedirs(os.path.dirname(hashfile))
                     except OSError as exc:
@@ -388,7 +424,6 @@ class ReleaseWriter(object):
                         if exc.errno != errno.EEXIST:
                             raise
 
-        session.commit()
 
         sign_release_dir(suite, os.path.dirname(outfile))
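Review note: the first change replaces the on-the-fly hashfuncs = dict(zip(...)) construction with a module-level RELEASE_HASHES list filtered by suite.checksums, so the Release field name, the digest function and the database name travel together. The sketch below is a minimal standalone illustration of that pattern, not dak code: it substitutes hashlib for the apt_pkg.md5sum/sha1sum/sha256sum helpers used in the patch, and the files/checksums example data is invented.

# Standalone sketch of the RELEASE_HASHES pattern (assumption: hashlib
# stands in for apt_pkg.md5sum/sha1sum/sha256sum; example data is made up).
import hashlib
from collections import namedtuple

HashFunc = namedtuple('HashFunc', ['release_field', 'func', 'db_name'])

RELEASE_HASHES = [
    HashFunc('MD5Sum', lambda data: hashlib.md5(data).hexdigest(), 'md5'),
    HashFunc('SHA1', lambda data: hashlib.sha1(data).hexdigest(), 'sha1'),
    HashFunc('SHA256', lambda data: hashlib.sha256(data).hexdigest(), 'sha256'),
]


def release_hash_sections(files, checksums):
    """Yield Release-style hash sections for a {name: bytes} mapping,
    restricted to the hash names configured in 'checksums'."""
    hashes = [h for h in RELEASE_HASHES if h.db_name in checksums]
    for h in hashes:
        yield '%s:' % h.release_field
        for name in sorted(files):
            contents = files[name]
            yield ' %s %8d %s' % (h.func(contents), len(contents), name)


if __name__ == '__main__':
    example = {'main/binary-amd64/Packages.gz': b'not a real index\n'}
    for line in release_hash_sections(example, ['sha1', 'sha256']):
        print(line)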
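The second half of the patch batches the hashfile bookkeeping (one UPDATE ... WHERE path = ANY(:p) and one bulk INSERT instead of a query per file) and then, in a separate "if suite.byhash:" pass, creates the hardlinks under by-hash/<Field>/<digest>. Below is a minimal sketch of that linking step; only the makedirs/EEXIST handling is visible in the quoted hunks, so the os.link call is an assumption based on the patch's "Create hardlinks in by-hash directories" comment.

# Sketch of the by-hash hardlink step; os.link is assumed, only the
# makedirs/EEXIST pattern appears in the quoted hunks.
import errno
import os


def link_by_hash(filename, field, digest):
    """Expose 'filename' as by-hash/<field>/<digest> next to the index."""
    hashfile = os.path.join(os.path.dirname(filename), 'by-hash', field, digest)
    try:
        os.makedirs(os.path.dirname(hashfile))
    except OSError as exc:
        if exc.errno != errno.EEXIST:  # the by-hash directory may already exist
            raise
    try:
        os.link(filename, hashfile)
    except OSError as exc:
        if exc.errno != errno.EEXIST:  # same content already linked for this digest
            raise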