X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=dak%2Fgenerate_releases.py;h=cc6327f0c9d2030fcdd011a9b00e16dfb29931c9;hb=be8648509d039e6737fa601c61884747c6ee094e;hp=dc46b24b54a09ce235d7531b5bf9e65b3badfec1;hpb=2abbcc53125eb1e9288698ca3a734016b601453c;p=dak.git

diff --git a/dak/generate_releases.py b/dak/generate_releases.py
index dc46b24b..cc6327f0 100755
--- a/dak/generate_releases.py
+++ b/dak/generate_releases.py
@@ -125,7 +125,7 @@ class HashFunc(object):
         self.db_name = db_name
 
 RELEASE_HASHES = [
-    HashFunc('MD5Sum', apt_pkg.md5sum, 'md5'),
+    HashFunc('MD5Sum', apt_pkg.md5sum, 'md5sum'),
     HashFunc('SHA1', apt_pkg.sha1sum, 'sha1'),
     HashFunc('SHA256', apt_pkg.sha256sum, 'sha256'),
 ]
@@ -194,6 +194,69 @@ class ReleaseWriter(object):
                 if e.errno != errno.EEXIST:
                     raise
 
+    def _update_hashfile_table(self, session, fileinfo, hashes):
+        # Mark all by-hash files as obsolete. We will undo that for the ones
+        # we still reference later.
+        query = """
+            UPDATE hashfile SET unreferenced = CURRENT_TIMESTAMP
+            WHERE suite_id = :id AND unreferenced IS NULL"""
+        session.execute(query, {'id': self.suite.suite_id})
+
+        if self.suite.byhash:
+            query = "SELECT path FROM hashfile WHERE suite_id = :id"
+            q = session.execute(query, {'id': self.suite.suite_id})
+            known_hashfiles = set(row[0] for row in q)
+            updated = set()
+            new = set()
+
+            # Update the hashfile table with new or updated files
+            for filename in fileinfo:
+                if not os.path.exists(filename):
+                    # probably an uncompressed index we didn't generate
+                    continue
+                byhashdir = os.path.join(os.path.dirname(filename), 'by-hash')
+                for h in hashes:
+                    field = h.release_field
+                    hashfile = os.path.join(byhashdir, field, fileinfo[filename][field])
+                    if hashfile in known_hashfiles:
+                        updated.add(hashfile)
+                    else:
+                        new.add(hashfile)
+
+            if updated:
+                session.execute("""
+                    UPDATE hashfile SET unreferenced = NULL
+                    WHERE path = ANY(:p) AND suite_id = :id""",
+                    {'p': list(updated), 'id': self.suite.suite_id})
+            if new:
+                session.execute("""
+                    INSERT INTO hashfile (path, suite_id)
+                    VALUES (:p, :id)""",
+                    [{'p': hashfile, 'id': self.suite.suite_id} for hashfile in new])
+
+        session.commit()
+
+    def _make_byhash_links(self, fileinfo, hashes):
+        # Create hardlinks in by-hash directories
+        for filename in fileinfo:
+            if not os.path.exists(filename):
+                # probably an uncompressed index we didn't generate
+                continue
+
+            for h in hashes:
+                field = h.release_field
+                hashfile = os.path.join(os.path.dirname(filename), 'by-hash', field, fileinfo[filename][field])
+                try:
+                    os.makedirs(os.path.dirname(hashfile))
+                except OSError as exc:
+                    if exc.errno != errno.EEXIST:
+                        raise
+                try:
+                    os.link(filename, hashfile)
+                except OSError as exc:
+                    if exc.errno != errno.EEXIST:
+                        raise
+
     def generate_release_files(self):
         """
         Generate Release files for the given suite
@@ -362,68 +425,9 @@ class ReleaseWriter(object):
         out.close()
         os.rename(outfile + '.new', outfile)
 
-        # Mark all by-hash files as obsolete. We will undo that for the ones
-        # we still reference later.
-        query = """
-            UPDATE hashfile SET unreferenced = CURRENT_TIMESTAMP
-            WHERE suite_id = :id AND unreferenced IS NULL"""
-        session.execute(query, {'id': suite.suite_id})
-
-        if suite.byhash:
-            query = "SELECT path FROM hashfile WHERE suite_id = :id"
-            q = session.execute(query, {'id': suite.suite_id})
-            known_hashfiles = set(row[0] for row in q)
-            updated = []
-            new = []
-
-            # Update the hashfile table with new or updated files
-            for filename in fileinfo:
-                if not os.path.exists(filename):
-                    # probably an uncompressed index we didn't generate
-                    continue
-                byhashdir = os.path.join(os.path.dirname(filename), 'by-hash')
-                for h in hashes:
-                    field = h.release_field
-                    hashfile = os.path.join(byhashdir, field, fileinfo[filename][field])
-                    if hashfile in known_hashfiles:
-                        updated.append(hashfile)
-                    else:
-                        new.append(hashfile)
-
-            if updated:
-                session.execute("""
-                    UPDATE hashfile SET unreferenced = NULL
-                    WHERE path = ANY(:p) AND suite_id = :id""",
-                    {'p': updated, 'id': suite.suite_id})
-            if new:
-                session.execute("""
-                    INSERT INTO hashfile (path, suite_id)
-                    VALUES (:p, :id)""",
-                    [{'p': hashfile, 'id': suite.suite_id} for hashfile in new])
-
-        session.commit()
-
+        self._update_hashfile_table(session, fileinfo, hashes)
         if suite.byhash:
-            # Create hardlinks in by-hash directories
-            for filename in fileinfo:
-                if not os.path.exists(filename):
-                    # probably an uncompressed index we didn't generate
-                    continue
-
-                for h in hashes:
-                    field = h.release_field
-                    hashfile = os.path.join(os.path.dirname(filename), 'by-hash', field, fileinfo[filename][field])
-                    try:
-                        os.makedirs(os.path.dirname(hashfile))
-                    except OSError as exc:
-                        if exc.errno != errno.EEXIST:
-                            raise
-                    try:
-                        os.link(filename, hashfile)
-                    except OSError as exc:
-                        if exc.errno != errno.EEXIST:
-                            raise
-
+            self._make_byhash_links(fileinfo, hashes)
         sign_release_dir(suite, os.path.dirname(outfile))
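
Note: the _make_byhash_links helper factored out above implements Debian's by-hash layout, where each index file is also exposed as a hardlink under by-hash/<HashField>/<hexdigest> next to it, so clients can fetch indices by content hash. Below is a minimal standalone sketch of that layout, not dak's actual code path: it uses hashlib in place of apt_pkg's md5sum/sha1sum/sha256sum helpers so it runs without python-apt, and the function name and example path are illustrative only.

# Minimal sketch: create by-hash/<Field>/<digest> hardlinks for a set of
# index files. The field names mirror the Release fields used in the diff;
# hashlib stands in for apt_pkg's hash helpers.
import errno
import hashlib
import os

BYHASH_FIELDS = [('MD5Sum', 'md5'), ('SHA1', 'sha1'), ('SHA256', 'sha256')]

def make_byhash_links(filenames):
    for filename in filenames:
        if not os.path.exists(filename):
            # skip indices that were never generated (e.g. uncompressed ones)
            continue
        with open(filename, 'rb') as fh:
            data = fh.read()
        for field, algo in BYHASH_FIELDS:
            digest = hashlib.new(algo, data).hexdigest()
            hashfile = os.path.join(os.path.dirname(filename), 'by-hash', field, digest)
            try:
                os.makedirs(os.path.dirname(hashfile))
            except OSError as exc:
                if exc.errno != errno.EEXIST:
                    raise
            try:
                os.link(filename, hashfile)
            except OSError as exc:
                if exc.errno != errno.EEXIST:
                    raise

# Hypothetical usage:
# make_byhash_links(['dists/unstable/main/binary-amd64/Packages.gz'])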