X-Git-Url: https://git.decadent.org.uk/gitweb/?p=dak.git;a=blobdiff_plain;f=dak%2Fgenerate_releases.py;h=cc6327f0c9d2030fcdd011a9b00e16dfb29931c9;hp=ae3577f3db998d17e7cad6837e58673aa113bcc8;hb=026bdb9580e6d18bf504d2f44d46890df5d89f1a;hpb=d6f184c345b49d95274f245c78d0a7976dcbc115

diff --git a/dak/generate_releases.py b/dak/generate_releases.py
index ae3577f3..cc6327f0 100755
--- a/dak/generate_releases.py
+++ b/dak/generate_releases.py
@@ -125,7 +125,7 @@ class HashFunc(object):
         self.db_name = db_name
 
 RELEASE_HASHES = [
-    HashFunc('MD5Sum', apt_pkg.md5sum, 'md5'),
+    HashFunc('MD5Sum', apt_pkg.md5sum, 'md5sum'),
     HashFunc('SHA1', apt_pkg.sha1sum, 'sha1'),
     HashFunc('SHA256', apt_pkg.sha256sum, 'sha256'),
 ]
@@ -194,6 +194,69 @@ class ReleaseWriter(object):
                 if e.errno != errno.EEXIST:
                     raise
 
+    def _update_hashfile_table(self, session, fileinfo, hashes):
+        # Mark all by-hash files as obsolete. We will undo that for the ones
+        # we still reference later.
+        query = """
+            UPDATE hashfile SET unreferenced = CURRENT_TIMESTAMP
+            WHERE suite_id = :id AND unreferenced IS NULL"""
+        session.execute(query, {'id': self.suite.suite_id})
+
+        if self.suite.byhash:
+            query = "SELECT path FROM hashfile WHERE suite_id = :id"
+            q = session.execute(query, {'id': self.suite.suite_id})
+            known_hashfiles = set(row[0] for row in q)
+            updated = set()
+            new = set()
+
+            # Update the hashfile table with new or updated files
+            for filename in fileinfo:
+                if not os.path.exists(filename):
+                    # probably an uncompressed index we didn't generate
+                    continue
+                byhashdir = os.path.join(os.path.dirname(filename), 'by-hash')
+                for h in hashes:
+                    field = h.release_field
+                    hashfile = os.path.join(byhashdir, field, fileinfo[filename][field])
+                    if hashfile in known_hashfiles:
+                        updated.add(hashfile)
+                    else:
+                        new.add(hashfile)
+
+            if updated:
+                session.execute("""
+                    UPDATE hashfile SET unreferenced = NULL
+                    WHERE path = ANY(:p) AND suite_id = :id""",
+                    {'p': list(updated), 'id': self.suite.suite_id})
+            if new:
+                session.execute("""
+                    INSERT INTO hashfile (path, suite_id)
+                    VALUES (:p, :id)""",
+                    [{'p': hashfile, 'id': self.suite.suite_id} for hashfile in new])
+
+        session.commit()
+
+    def _make_byhash_links(self, fileinfo, hashes):
+        # Create hardlinks in by-hash directories
+        for filename in fileinfo:
+            if not os.path.exists(filename):
+                # probably an uncompressed index we didn't generate
+                continue
+
+            for h in hashes:
+                field = h.release_field
+                hashfile = os.path.join(os.path.dirname(filename), 'by-hash', field, fileinfo[filename][field])
+                try:
+                    os.makedirs(os.path.dirname(hashfile))
+                except OSError as exc:
+                    if exc.errno != errno.EEXIST:
+                        raise
+                try:
+                    os.link(filename, hashfile)
+                except OSError as exc:
+                    if exc.errno != errno.EEXIST:
+                        raise
+
     def generate_release_files(self):
         """
         Generate Release files for the given suite
@@ -362,56 +425,9 @@ class ReleaseWriter(object):
         out.close()
         os.rename(outfile + '.new', outfile)
 
-        query = """
-            UPDATE hashfile SET unreferenced = CURRENT_TIMESTAMP
-            WHERE suite_id = :id AND unreferenced IS NULL"""
-        session.execute(query, {'id': suite.suite_id})
-
-        if suite.byhash:
-            for filename in fileinfo:
-                if not os.path.exists(filename):
-                    # probably an uncompressed index we didn't generate
-                    continue
-
-                for h in hashes:
-                    field = h.release_field
-                    hashfile = os.path.join(os.path.dirname(filename), 'by-hash', field, fileinfo[filename][field])
-                    query = "SELECT 1 FROM hashfile WHERE path = :p AND suite_id = :id"
-                    q = session.execute(
-                            query,
-                            {'p': hashfile, 'id': suite.suite_id})
-                    if q.rowcount:
-                        session.execute('''
-                            UPDATE hashfile SET unreferenced = NULL
-                            WHERE path = :p and suite_id = :id''',
-                            {'p': hashfile, 'id': suite.suite_id})
-                    else:
-                        session.execute('''
-                            INSERT INTO hashfile (path, suite_id)
-                            VALUES (:p, :id)''',
-                            {'p': hashfile, 'id': suite.suite_id})
-        session.commit()
-
+        self._update_hashfile_table(session, fileinfo, hashes)
         if suite.byhash:
-            for filename in fileinfo:
-                if not os.path.exists(filename):
-                    # probably an uncompressed index we didn't generate
-                    continue
-
-                for h in hashes:
-                    field = h.release_field
-                    hashfile = os.path.join(os.path.dirname(filename), 'by-hash', field, fileinfo[filename][field])
-                    try:
-                        os.makedirs(os.path.dirname(hashfile))
-                    except OSError as exc:
-                        if exc.errno != errno.EEXIST:
-                            raise
-                    try:
-                        os.link(filename, hashfile)
-                    except OSError as exc:
-                        if exc.errno != errno.EEXIST:
-                            raise
-
+            self._make_byhash_links(fileinfo, hashes)
         sign_release_dir(suite, os.path.dirname(outfile))
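
As a rough, self-contained sketch of the by-hash layout this change introduces (hard links under by-hash/<ReleaseField>/<digest> next to each index file): the helper name link_by_hash and the hashlib-based RELEASE_FIELDS mapping below are hypothetical stand-ins, since dak itself uses the apt_pkg hash functions listed in RELEASE_HASHES and reuses the digests already recorded in fileinfo rather than re-hashing the files.

    import errno
    import hashlib
    import os

    # Stand-in for dak's RELEASE_HASHES: Release field name -> digest function.
    RELEASE_FIELDS = {
        'MD5Sum': lambda data: hashlib.md5(data).hexdigest(),
        'SHA1': lambda data: hashlib.sha1(data).hexdigest(),
        'SHA256': lambda data: hashlib.sha256(data).hexdigest(),
    }

    def link_by_hash(filename):
        # Hard-link one index file into by-hash/<Field>/<digest>, mirroring
        # the layout created by ReleaseWriter._make_byhash_links in the diff.
        with open(filename, 'rb') as fh:
            data = fh.read()
        for field, digest_func in RELEASE_FIELDS.items():
            hashfile = os.path.join(os.path.dirname(filename), 'by-hash',
                                    field, digest_func(data))
            try:
                os.makedirs(os.path.dirname(hashfile))
            except OSError as exc:
                if exc.errno != errno.EEXIST:
                    raise
            try:
                os.link(filename, hashfile)
            except OSError as exc:
                if exc.errno != errno.EEXIST:
                    raise

For example, link_by_hash('dists/unstable/main/binary-amd64/Packages.gz') would create dists/unstable/main/binary-amd64/by-hash/SHA256/<sha256 digest> (plus the MD5Sum and SHA1 counterparts) as hard links to the index, which is what allows clients to fetch indices by content hash.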