generate_releases: replace hashfuncs initialization with a legible version
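
The old code built the mapping from Release field names to apt_pkg hash
functions out of a pair of nested string manipulations on suite.checksums,
which was hard to follow. Replace it with a small HashFunc description
class and an explicit RELEASE_HASHES table that is filtered against
suite.checksums.

A minimal sketch of the equivalence, assuming suite.checksums holds the
usual ['md5sum', 'sha1', 'sha256'] and that the HashFunc / RELEASE_HASHES
definitions from this patch are in scope (illustration only, not part of
the patch):

    import apt_pkg

    checksums = ['md5sum', 'sha1', 'sha256']   # assumed value of suite.checksums

    # Old construction (the removed lines below):
    hashfuncs = dict(zip([x.upper().replace('UM', 'um') for x in checksums],
                         [getattr(apt_pkg, "%s" % (x)) for x in [x.replace("sum", "") + "sum" for x in checksums]]))

    # New construction (added by this patch):
    hashes = [x for x in RELEASE_HASHES if x.db_name in checksums]

    # Both select the same field/function pairs:
    assert hashfuncs == {h.release_field: h.func for h in hashes}
    # -> {'MD5Sum': apt_pkg.md5sum, 'SHA1': apt_pkg.sha1sum, 'SHA256': apt_pkg.sha256sum}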

diff --git a/dak/generate_releases.py b/dak/generate_releases.py
index 82ff6394bffa3b92765d6f403a121c3b939c10cc..d947def8a3cca2a886bc9e9db11f6679459207cf 100755
--- a/dak/generate_releases.py
+++ b/dak/generate_releases.py
@@ -117,10 +117,83 @@ class XzFile(object):
             (stdout, stderr) = process.communicate()
             return stdout
 
+
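+# Describes one checksum used in Release files: `release_field` is the field
+# name written to the Release file, `func` is the apt_pkg function computing
+# the digest, and `db_name` is the name this checksum has in the suite's
+# `checksums` setting.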
+class HashFunc(object):
+    def __init__(self, release_field, func, db_name):
+        self.release_field = release_field
+        self.func = func
+        self.db_name = db_name
+
+RELEASE_HASHES = [
+    HashFunc('MD5Sum', apt_pkg.md5sum, 'md5sum'),
+    HashFunc('SHA1', apt_pkg.sha1sum, 'sha1'),
+    HashFunc('SHA256', apt_pkg.sha256sum, 'sha256'),
+]
+
+
 class ReleaseWriter(object):
     def __init__(self, suite):
         self.suite = suite
 
+    def suite_path(self):
+        """
+        Absolute path to the suite-specific files.
+        """
+        cnf = Config()
+        suite_suffix = cnf.find("Dinstall::SuiteSuffix", "")
+
+        return os.path.join(self.suite.archive.path, 'dists',
+                            self.suite.suite_name, suite_suffix)
+
+    def suite_release_path(self):
+        """
+        Absolute path where Release files are physically stored.
+        This should be a path that sorts after the dists/ directory.
+        """
+        # TODO: Eventually always create Release in `zzz-dists` to avoid
+        # special cases. However we don't want to move existing Release files
+        # for released suites.
+        # See `create_release_symlinks` below.
+        if not self.suite.byhash:
+            return self.suite_path()
+
+        cnf = Config()
+        suite_suffix = cnf.find("Dinstall::SuiteSuffix", "")
+
+        return os.path.join(self.suite.archive.path, 'zzz-dists',
+                            self.suite.suite_name, suite_suffix)
+
+    def create_release_symlinks(self):
+        """
+        Create symlinks for Release files.
+        This creates the symlinks for Release files in the `suite_path`
+        to the actual files in `suite_release_path`.
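+
+        With byhash enabled and an empty Dinstall::SuiteSuffix the result
+        is roughly (illustrative, paths relative to the archive root):
+
+            dists/<suite>/Release     -> ../../zzz-dists/<suite>/Release
+            dists/<suite>/Release.gpg -> ../../zzz-dists/<suite>/Release.gpg
+            dists/<suite>/InRelease   -> ../../zzz-dists/<suite>/InRelease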
+        """
+        # TODO: Eventually always create the links.
+        # See `suite_release_path` above.
+        if not self.suite.byhash:
+            return
+
+        relpath = os.path.relpath(self.suite_release_path(), self.suite_path())
+        for f in ("Release", "Release.gpg", "InRelease"):
+            source = os.path.join(relpath, f)
+            dest = os.path.join(self.suite_path(), f)
+            if os.path.islink(dest):
+                if os.readlink(dest) == source:
+                    continue
+                os.unlink(dest)
+            elif os.path.exists(dest):
+                os.unlink(dest)
+            os.symlink(source, dest)
+
+    def create_output_directories(self):
+        for path in (self.suite_path(), self.suite_release_path()):
+            try:
+                os.makedirs(path)
+            except OSError as e:
+                if e.errno != errno.EEXIST:
+                    raise
+
     def generate_release_files(self):
         """
         Generate Release files for the given suite
@@ -160,7 +233,10 @@ class ReleaseWriter(object):
 
         suite_suffix = cnf.find("Dinstall::SuiteSuffix", "")
 
-        outfile = os.path.join(suite.archive.path, 'dists', suite.suite_name, suite_suffix, "Release")
+        self.create_output_directories()
+        self.create_release_symlinks()
+
+        outfile = os.path.join(self.suite_release_path(), "Release")
         out = open(outfile + ".new", "w")
 
         for key, dbfield in attribs:
@@ -193,7 +269,7 @@ class ReleaseWriter(object):
             out.write("Description: %s\n" % suite.description)
 
         for comp in components:
-            for dirpath, dirnames, filenames in os.walk(os.path.join(suite.archive.path, "dists", suite.suite_name, suite_suffix, comp), topdown=True):
+            for dirpath, dirnames, filenames in os.walk(os.path.join(self.suite_path(), comp), topdown=True):
                 if not re_gensubrelease.match(dirpath):
                     continue
 
@@ -225,10 +301,9 @@ class ReleaseWriter(object):
         # their checksums to the main Release file
         oldcwd = os.getcwd()
 
-        os.chdir(os.path.join(suite.archive.path, "dists", suite.suite_name, suite_suffix))
+        os.chdir(self.suite_path())
 
-        hashfuncs = dict(zip([x.upper().replace('UM', 'um') for x in suite.checksums],
-                             [getattr(apt_pkg, "%s" % (x)) for x in [x.replace("sum", "") + "sum" for x in suite.checksums]]))
+        hashes = [x for x in RELEASE_HASHES if x.db_name in suite.checksums]
 
         fileinfo = {}
 
@@ -259,8 +334,8 @@ class ReleaseWriter(object):
 
                 fileinfo[filename]['len'] = len(contents)
 
-                for hf, func in hashfuncs.items():
-                    fileinfo[filename][hf] = func(contents)
+                for hf in hashes:
+                    fileinfo[filename][hf.release_field] = hf.func(contents)
 
         for filename, comp in uncompnotseen.items():
             # If we've already seen the uncompressed file, we don't
@@ -275,14 +350,14 @@ class ReleaseWriter(object):
 
             fileinfo[filename]['len'] = len(contents)
 
-            for hf, func in hashfuncs.items():
-                fileinfo[filename][hf] = func(contents)
+            for hf in hashes:
+                fileinfo[filename][hf.release_field] = hf.func(contents)
 
 
-        for h in sorted(hashfuncs.keys()):
-            out.write('%s:\n' % h)
+        for field in sorted(h.release_field for h in hashes):
+            out.write('%s:\n' % field)
             for filename in sorted(fileinfo.keys()):
-                out.write(" %s %8d %s\n" % (fileinfo[filename][h], fileinfo[filename]['len'], filename))
+                out.write(" %s %8d %s\n" % (fileinfo[filename][field], fileinfo[filename]['len'], filename))
 
         out.close()
         os.rename(outfile + '.new', outfile)
@@ -298,8 +373,9 @@ class ReleaseWriter(object):
                     # probably an uncompressed index we didn't generate
                     continue
 
-                for h in hashfuncs:
-                    hashfile = os.path.join(os.path.dirname(filename), 'by-hash', h, fileinfo[filename][h])
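+                # One by-hash entry per configured hash, e.g. something like
+                # <component>/binary-<arch>/by-hash/SHA256/<hex digest>.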
+                for h in hashes:
+                    field = h.release_field
+                    hashfile = os.path.join(os.path.dirname(filename), 'by-hash', field, fileinfo[filename][field])
                     query = "SELECT 1 FROM hashfile WHERE path = :p AND suite_id = :id"
                     q = session.execute(
                             query,