-################################################################################
-
-def add_tiffani (files, path, indexstem):
- index = "%s.diff/Index" % (indexstem)
- filepath = "%s/%s" % (path, index)
- if os.path.exists(filepath):
- #print "ALERT: there was a tiffani file %s" % (filepath)
- files.append(index)
-
-def compressnames (tree,type,file):
- compress = AptCnf.get("%s::%s::Compress" % (tree,type), AptCnf.get("Default::%s::Compress" % (type), ". gzip"))
- result = []
- cl = compress.split()
- uncompress = ("." not in cl)
- for mode in compress.split():
- if mode == ".":
- result.append(file)
- elif mode == "gzip":
- if uncompress:
- result.append("<zcat/.gz>" + file)
- uncompress = 0
- result.append(file + ".gz")
- elif mode == "bzip2":
- if uncompress:
- result.append("<bzcat/.bz2>" + file)
- uncompress = 0
- result.append(file + ".bz2")
- return result
-
-decompressors = { 'zcat' : gzip.GzipFile,
- 'bzip2' : bz2.BZ2File }
-
-def print_md5sha_files (tree, files, hashop):
- path = Cnf["Dir::Root"] + tree + "/"
- for name in files:
- hashvalue = ""
- hashlen = 0
- try:
- if name[0] == "<":
- j = name.index("/")
- k = name.index(">")
- (cat, ext, name) = (name[1:j], name[j+1:k], name[k+1:])
- file_handle = decompressors[ cat ]( "%s%s%s" % (path, name, ext) )
- contents = file_handle.read()
- hashvalue = hashop(contents)
- hashlen = len(contents)
+########################################################################
+
def sign_release_dir(suite, dirname):
    """Sign the Release file in *dirname* for the given suite.

    Produces a detached signature (Release.gpg) and an inline-signed
    copy (InRelease) next to the Release file.  Stale signature files
    are removed first because the gpg invocations below append (>>).
    """
    cnf = Config()

    # Default to gpg's own keyring configuration when no signing
    # keyring is configured; previously `keyring` was left unbound in
    # that case and the os.system() calls below raised NameError.
    keyring = ""
    if cnf.has_key("Dinstall::SigningKeyring"):
        keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
    if cnf.has_key("Dinstall::SigningPubKeyring"):
        keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]

    arguments = "--no-options --batch --no-tty --armour --personal-digest-preferences=SHA256"

    relname = os.path.join(dirname, 'Release')

    # Remove leftovers from a previous run so the >> redirection below
    # starts from an empty file.
    dest = os.path.join(dirname, 'Release.gpg')
    if os.path.exists(dest):
        os.unlink(dest)

    inlinedest = os.path.join(dirname, 'InRelease')
    if os.path.exists(inlinedest):
        os.unlink(inlinedest)

    # Sign with every key configured for the suite (if any).
    defkeyid = ""
    for keyid in suite.signingkeys or []:
        defkeyid += "--local-user %s " % keyid

    # NOTE(review): paths are interpolated into a shell command line.
    # They come from local configuration, but would break on embedded
    # quotes/whitespace -- consider shell-quoting if that can occur.
    os.system("gpg %s %s %s --detach-sign <%s >>%s" %
              (keyring, defkeyid, arguments, relname, dest))
    os.system("gpg %s %s %s --clearsign <%s >>%s" %
              (keyring, defkeyid, arguments, relname, inlinedest))
+
class XzFile(object):
    """Minimal read-only wrapper decompressing an .xz file via `xz -d`.

    Mirrors the subset of the gzip.GzipFile / bz2.BZ2File interface the
    release code uses: construct with a filename, call read() to obtain
    the decompressed contents.
    """
    def __init__(self, filename, mode='r'):
        # `mode` is accepted only for interface compatibility with the
        # gzip/bz2 file classes; the file is always opened for reading.
        self.filename = filename

    def read(self):
        """Return the decompressed file contents as a byte string."""
        cmd = ("xz", "-d")
        # Binary mode: the compressed payload is not text, so do not
        # let any text-mode translation mangle it.
        with open(self.filename, 'rb') as stdin:
            process = daklib.daksubprocess.Popen(cmd, stdin=stdin, stdout=subprocess.PIPE)
            # No stderr pipe is set up, so the second element of
            # communicate() is always None -- discard it explicitly.
            (stdout, _) = process.communicate()
            return stdout
+
+
class HashFunc(object):
    """Description of one checksum algorithm used in Release files.

    Bundles the Release-file field name, the digest callable and the
    database column name for a single algorithm.
    """
    def __init__(self, release_field, func, db_name):
        self.release_field = release_field  # field header in the Release file
        self.func = func                    # callable producing the digest
        self.db_name = db_name              # identifier used in the database
+
# Checksum algorithms advertised in Release files, in the order their
# sections appear; the digest implementations come from apt_pkg.
RELEASE_HASHES = [
    HashFunc('MD5Sum', apt_pkg.md5sum, 'md5'),
    HashFunc('SHA1', apt_pkg.sha1sum, 'sha1'),
    HashFunc('SHA256', apt_pkg.sha256sum, 'sha256'),
]
+
+
+class ReleaseWriter(object):
    def __init__(self, suite):
        """Create a Release writer for the given suite object."""
        self.suite = suite
+
+ def suite_path(self):
+ """
+ Absolute path to the suite-specific files.
+ """
+ cnf = Config()
+ suite_suffix = cnf.find("Dinstall::SuiteSuffix", "")
+
+ return os.path.join(self.suite.archive.path, 'dists',
+ self.suite.suite_name, suite_suffix)
+
+ def suite_release_path(self):
+ """
+ Absolute path where Release files are physically stored.
+ This should be a path that sorts after the dists/ directory.
+ """
+ # TODO: Eventually always create Release in `zzz-dists` to avoid
+ # special cases. However we don't want to move existing Release files
+ # for released suites.
+ # See `create_release_symlinks` below.
+ if not self.suite.byhash:
+ return self.suite_path()
+
+ cnf = Config()
+ suite_suffix = cnf.find("Dinstall::SuiteSuffix", "")
+
+ return os.path.join(self.suite.archive.path, 'zzz-dists',
+ self.suite.suite_name, suite_suffix)
+
    def create_release_symlinks(self):
        """
        Create symlinks for Release files.
        This creates the symlinks for Release files in the `suite_path`
        to the actual files in `suite_release_path`.
        """
        # TODO: Eventually always create the links.
        # See `suite_release_path` above.
        if not self.suite.byhash:
            return

        # Relative link targets keep the archive tree relocatable.
        relpath = os.path.relpath(self.suite_release_path(), self.suite_path())
        for f in ("Release", "Release.gpg", "InRelease"):
            source = os.path.join(relpath, f)
            dest = os.path.join(self.suite_path(), f)
            # NOTE(review): os.unlink raises OSError when `dest` does not
            # exist yet (a missing path is "not a symlink") -- confirm the
            # continuation of this loop (not visible here) handles the
            # first-time-creation case.
            if not os.path.islink(dest):
                os.unlink(dest)
            elif os.readlink(dest) == source:
                continue