import time
import gzip
import bz2
+import errno
import apt_pkg
import subprocess
from tempfile import mkstemp, mkdtemp
def __init__(self, suite):
    """
    Remember the suite this writer generates Release files for.

    :param suite: database suite object (provides ``archive.path``,
        ``suite_name``, ``byhash``, … — see the methods below).
    """
    self.suite = suite
def suite_path(self):
    """
    Return the absolute path to the suite-specific files.

    Built from the archive path, the literal ``dists`` directory, the
    suite name and the optional ``Dinstall::SuiteSuffix`` setting.
    """
    suffix = Config().find("Dinstall::SuiteSuffix", "")
    parts = (self.suite.archive.path, 'dists', self.suite.suite_name, suffix)
    return os.path.join(*parts)
+
def suite_release_path(self):
    """
    Return the absolute path where Release files are physically stored.

    This should be a path that sorts after the dists/ directory.
    """
    # TODO: Eventually always create Release in `zzz-dists` to avoid
    # special cases. However we don't want to move existing Release files
    # for released suites.
    # See `create_release_symlinks` below.
    if self.suite.byhash:
        suffix = Config().find("Dinstall::SuiteSuffix", "")
        return os.path.join(self.suite.archive.path, 'zzz-dists',
                            self.suite.suite_name, suffix)
    # Non-by-hash suites keep their Release files directly in dists/.
    return self.suite_path()
+
def create_release_symlinks(self):
    """
    Create symlinks for Release files.

    This creates the symlinks for Release files in the `suite_path`
    to the actual files in `suite_release_path`.
    """
    # TODO: Eventually always create the links.
    # See `suite_release_path` above.
    if not self.suite.byhash:
        return

    relpath = os.path.relpath(self.suite_release_path(), self.suite_path())
    for f in ("Release", "Release.gpg", "InRelease"):
        source = os.path.join(relpath, f)
        dest = os.path.join(self.suite_path(), f)
        if os.path.islink(dest):
            if os.readlink(dest) == source:
                # Correct link already in place; nothing to do.
                continue
            # Stale link pointing elsewhere: replace it.
            os.unlink(dest)
        elif os.path.lexists(dest):
            # A plain file (pre-by-hash Release) is in the way; remove it.
            os.unlink(dest)
        # Bug fix: the previous version called os.unlink() unconditionally
        # when `dest` was not a symlink, which raised ENOENT for freshly
        # created suites where no Release file exists yet.
        os.symlink(source, dest)
+
def create_output_directories(self):
    """Ensure both output directories for this suite exist."""
    wanted = (self.suite_path(), self.suite_release_path())
    for directory in wanted:
        try:
            os.makedirs(directory)
        except OSError as err:
            # An already-existing directory is fine; anything else is not.
            if err.errno == errno.EEXIST:
                continue
            raise
+
# NOTE(review): everything below is a unified-diff fragment ('+'/'-' line
# markers, context lines omitted).  Several names used here (attribs,
# components, session, entry, uncompnotseen, contents, re_gensubrelease,
# XzFile, sign_release_dir, ...) are defined in lines not shown in this
# fragment — do not assume this text is runnable as-is.
def generate_release_files(self):
"""
Generate Release files for the given suite
('Label', 'label'),
('Suite', 'release_suite_output'),
('Version', 'version'),
- ('Codename', 'codename') )
+ ('Codename', 'codename'),
+ ('Changelogs', 'changelog_url'),
+ )
# A "Sub" Release file has slightly different fields
subattribs = ( ('Archive', 'suite_name'),
# Boolean stuff. If we find it true in database, write out "yes" into the release file
boolattrs = ( ('NotAutomatic', 'notautomatic'),
- ('ButAutomaticUpgrades', 'butautomaticupgrades') )
+ ('ButAutomaticUpgrades', 'butautomaticupgrades'),
+ ('Acquire-By-Hash', 'byhash'),
+ )
cnf = Config()
suite_suffix = cnf.find("Dinstall::SuiteSuffix", "")
# NOTE(review): the patch redirects the Release output from the plain
# dists/ path to suite_release_path() (zzz-dists for by-hash suites) and
# makes sure the directories and the dists/ symlinks exist first.
- outfile = os.path.join(suite.archive.path, 'dists', suite.suite_name, suite_suffix, "Release")
+ self.create_output_directories()
+ self.create_release_symlinks()
+
+ outfile = os.path.join(self.suite_release_path(), "Release")
out = open(outfile + ".new", "w")
for key, dbfield in attribs:
# Hack to skip NULL Version fields as we used to do this
# We should probably just always ignore anything which is None
- if key == "Version" and getattr(suite, dbfield) is None:
+ if key in ("Version", "Changelogs") and getattr(suite, dbfield) is None:
continue
out.write("%s: %s\n" % (key, getattr(suite, dbfield)))
out.write("Description: %s\n" % suite.description)
for comp in components:
- for dirpath, dirnames, filenames in os.walk(os.path.join(suite.archive.path, "dists", suite.suite_name, suite_suffix, comp), topdown=True):
+ for dirpath, dirnames, filenames in os.walk(os.path.join(self.suite_path(), comp), topdown=True):
if not re_gensubrelease.match(dirpath):
continue
# their checksums to the main Release file
oldcwd = os.getcwd()
- os.chdir(os.path.join(suite.archive.path, "dists", suite.suite_name, suite_suffix))
+ os.chdir(self.suite_path())
# NOTE(review): the hard-coded hash table is replaced by one derived from
# suite.checksums; presumably entries like 'md5sum' map to apt_pkg.md5sum
# while the 'UM'->'um' replace turns 'MD5SUM' into the 'MD5Sum' field name
# — TODO confirm against the suite.checksums values actually stored.
- hashfuncs = { 'MD5Sum' : apt_pkg.md5sum,
- 'SHA1' : apt_pkg.sha1sum,
- 'SHA256' : apt_pkg.sha256sum }
+ hashfuncs = dict(zip([x.upper().replace('UM', 'um') for x in suite.checksums],
+ [getattr(apt_pkg, "%s" % (x)) for x in [x.replace("sum", "") + "sum" for x in suite.checksums]]))
fileinfo = {}
# If we find a file for which we have a compressed version and
# haven't yet seen the uncompressed one, store the possibility
# for future use
# NOTE(review): the patch switches the dict-membership test from `entry`
# to `filename` so the key tested matches the key stored on the next line;
# `entry` is still used for the suffix check (defined in omitted context).
- if entry.endswith(".gz") and entry[:-3] not in uncompnotseen:
+ if entry.endswith(".gz") and filename[:-3] not in uncompnotseen:
uncompnotseen[filename[:-3]] = (gzip.GzipFile, filename)
- elif entry.endswith(".bz2") and entry[:-4] not in uncompnotseen:
+ elif entry.endswith(".bz2") and filename[:-4] not in uncompnotseen:
uncompnotseen[filename[:-4]] = (bz2.BZ2File, filename)
- elif entry.endswith(".xz") and entry[:-3] not in uncompnotseen:
+ elif entry.endswith(".xz") and filename[:-3] not in uncompnotseen:
uncompnotseen[filename[:-3]] = (XzFile, filename)
fileinfo[filename]['len'] = len(contents)
out.close()
os.rename(outfile + '.new', outfile)
# NOTE(review): by-hash bookkeeping — mark every hashfile row for this
# suite unreferenced, then re-reference (or insert) the ones still in use
# and hard-link the index files into by-hash/<HASH>/<digest> directories.
# EEXIST is tolerated on both makedirs and link so reruns are idempotent.
+ if suite.byhash:
+ query = """
+ UPDATE hashfile SET unreferenced = CURRENT_TIMESTAMP
+ WHERE suite_id = :id AND unreferenced IS NULL"""
+ session.execute(query, {'id': suite.suite_id})
+
+ for filename in fileinfo:
+ if not os.path.exists(filename):
+ # probably an uncompressed index we didn't generate
+ continue
+
+ for h in hashfuncs:
+ hashfile = os.path.join(os.path.dirname(filename), 'by-hash', h, fileinfo[filename][h])
+ query = "SELECT 1 FROM hashfile WHERE path = :p AND suite_id = :id"
+ q = session.execute(
+ query,
+ {'p': hashfile, 'id': suite.suite_id})
+ if q.rowcount:
+ session.execute('''
+ UPDATE hashfile SET unreferenced = NULL
+ WHERE path = :p and suite_id = :id''',
+ {'p': hashfile, 'id': suite.suite_id})
+ else:
+ session.execute('''
+ INSERT INTO hashfile (path, suite_id)
+ VALUES (:p, :id)''',
+ {'p': hashfile, 'id': suite.suite_id})
+
+ try:
+ os.makedirs(os.path.dirname(hashfile))
+ except OSError as exc:
+ if exc.errno != errno.EEXIST:
+ raise
+ try:
+ os.link(filename, hashfile)
+ except OSError as exc:
+ if exc.errno != errno.EEXIST:
+ raise
+
+ session.commit()
+
sign_release_dir(suite, os.path.dirname(outfile))
os.chdir(oldcwd)