X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=dak%2Fgenerate_releases.py;h=82ff6394bffa3b92765d6f403a121c3b939c10cc;hb=3d25c468b152899041da69857c3f01e9996b39d1;hp=3006364602517c17466c075afa82efc0ee7fed56;hpb=475051efae41a30723cdc1ab82c521cd1accf75b;p=dak.git

diff --git a/dak/generate_releases.py b/dak/generate_releases.py
index 30063646..82ff6394 100755
--- a/dak/generate_releases.py
+++ b/dak/generate_releases.py
@@ -37,7 +37,9 @@ import stat
 import time
 import gzip
 import bz2
+import errno
 import apt_pkg
+import subprocess
 from tempfile import mkstemp, mkdtemp
 import commands
 from sqlalchemy.orm import object_session
@@ -48,6 +50,7 @@ from daklib.dak_exceptions import *
 from daklib.dbconn import *
 from daklib.config import Config
 from daklib.dakmultiprocessing import DakProcessPool, PROC_STATUS_SUCCESS
+import daklib.daksubprocess
 
 ################################################################################
 Logger = None                  #: Our logging object
@@ -60,13 +63,15 @@ def usage (exit_code=0):
     print """Usage: dak generate-releases [OPTIONS]
 Generate the Release files
 
+  -a, --archive=ARCHIVE      process suites in ARCHIVE
   -s, --suite=SUITE(s)       process this suite
                              Default: All suites not marked 'untouchable'
   -f, --force                Allow processing of untouchable suites
                              CAREFUL: Only to be used at (point) release time!
   -h, --help                 show this help and exit
+  -q, --quiet                Don't output progress
 
-SUITE can be a space seperated list, e.g.
+SUITE can be a space separated list, e.g.
    --suite=unstable testing
   """
     sys.exit(exit_code)
@@ -81,7 +86,7 @@ def sign_release_dir(suite, dirname):
         if cnf.has_key("Dinstall::SigningPubKeyring"):
             keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
 
-        arguments = "--no-options --batch --no-tty --armour"
+        arguments = "--no-options --batch --no-tty --armour --personal-digest-preferences=SHA256"
 
         relname = os.path.join(dirname, 'Release')
 
@@ -93,20 +98,24 @@ def sign_release_dir(suite, dirname):
         if os.path.exists(inlinedest):
             os.unlink(inlinedest)
 
-        # We can only use one key for inline signing so use the first one in
-        # the array for consistency
-        firstkey = True
-
-        for keyid in suite.signingkeys:
-            defkeyid = "--default-key %s" % keyid
-
-            os.system("gpg %s %s %s --detach-sign <%s >>%s" %
-                      (keyring, defkeyid, arguments, relname, dest))
-
-            if firstkey:
-                os.system("gpg %s %s %s --clearsign <%s >>%s" %
-                          (keyring, defkeyid, arguments, relname, inlinedest))
-                firstkey = False
+        defkeyid=""
+        for keyid in suite.signingkeys or []:
+            defkeyid += "--local-user %s " % keyid
+
+        os.system("gpg %s %s %s --detach-sign <%s >>%s" %
+                  (keyring, defkeyid, arguments, relname, dest))
+        os.system("gpg %s %s %s --clearsign <%s >>%s" %
+                  (keyring, defkeyid, arguments, relname, inlinedest))
+
+class XzFile(object):
+    def __init__(self, filename, mode='r'):
+        self.filename = filename
+    def read(self):
+        cmd = ("xz", "-d")
+        with open(self.filename, 'r') as stdin:
+            process = daklib.daksubprocess.Popen(cmd, stdin=stdin, stdout=subprocess.PIPE)
+            (stdout, stderr) = process.communicate()
+            return stdout
 
 class ReleaseWriter(object):
     def __init__(self, suite):
@@ -129,9 +138,11 @@ class ReleaseWriter(object):
         # fill them in
         attribs = ( ('Origin',      'origin'),
                     ('Label',       'label'),
-                    ('Suite',       'suite_name'),
+                    ('Suite',       'release_suite_output'),
                     ('Version',     'version'),
-                    ('Codename',    'codename') )
+                    ('Codename',    'codename'),
+                    ('Changelogs',  'changelog_url'),
+                  )
 
         # A "Sub" Release file has slightly different fields
         subattribs = ( ('Archive',  'suite_name'),
@@ -141,22 +152,24 @@ class ReleaseWriter(object):
 
         # Boolean stuff. If we find it true in database, write out "yes" into the release file
         boolattrs = ( ('NotAutomatic',         'notautomatic'),
-                      ('ButAutomaticUpgrades', 'butautomaticupgrades') )
+                      ('ButAutomaticUpgrades', 'butautomaticupgrades'),
+                      ('Acquire-By-Hash',      'byhash'),
+                    )
 
         cnf = Config()
 
-        suite_suffix = "%s" % (cnf.Find("Dinstall::SuiteSuffix"))
+        suite_suffix = cnf.find("Dinstall::SuiteSuffix", "")
 
-        outfile = os.path.join(cnf["Dir::Root"], 'dists', "%s/%s" % (suite.suite_name, suite_suffix), "Release")
+        outfile = os.path.join(suite.archive.path, 'dists', suite.suite_name, suite_suffix, "Release")
         out = open(outfile + ".new", "w")
 
         for key, dbfield in attribs:
-            if getattr(suite, dbfield) is not None:
-                # TEMPORARY HACK HACK HACK until we change the way we store the suite names etc
-                if key == 'Suite' and getattr(suite, dbfield) == 'squeeze-updates':
-                    out.write("Suite: stable-updates\n")
-                else:
-                    out.write("%s: %s\n" % (key, getattr(suite, dbfield)))
+            # Hack to skip NULL Version fields as we used to do this
+            # We should probably just always ignore anything which is None
+            if key in ("Version", "Changelogs") and getattr(suite, dbfield) is None:
+                continue
+
+            out.write("%s: %s\n" % (key, getattr(suite, dbfield)))
 
         out.write("Date: %s\n" % (time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime(time.time()))))
 
@@ -170,11 +183,9 @@ class ReleaseWriter(object):
 
         out.write("Architectures: %s\n" % (" ".join([a.arch_string for a in architectures])))
 
-        ## FIXME: Components need to be adjusted to whatever will be in the db
-        ## Needs putting in the DB
-        components = ['main', 'contrib', 'non-free']
+        components = [ c.component_name for c in suite.components ]
 
-        out.write("Components: %s\n" % ( " ".join(map(lambda x: "%s%s" % (suite_suffix, x), components ))))
+        out.write("Components: %s\n" % (" ".join(components)))
 
         # For exact compatibility with old g-r, write out Description here instead
         # of with the rest of the DB fields above
@@ -182,7 +193,7 @@ class ReleaseWriter(object):
             out.write("Description: %s\n" % suite.description)
 
         for comp in components:
-            for dirpath, dirnames, filenames in os.walk("%sdists/%s/%s%s" % (cnf["Dir::Root"], suite.suite_name, suite_suffix, comp), topdown=True):
+            for dirpath, dirnames, filenames in os.walk(os.path.join(suite.archive.path, "dists", suite.suite_name, suite_suffix, comp), topdown=True):
                 if not re_gensubrelease.match(dirpath):
                     continue
 
@@ -214,11 +225,10 @@ class ReleaseWriter(object):
         # their checksums to the main Release file
         oldcwd = os.getcwd()
 
-        os.chdir("%sdists/%s/%s" % (cnf["Dir::Root"], suite.suite_name, suite_suffix))
+        os.chdir(os.path.join(suite.archive.path, "dists", suite.suite_name, suite_suffix))
 
-        hashfuncs = { 'MD5Sum' : apt_pkg.md5sum,
-                      'SHA1' : apt_pkg.sha1sum,
-                      'SHA256' : apt_pkg.sha256sum }
+        hashfuncs = dict(zip([x.upper().replace('UM', 'um') for x in suite.checksums],
+                             [getattr(apt_pkg, "%s" % (x)) for x in [x.replace("sum", "") + "sum" for x in suite.checksums]]))
 
         fileinfo = {}
 
@@ -240,10 +250,12 @@ class ReleaseWriter(object):
                 # If we find a file for which we have a compressed version and
                 # haven't yet seen the uncompressed one, store the possibility
                 # for future use
-                if entry.endswith(".gz") and entry[:-3] not in uncompnotseen.keys():
+                if entry.endswith(".gz") and filename[:-3] not in uncompnotseen:
                     uncompnotseen[filename[:-3]] = (gzip.GzipFile, filename)
-                elif entry.endswith(".bz2") and entry[:-4] not in uncompnotseen.keys():
+                elif entry.endswith(".bz2") and filename[:-4] not in uncompnotseen:
                     uncompnotseen[filename[:-4]] = (bz2.BZ2File, filename)
+                elif entry.endswith(".xz") and filename[:-3] not in uncompnotseen:
+                    uncompnotseen[filename[:-3]] = (XzFile, filename)
 
                 fileinfo[filename]['len'] = len(contents)
 
@@ -253,12 +265,7 @@ class ReleaseWriter(object):
         for filename, comp in uncompnotseen.items():
             # If we've already seen the uncompressed file, we don't
             # need to do anything again
-            if filename in fileinfo.keys():
-                continue
-
-            # Skip uncompressed Contents files as they're huge, take ages to
-            # checksum and we checksum the compressed ones anyways
-            if os.path.basename(filename).startswith("Contents"):
+            if filename in fileinfo:
                 continue
 
             fileinfo[filename] = {}
@@ -280,6 +287,47 @@ class ReleaseWriter(object):
         out.close()
         os.rename(outfile + '.new', outfile)
 
+        if suite.byhash:
+            query = """
+                UPDATE hashfile SET unreferenced = CURRENT_TIMESTAMP
+                WHERE suite_id = :id AND unreferenced IS NULL"""
+            session.execute(query, {'id': suite.suite_id})
+
+            for filename in fileinfo:
+                if not os.path.exists(filename):
+                    # probably an uncompressed index we didn't generate
+                    continue
+
+                for h in hashfuncs:
+                    hashfile = os.path.join(os.path.dirname(filename), 'by-hash', h, fileinfo[filename][h])
+                    query = "SELECT 1 FROM hashfile WHERE path = :p AND suite_id = :id"
+                    q = session.execute(
+                            query,
+                            {'p': hashfile, 'id': suite.suite_id})
+                    if q.rowcount:
+                        session.execute('''
+                            UPDATE hashfile SET unreferenced = NULL
+                            WHERE path = :p and suite_id = :id''',
+                            {'p': hashfile, 'id': suite.suite_id})
+                    else:
+                        session.execute('''
+                            INSERT INTO hashfile (path, suite_id)
+                            VALUES (:p, :id)''',
+                            {'p': hashfile, 'id': suite.suite_id})
+
+                    try:
+                        os.makedirs(os.path.dirname(hashfile))
+                    except OSError as exc:
+                        if exc.errno != errno.EEXIST:
+                            raise
+                    try:
+                        os.link(filename, hashfile)
+                    except OSError as exc:
+                        if exc.errno != errno.EEXIST:
+                            raise
+
+            session.commit()
+
         sign_release_dir(suite, os.path.dirname(outfile))
 
         os.chdir(oldcwd)
@@ -292,21 +340,25 @@ def main ():
 
     cnf = Config()
 
-    for i in ["Help", "Suite", "Force"]:
+    for i in ["Help", "Suite", "Force", "Quiet"]:
         if not cnf.has_key("Generate-Releases::Options::%s" % (i)):
             cnf["Generate-Releases::Options::%s" % (i)] = ""
 
     Arguments = [('h',"help","Generate-Releases::Options::Help"),
+                 ('a','archive','Generate-Releases::Options::Archive','HasArg'),
                  ('s',"suite","Generate-Releases::Options::Suite"),
-                 ('f',"force","Generate-Releases::Options::Force")]
+                 ('f',"force","Generate-Releases::Options::Force"),
+                 ('q',"quiet","Generate-Releases::Options::Quiet"),
+                 ('o','option','','ArbItem')]
 
-    suite_names = apt_pkg.ParseCommandLine(cnf.Cnf, Arguments, sys.argv)
-    Options = cnf.SubTree("Generate-Releases::Options")
+    suite_names = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
+    Options = cnf.subtree("Generate-Releases::Options")
 
     if Options["Help"]:
         usage()
 
-    Logger = daklog.Logger(cnf, 'generate-releases')
+    Logger = daklog.Logger('generate-releases')
+    pool = DakProcessPool()
 
     session = DBConn().session()
 
@@ -320,19 +372,21 @@ def main ():
                 print "cannot find suite %s" % s
                 Logger.log(['cannot find suite %s' % s])
     else:
-        suites = session.query(Suite).filter(Suite.untouchable == False).all()
+        query = session.query(Suite).filter(Suite.untouchable == False)
+        if 'Archive' in Options:
+            query = query.join(Suite.archive).filter(Archive.archive_name==Options['Archive'])
+        suites = query.all()
 
     broken=[]
 
-    pool = DakProcessPool()
-
     for s in suites:
         # Setup a multiprocessing Pool. As many workers as we have CPU cores.
         if s.untouchable and not Options["Force"]:
             print "Skipping %s (untouchable)" % s.suite_name
             continue
 
-        print "Processing %s" % s.suite_name
+        if not Options["Quiet"]:
+            print "Processing %s" % s.suite_name
         Logger.log(['Processing release file for Suite: %s' % (s.suite_name)])
 
         pool.apply_async(generate_helper, (s.suite_id, ))
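A note on the hunk at @@ -214,11 +225,10 @@: the new hashfuncs expression is fairly dense. The stand-alone sketch below (not part of the patch) shows what it evaluates to, assuming suite.checksums holds values such as 'md5sum', 'sha1' and 'sha256'; the real contents of that column are not visible in this diff.

    # Illustrative sketch only, not dak code.
    import apt_pkg

    checksums = ['md5sum', 'sha1', 'sha256']   # assumed example value of suite.checksums

    # Release field names: 'md5sum' -> 'MD5Sum', 'sha1' -> 'SHA1', 'sha256' -> 'SHA256'
    names = [x.upper().replace('UM', 'um') for x in checksums]

    # Checksum helpers from python-apt, as used in the old hard-coded dict:
    # apt_pkg.md5sum, apt_pkg.sha1sum, apt_pkg.sha256sum
    funcs = [getattr(apt_pkg, x.replace("sum", "") + "sum") for x in checksums]

    hashfuncs = dict(zip(names, funcs))
    # => {'MD5Sum': apt_pkg.md5sum, 'SHA1': apt_pkg.sha1sum, 'SHA256': apt_pkg.sha256sum}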
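Similarly, the by-hash block added at @@ -280,6 +287,47 @@ records each index file in the hashfile table and hard-links it under a by-hash directory next to the index, keyed by hash name and digest. A minimal sketch of the filesystem side alone, with the database bookkeeping left out (the helper name link_by_hash is invented for this note):

    # Illustrative sketch only, not dak code.
    import errno
    import os

    def link_by_hash(filename, hashname, hexdigest):
        # e.g. main/binary-amd64/Packages.gz -> main/binary-amd64/by-hash/SHA256/<hexdigest>
        hashfile = os.path.join(os.path.dirname(filename), 'by-hash', hashname, hexdigest)
        try:
            os.makedirs(os.path.dirname(hashfile))
        except OSError as exc:
            if exc.errno != errno.EEXIST:   # the directory may already exist
                raise
        try:
            os.link(filename, hashfile)     # hard link, so the content is stored only once
        except OSError as exc:
            if exc.errno != errno.EEXIST:   # an identical link may already exist
                raise
        return hashfile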