X-Git-Url: https://git.decadent.org.uk/gitweb/?p=dak.git;a=blobdiff_plain;f=dak%2Fmake_changelog.py;h=ca8ed95eeadaf92907e50df2abbb203b2af8adab;hp=7dacf8252afd63f616528099aaa4289edd965110;hb=245c6549dbacaeab5ee36ec74372b1df8675b477;hpb=eef945db2ebe8dedfb837bab63dcc8d5f62ba36d

diff --git a/dak/make_changelog.py b/dak/make_changelog.py
index 7dacf825..ca8ed95e 100755
--- a/dak/make_changelog.py
+++ b/dak/make_changelog.py
@@ -54,14 +54,16 @@ import sys
 import apt_pkg
 from glob import glob
 from shutil import rmtree
+from yaml import safe_dump
 
 from daklib.dbconn import *
 from daklib import utils
-from daklib.config import Config
 from daklib.contents import UnpackedSource
 from daklib.regexes import re_no_epoch
 
 ################################################################################
 
+filelist = 'filelist.yaml'
+
 def usage (exit_code=0):
     print """Generate changelog between two suites
@@ -170,7 +172,7 @@ def export_files(session, archive, clpool, progress=False):
 
     sources = {}
     unpack = {}
-    files = ('changelog', 'copyright', 'NEWS.Debian', 'README.Debian')
+    files = ('changelog', 'copyright', 'NEWS', 'NEWS.Debian', 'README.Debian')
     stats = {'unpack': 0, 'created': 0, 'removed': 0, 'errors': 0, 'files': 0}
     query = """SELECT DISTINCT s.source, su.suite_name AS suite, s.version, c.name || '/' || f.filename AS filename
                FROM source s
@@ -194,12 +196,12 @@ def export_files(session, archive, clpool, progress=False):
             if not os.path.exists(path):
                 os.makedirs(path)
             if not os.path.exists(os.path.join(path, \
-                   '%s_%s.changelog' % (p, sources[p][s][0]))):
+                   '%s_%s_changelog' % (p, sources[p][s][0]))):
                 if not unpack.has_key(os.path.join(pool, sources[p][s][1])):
                     unpack[os.path.join(pool, sources[p][s][1])] = (path, set())
                 unpack[os.path.join(pool, sources[p][s][1])][1].add(s)
             else:
-                for file in glob('%s/%s_%s*' % (path, p, sources[p][s][0])):
+                for file in glob('%s/%s_%s_*' % (path, p, sources[p][s][0])):
                     link = '%s%s' % (s, file.split('%s_%s' \
                                     % (p, sources[p][s][0]))[1])
                     try:
@@ -211,7 +213,7 @@ def export_files(session, archive, clpool, progress=False):
     for p in unpack.keys():
         package = os.path.splitext(os.path.basename(p))[0].split('_')
         try:
-            unpacked = UnpackedSource(p)
+            unpacked = UnpackedSource(p, clpool)
             tempdir = unpacked.get_root_directory()
             stats['unpack'] += 1
             if progress:
@@ -222,9 +224,9 @@ def export_files(session, archive, clpool, progress=False):
             for file in files:
                 for f in glob(os.path.join(tempdir, 'debian', '*%s' % file)):
                     for s in unpack[p][1]:
-                        suite = os.path.join(unpack[p][0], '%s.%s' \
+                        suite = os.path.join(unpack[p][0], '%s_%s' \
                                 % (s, os.path.basename(f)))
-                        version = os.path.join(unpack[p][0], '%s_%s.%s' % \
+                        version = os.path.join(unpack[p][0], '%s_%s_%s' % \
                                   (package[0], package[1], os.path.basename(f)))
                         if not os.path.exists(version):
                             os.link(f, version)
@@ -240,20 +242,27 @@ def export_files(session, archive, clpool, progress=False):
             print 'make-changelog: unable to unpack %s\n%s' % (p, e)
             stats['errors'] += 1
 
-    for root, dirs, files in os.walk(clpool):
+    for root, dirs, files in os.walk(clpool, topdown=False):
+        files = [f for f in files if f != filelist]
         if len(files):
-            if root.split('/')[-1] not in sources.keys():
-                if os.path.exists(root):
-                    rmtree(root)
-                    stats['removed'] += 1
+            if root != clpool:
+                if root.split('/')[-1] not in sources.keys():
+                    if os.path.exists(root):
+                        stats['removed'] += len(os.listdir(root))
+                        rmtree(root)
             for file in files:
                 if os.path.exists(os.path.join(root, file)):
                     if os.stat(os.path.join(root, file)).st_nlink == 1:
-                        os.unlink(os.path.join(root, file))
                         stats['removed'] += 1
-
-    for root, dirs, files in os.walk(clpool):
+                        os.unlink(os.path.join(root, file))
+        for dir in dirs:
+            try:
+                os.rmdir(os.path.join(root, dir))
+            except OSError:
+                pass
         stats['files'] += len(files)
+    stats['files'] -= stats['removed']
+
     print 'make-changelog: file exporting finished'
     print '  * New packages unpacked: %d' % stats['unpack']
     print '  * New files created: %d' % stats['created']
@@ -261,9 +270,28 @@ def export_files(session, archive, clpool, progress=False):
     print '  * Unpack errors: %d' % stats['errors']
     print '  * Files available into changelog pool: %d' % stats['files']
 
+def generate_export_filelist(clpool):
+    clfiles = {}
+    for root, dirs, files in os.walk(clpool):
+        for file in [f for f in files if f != filelist]:
+            clpath = os.path.join(root, file).replace(clpool, '').strip('/')
+            source = clpath.split('/')[2]
+            elements = clpath.split('/')[3].split('_')
+            if source not in clfiles:
+                clfiles[source] = {}
+            if elements[0] == source:
+                if elements[1] not in clfiles[source]:
+                    clfiles[source][elements[1]] = []
+                clfiles[source][elements[1]].append(clpath)
+            else:
+                if elements[0] not in clfiles[source]:
+                    clfiles[source][elements[0]] = []
+                clfiles[source][elements[0]].append(clpath)
+    with open(os.path.join(clpool, filelist), 'w+') as fd:
+        safe_dump(clfiles, fd, default_flow_style=False)
+
 def main():
     Cnf = utils.get_conf()
-    cnf = Config()
     Arguments = [('h','help','Make-Changelog::Options::Help'),
                  ('a','archive','Make-Changelog::Options::Archive','HasArg'),
                  ('s','suite','Make-Changelog::Options::Suite','HasArg'),
@@ -294,10 +322,11 @@ def main():
     session = DBConn().session()
 
     if export:
-        if cnf.exportpath:
-            archive = session.query(Archive).filter_by(archive_name=Options['Archive']).one()
-            exportpath = os.path.join(Cnf['Dir::Export'], cnf.exportpath)
+        archive = session.query(Archive).filter_by(archive_name=Options['Archive']).one()
+        exportpath = archive.changelog
+        if exportpath:
             export_files(session, archive, exportpath, progress)
+            generate_export_filelist(exportpath)
         else:
             utils.fubar('No changelog export path defined')
     elif binnmu:
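For reference, a minimal sketch (not part of the patch) of how a consumer could read the filelist.yaml that the new generate_export_filelist() writes. It assumes the changelog-pool layout that export_files() creates above (component/prefix/source/file, e.g. main/f/foo/foo_1.0_changelog); the helper name changelog_paths and the example source/version are purely illustrative:

import os
from yaml import safe_load

def changelog_paths(clpool, source, key):
    """Return the changelog-pool paths recorded for a source package.

    `key` may be a version string (e.g. '1.0') or a suite name (e.g.
    'unstable'), mirroring the two kinds of entries the filelist holds.
    """
    with open(os.path.join(clpool, 'filelist.yaml')) as fd:
        clfiles = safe_load(fd) or {}
    return clfiles.get(source, {}).get(key, [])

With the layout assumed above, changelog_paths(clpool, 'foo', '1.0') would return entries such as 'main/f/foo/foo_1.0_changelog', relative to the changelog pool root.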