git.decadent.org.uk Git - dak.git/blobdiff - dak/make_changelog.py
Use correct db_name for MD5 hash
[dak.git] / dak / make_changelog.py
index 50307b158250271769b595e8315421fb03f4efd3..ca8ed95eeadaf92907e50df2abbb203b2af8adab 100755 (executable)
@@ -54,14 +54,16 @@ import sys
 import apt_pkg
 from glob import glob
 from shutil import rmtree
+from yaml import safe_dump
 from daklib.dbconn import *
 from daklib import utils
-from daklib.config import Config
 from daklib.contents import UnpackedSource
 from daklib.regexes import re_no_epoch
 
 ################################################################################
 
+filelist = 'filelist.yaml'
+
 def usage (exit_code=0):
     print """Generate changelog between two suites
 
@@ -170,7 +172,7 @@ def export_files(session, archive, clpool, progress=False):
 
     sources = {}
     unpack = {}
-    files = ('changelog', 'copyright', 'NEWS.Debian', 'README.Debian')
+    files = ('changelog', 'copyright', 'NEWS', 'NEWS.Debian', 'README.Debian')
     stats = {'unpack': 0, 'created': 0, 'removed': 0, 'errors': 0, 'files': 0}
     query = """SELECT DISTINCT s.source, su.suite_name AS suite, s.version, c.name || '/' || f.filename AS filename
                FROM source s
@@ -199,7 +201,7 @@ def export_files(session, archive, clpool, progress=False):
                     unpack[os.path.join(pool, sources[p][s][1])] = (path, set())
                 unpack[os.path.join(pool, sources[p][s][1])][1].add(s)
             else:
-                for file in glob('%s/%s_%s*' % (path, p, sources[p][s][0])):
+                for file in glob('%s/%s_%s_*' % (path, p, sources[p][s][0])):
                     link = '%s%s' % (s, file.split('%s_%s' \
                                       % (p, sources[p][s][0]))[1])
                     try:
@@ -211,7 +213,7 @@ def export_files(session, archive, clpool, progress=False):
     for p in unpack.keys():
         package = os.path.splitext(os.path.basename(p))[0].split('_')
         try:
-            unpacked = UnpackedSource(p)
+            unpacked = UnpackedSource(p, clpool)
             tempdir = unpacked.get_root_directory()
             stats['unpack'] += 1
             if progress:
@@ -240,20 +242,27 @@ def export_files(session, archive, clpool, progress=False):
             print 'make-changelog: unable to unpack %s\n%s' % (p, e)
             stats['errors'] += 1
 
-    for root, dirs, files in os.walk(clpool):
+    for root, dirs, files in os.walk(clpool, topdown=False):
+        files = [f for f in files if f != filelist]
         if len(files):
-            if root.split('/')[-1] not in sources.keys():
-                if os.path.exists(root):
-                    rmtree(root)
-                    stats['removed'] += 1
+            if root != clpool:
+                if root.split('/')[-1] not in sources.keys():
+                    if os.path.exists(root):
+                        stats['removed'] += len(os.listdir(root))
+                        rmtree(root)
             for file in files:
                 if os.path.exists(os.path.join(root, file)):
                     if os.stat(os.path.join(root, file)).st_nlink ==  1:
-                        os.unlink(os.path.join(root, file))
                         stats['removed'] += 1
-
-    for root, dirs, files in os.walk(clpool):
+                        os.unlink(os.path.join(root, file))
+        for dir in dirs:
+            try:
+                os.rmdir(os.path.join(root, dir))
+            except OSError:
+                pass
         stats['files'] += len(files)
+    stats['files'] -= stats['removed']
+
     print 'make-changelog: file exporting finished'
     print '  * New packages unpacked: %d' % stats['unpack']
     print '  * New files created: %d' % stats['created']
@@ -261,9 +270,28 @@ def export_files(session, archive, clpool, progress=False):
     print '  * Unpack errors: %d' % stats['errors']
     print '  * Files available into changelog pool: %d' % stats['files']
 
+def generate_export_filelist(clpool):
+    clfiles = {}
+    for root, dirs, files in os.walk(clpool):
+        for file in [f for f in files if f != filelist]:
+            clpath = os.path.join(root, file).replace(clpool, '').strip('/')
+            source = clpath.split('/')[2]
+            elements = clpath.split('/')[3].split('_')
+            if source not in clfiles:
+                clfiles[source] = {}
+            if elements[0] == source:
+                if elements[1] not in clfiles[source]:
+                    clfiles[source][elements[1]] = []
+                clfiles[source][elements[1]].append(clpath)
+            else:
+                if elements[0] not in clfiles[source]:
+                    clfiles[source][elements[0]] = []
+                clfiles[source][elements[0]].append(clpath)
+    with open(os.path.join(clpool, filelist), 'w+') as fd:
+        safe_dump(clfiles, fd, default_flow_style=False)
+
 def main():
     Cnf = utils.get_conf()
-    cnf = Config()
     Arguments = [('h','help','Make-Changelog::Options::Help'),
                  ('a','archive','Make-Changelog::Options::Archive','HasArg'),
                  ('s','suite','Make-Changelog::Options::Suite','HasArg'),
@@ -294,10 +322,11 @@ def main():
     session = DBConn().session()
 
     if export:
-        if cnf.exportpath:
-            archive = session.query(Archive).filter_by(archive_name=Options['Archive']).one()
-            exportpath = os.path.join(Cnf['Dir::Export'], cnf.exportpath)
+        archive = session.query(Archive).filter_by(archive_name=Options['Archive']).one()
+        exportpath = archive.changelog
+        if exportpath:
             export_files(session, archive, exportpath, progress)
+            generate_export_filelist(exportpath)
         else:
             utils.fubar('No changelog export path defined')
     elif binnmu: