git.decadent.org.uk Git - dak.git/blobdiff - dak/make_changelog.py
Use correct db_name for MD5 hash
[dak.git] / dak / make_changelog.py
index 8fa172f63299b8ecdb6d7bfdb26e228e5531ca12..ca8ed95eeadaf92907e50df2abbb203b2af8adab 100755 (executable)
@@ -52,22 +52,24 @@ Generate changelog entry between two suites
 import os
 import sys
 import apt_pkg
-from commands import getstatusoutput
 from glob import glob
-from re import split
 from shutil import rmtree
+from yaml import safe_dump
 from daklib.dbconn import *
 from daklib import utils
-from daklib.config import Config
+from daklib.contents import UnpackedSource
+from daklib.regexes import re_no_epoch
 
 ################################################################################
 
+filelist = 'filelist.yaml'
+
 def usage (exit_code=0):
     print """Generate changelog between two suites
 
        Usage:
        make-changelog -s <suite> -b <base_suite> [OPTION]...
-       make-changelog -e
+       make-changelog -e -a <archive>
 
 Options:
 
@@ -76,7 +78,9 @@ Options:
   -b, --base-suite          suite to be taken as reference for comparison
   -n, --binnmu              display binNMUs uploads instead of source ones
 
-  -e, --export              export interesting files from source packages"""
+  -e, --export              export interesting files from source packages
+  -a, --archive             archive to fetch data from
+  -p, --progress            display progress status"""
 
     sys.exit(exit_code)
 
@@ -160,101 +164,153 @@ def display_changes(uploads, index):
         print upload[index]
         prev_upload = upload[0]
 
-def export_files(session, pool, clpool, temppath):
+def export_files(session, archive, clpool, progress=False):
     """
     Export interesting files from source packages.
     """
+    pool = os.path.join(archive.path, 'pool')
 
     sources = {}
-    query = """SELECT s.source, su.suite_name AS suite, s.version, f.filename
+    unpack = {}
+    files = ('changelog', 'copyright', 'NEWS', 'NEWS.Debian', 'README.Debian')
+    stats = {'unpack': 0, 'created': 0, 'removed': 0, 'errors': 0, 'files': 0}
+    query = """SELECT DISTINCT s.source, su.suite_name AS suite, s.version, c.name || '/' || f.filename AS filename
                FROM source s
+               JOIN newest_source n ON n.source = s.source AND n.version = s.version
                JOIN src_associations sa ON sa.source = s.id
                JOIN suite su ON su.id = sa.suite
                JOIN files f ON f.id = s.file
+               JOIN files_archive_map fam ON f.id = fam.file_id AND fam.archive_id = su.archive_id
+               JOIN component c ON fam.component_id = c.id
+               WHERE su.archive_id = :archive_id
                ORDER BY s.source, suite"""
 
-    for p in session.execute(query):
+    for p in session.execute(query, {'archive_id': archive.archive_id}):
         if not sources.has_key(p[0]):
             sources[p[0]] = {}
-        sources[p[0]][p[1]] = (p[2], p[3])
-
-    tempdir = utils.temp_dirname(parent=temppath)
-    os.rmdir(tempdir)
+        sources[p[0]][p[1]] = (re_no_epoch.sub('', p[2]), p[3])
 
     for p in sources.keys():
         for s in sources[p].keys():
-            files = (('changelog', True),
-                     ('copyright', True),
-                     ('NEWS.Debian', False),
-                     ('README.Debian', False))
-            path = os.path.join(clpool, sources[p][s][1].split('/')[0], \
-                                split('(^lib\S|^\S)', p)[1], p)
+            path = os.path.join(clpool, '/'.join(sources[p][s][1].split('/')[:-1]))
             if not os.path.exists(path):
                 os.makedirs(path)
+            if not os.path.exists(os.path.join(path, \
+                   '%s_%s_changelog' % (p, sources[p][s][0]))):
+                if not unpack.has_key(os.path.join(pool, sources[p][s][1])):
+                    unpack[os.path.join(pool, sources[p][s][1])] = (path, set())
+                unpack[os.path.join(pool, sources[p][s][1])][1].add(s)
+            else:
+                for file in glob('%s/%s_%s_*' % (path, p, sources[p][s][0])):
+                    link = '%s%s' % (s, file.split('%s_%s' \
+                                      % (p, sources[p][s][0]))[1])
+                    try:
+                        os.unlink(os.path.join(path, link))
+                    except OSError:
+                        pass
+                    os.link(os.path.join(path, file), os.path.join(path, link))
+
+    for p in unpack.keys():
+        package = os.path.splitext(os.path.basename(p))[0].split('_')
+        try:
+            unpacked = UnpackedSource(p, clpool)
+            tempdir = unpacked.get_root_directory()
+            stats['unpack'] += 1
+            if progress:
+                if stats['unpack'] % 100 == 0:
+                    sys.stderr.write('%d packages unpacked\n' % stats['unpack'])
+                elif stats['unpack'] % 10 == 0:
+                    sys.stderr.write('.')
             for file in files:
-                for f in glob(os.path.join(path, s + '.*')):
-                    os.unlink(f)
-            try:
-                for file in files:
-                    t = os.path.join(path, '%s_%s.*%s' % (p, sources[p][s][0], file[0]))
-                    if file[1] and not glob(t):
-                        raise OSError
-                    else:
-                        for f in glob(t):
-                            os.link(f, os.path.join(path, '%s.%s' % \
-                                    (s, os.path.basename(f).split('%s_%s.' \
-                                    % (p, sources[p][s][0]))[1])))
-            except OSError:
-                cmd = 'dpkg-source --no-check --no-copy -x %s %s' \
-                      % (os.path.join(pool, sources[p][s][1]), tempdir)
-                (result, output) = getstatusoutput(cmd)
-                if not result:
-                    for file in files:
+                for f in glob(os.path.join(tempdir, 'debian', '*%s' % file)):
+                    for s in unpack[p][1]:
+                        suite = os.path.join(unpack[p][0], '%s_%s' \
+                                % (s, os.path.basename(f)))
+                        version = os.path.join(unpack[p][0], '%s_%s_%s' % \
+                                  (package[0], package[1], os.path.basename(f)))
+                        if not os.path.exists(version):
+                            os.link(f, version)
+                            stats['created'] += 1
                         try:
-                            for f in glob(os.path.join(tempdir, 'debian', '*' + file[0])):
-                                for dest in os.path.join(path, '%s_%s.%s' \
-                                            % (p, sources[p][s][0], os.path.basename(f))), \
-                                            os.path.join(path, '%s.%s' % (s, os.path.basename(f))):
-                                    if not os.path.exists(dest):
-                                        os.link(f, dest)
-                        except:
-                            print 'make-changelog: unable to extract %s for %s_%s' \
-                                   % (os.path.basename(f), p, sources[p][s][0])
-                else:
-                    print 'make-changelog: unable to unpack %s_%s' % (p, sources[p][s][0])
-                    continue
-
-                rmtree(tempdir)
-
-    for root, dirs, files in os.walk(clpool):
+                            os.unlink(suite)
+                        except OSError:
+                            pass
+                        os.link(version, suite)
+                        stats['created'] += 1
+            unpacked.cleanup()
+        except Exception as e:
+            print 'make-changelog: unable to unpack %s\n%s' % (p, e)
+            stats['errors'] += 1
+
+    for root, dirs, files in os.walk(clpool, topdown=False):
+        files = [f for f in files if f != filelist]
         if len(files):
-            if root.split('/')[-1] not in sources.keys():
-                if os.path.exists(root):
-                    rmtree(root)
+            if root != clpool:
+                if root.split('/')[-1] not in sources.keys():
+                    if os.path.exists(root):
+                        stats['removed'] += len(os.listdir(root))
+                        rmtree(root)
             for file in files:
                 if os.path.exists(os.path.join(root, file)):
                     if os.stat(os.path.join(root, file)).st_nlink ==  1:
+                        stats['removed'] += 1
                         os.unlink(os.path.join(root, file))
+        for dir in dirs:
+            try:
+                os.rmdir(os.path.join(root, dir))
+            except OSError:
+                pass
+        stats['files'] += len(files)
+    stats['files'] -= stats['removed']
+
+    print 'make-changelog: file exporting finished'
+    print '  * New packages unpacked: %d' % stats['unpack']
+    print '  * New files created: %d' % stats['created']
+    print '  * New files removed: %d' % stats['removed']
+    print '  * Unpack errors: %d' % stats['errors']
+    print '  * Files available into changelog pool: %d' % stats['files']
+
+def generate_export_filelist(clpool):
+    clfiles = {}
+    for root, dirs, files in os.walk(clpool):
+        for file in [f for f in files if f != filelist]:
+            clpath = os.path.join(root, file).replace(clpool, '').strip('/')
+            source = clpath.split('/')[2]
+            elements = clpath.split('/')[3].split('_')
+            if source not in clfiles:
+                clfiles[source] = {}
+            if elements[0] == source:
+                if elements[1] not in clfiles[source]:
+                    clfiles[source][elements[1]] = []
+                clfiles[source][elements[1]].append(clpath)
+            else:
+                if elements[0] not in clfiles[source]:
+                    clfiles[source][elements[0]] = []
+                clfiles[source][elements[0]].append(clpath)
+    with open(os.path.join(clpool, filelist), 'w+') as fd:
+        safe_dump(clfiles, fd, default_flow_style=False)
 
 def main():
     Cnf = utils.get_conf()
-    cnf = Config()
     Arguments = [('h','help','Make-Changelog::Options::Help'),
+                 ('a','archive','Make-Changelog::Options::Archive','HasArg'),
                  ('s','suite','Make-Changelog::Options::Suite','HasArg'),
                  ('b','base-suite','Make-Changelog::Options::Base-Suite','HasArg'),
                  ('n','binnmu','Make-Changelog::Options::binNMU'),
-                 ('e','export','Make-Changelog::Options::export')]
+                 ('e','export','Make-Changelog::Options::export'),
+                 ('p','progress','Make-Changelog::Options::progress')]
 
-    for i in ['help', 'suite', 'base-suite', 'binnmu', 'export']:
+    for i in ['help', 'suite', 'base-suite', 'binnmu', 'export', 'progress']:
         if not Cnf.has_key('Make-Changelog::Options::%s' % (i)):
             Cnf['Make-Changelog::Options::%s' % (i)] = ''
 
-    apt_pkg.ParseCommandLine(Cnf, Arguments, sys.argv)
-    Options = Cnf.SubTree('Make-Changelog::Options')
+    apt_pkg.parse_commandline(Cnf, Arguments, sys.argv)
+    Options = Cnf.subtree('Make-Changelog::Options')
     suite = Cnf['Make-Changelog::Options::Suite']
     base_suite = Cnf['Make-Changelog::Options::Base-Suite']
     binnmu = Cnf['Make-Changelog::Options::binNMU']
     export = Cnf['Make-Changelog::Options::export']
+    progress = Cnf['Make-Changelog::Options::progress']
 
     if Options['help'] or not (suite and base_suite) and not export:
         usage()
@@ -266,9 +322,11 @@ def main():
     session = DBConn().session()
 
     if export:
-        if cnf.exportpath:
-            exportpath = os.path.join(Cnf['Dir::Export'], cnf.exportpath)
-            export_files(session, Cnf['Dir::Pool'], exportpath, Cnf['Dir::TempPath'])
+        archive = session.query(Archive).filter_by(archive_name=Options['Archive']).one()
+        exportpath = archive.changelog
+        if exportpath:
+            export_files(session, archive, exportpath, progress)
+            generate_export_filelist(exportpath)
         else:
             utils.fubar('No changelog export path defined')
     elif binnmu: