diff --git a/dak/make_changelog.py b/dak/make_changelog.py
index 939e8be106aa3a8ae82d7464f919ff1f6b243a8f..ca8ed95eeadaf92907e50df2abbb203b2af8adab 100755
--- a/dak/make_changelog.py
+++ b/dak/make_changelog.py
@@ -57,7 +57,6 @@ from shutil import rmtree
 from yaml import safe_dump
 from daklib.dbconn import *
 from daklib import utils
-from daklib.config import Config
 from daklib.contents import UnpackedSource
 from daklib.regexes import re_no_epoch
 
@@ -173,7 +172,7 @@ def export_files(session, archive, clpool, progress=False):
 
     sources = {}
     unpack = {}
-    files = ('changelog', 'copyright', 'NEWS.Debian', 'README.Debian')
+    files = ('changelog', 'copyright', 'NEWS', 'NEWS.Debian', 'README.Debian')
     stats = {'unpack': 0, 'created': 0, 'removed': 0, 'errors': 0, 'files': 0}
     query = """SELECT DISTINCT s.source, su.suite_name AS suite, s.version, c.name || '/' || f.filename AS filename
                FROM source s
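
Note: the tuple of exported metadata files gains plain NEWS alongside NEWS.Debian, so sources that ship an unsuffixed NEWS file have it exported as well. A rough sketch of how such files would be picked out of an unpacked source tree (hypothetical helper and argument names, not the code in this file):

    import os
    import shutil

    EXPORTED = ('changelog', 'copyright', 'NEWS', 'NEWS.Debian', 'README.Debian')

    def copy_metadata(unpacked_root, destdir):
        # copy whichever of the known metadata files exist under debian/
        for name in EXPORTED:
            src = os.path.join(unpacked_root, 'debian', name)
            if os.path.isfile(src):
                shutil.copy2(src, os.path.join(destdir, name))
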
@@ -202,7 +201,7 @@ def export_files(session, archive, clpool, progress=False):
                     unpack[os.path.join(pool, sources[p][s][1])] = (path, set())
                 unpack[os.path.join(pool, sources[p][s][1])][1].add(s)
             else:
-                for file in glob('%s/%s_%s*' % (path, p, sources[p][s][0])):
+                for file in glob('%s/%s_%s_*' % (path, p, sources[p][s][0])):
                     link = '%s%s' % (s, file.split('%s_%s' \
                                       % (p, sources[p][s][0]))[1])
                     try:
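
Note: the added underscore anchors the glob at the field separator. With the old pattern a version such as 1.0 also matched files exported for 1.0.1, because the version was only a prefix of the wildcard. A small illustration with hypothetical pool paths and file names:

    from glob import glob

    path, p, version = 'clpool/main/h/hello', 'hello', '1.0'
    # old pattern: also picks up hello_1.0.1_changelog and friends
    loose  = glob('%s/%s_%s*'  % (path, p, version))
    # new pattern: only files belonging to exactly this version
    strict = glob('%s/%s_%s_*' % (path, p, version))
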
@@ -214,7 +213,7 @@ def export_files(session, archive, clpool, progress=False):
     for p in unpack.keys():
         package = os.path.splitext(os.path.basename(p))[0].split('_')
         try:
-            unpacked = UnpackedSource(p)
+            unpacked = UnpackedSource(p, clpool)
             tempdir = unpacked.get_root_directory()
             stats['unpack'] += 1
             if progress:
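
Note: UnpackedSource now receives the changelog pool as a second argument, presumably so the temporary unpack directory is created under clpool rather than in the default temp location, keeping the scratch space on the same filesystem as the export target. A minimal sketch of that kind of unpack step, assuming dpkg-source is available; this is not daklib's actual implementation:

    import os
    import subprocess
    import tempfile

    def unpack_dsc(dsc_path, tmp_base):
        # create the scratch directory under the caller-supplied base
        tempdir = tempfile.mkdtemp(dir=tmp_base)
        subprocess.check_call(['dpkg-source', '--no-check', '-x',
                               dsc_path, os.path.join(tempdir, 'source')])
        return tempdir
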
@@ -243,7 +242,7 @@ def export_files(session, archive, clpool, progress=False):
             print 'make-changelog: unable to unpack %s\n%s' % (p, e)
             stats['errors'] += 1
 
-    for root, dirs, files in os.walk(clpool):
+    for root, dirs, files in os.walk(clpool, topdown=False):
         files = [f for f in files if f != filelist]
         if len(files):
             if root != clpool:
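
Note: walking the pool bottom-up matters for the clean-up added in the next hunk: with topdown=False every directory is yielded after its children, so a directory emptied during the walk can be removed in the same pass. The difference in traversal order, on a hypothetical tree a/b/c:

    import os

    top_down  = [root for root, dirs, files in os.walk('a')]                 # a, a/b, a/b/c
    bottom_up = [root for root, dirs, files in os.walk('a', topdown=False)]  # a/b/c, a/b, a
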
@@ -256,6 +255,11 @@ def export_files(session, archive, clpool, progress=False):
                     if os.stat(os.path.join(root, file)).st_nlink ==  1:
                         stats['removed'] += 1
                         os.unlink(os.path.join(root, file))
+        for dir in dirs:
+            try:
+                os.rmdir(os.path.join(root, dir))
+            except OSError:
+                pass
         stats['files'] += len(files)
     stats['files'] -= stats['removed']
 
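Note: the existing clean-up relies on hardlink counts: a file whose link count has dropped to 1 is apparently no longer referenced by any of the per-suite links created earlier in this function, so it can be unlinked; the new loop then prunes directories left empty, which only works because the walk above is now bottom-up. The same idea as a standalone sketch (hypothetical pool path):

    import os

    def cleanup(pool):
        for root, dirs, files in os.walk(pool, topdown=False):
            for name in files:
                path = os.path.join(root, name)
                if os.stat(path).st_nlink == 1:      # nothing else links to it
                    os.unlink(path)
            for name in dirs:
                try:
                    os.rmdir(os.path.join(root, name))  # succeeds only if empty
                except OSError:
                    pass
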
@@ -288,7 +292,6 @@ def generate_export_filelist(clpool):
 
 def main():
     Cnf = utils.get_conf()
-    cnf = Config()
     Arguments = [('h','help','Make-Changelog::Options::Help'),
                  ('a','archive','Make-Changelog::Options::Archive','HasArg'),
                  ('s','suite','Make-Changelog::Options::Suite','HasArg'),
@@ -319,9 +322,9 @@ def main():
     session = DBConn().session()
 
     if export:
-        if cnf.exportpath:
-            archive = session.query(Archive).filter_by(archive_name=Options['Archive']).one()
-            exportpath = os.path.join(Cnf['Dir::Export'], cnf.exportpath)
+        archive = session.query(Archive).filter_by(archive_name=Options['Archive']).one()
+        exportpath = archive.changelog
+        if exportpath:
             export_files(session, archive, exportpath, progress)
             generate_export_filelist(exportpath)
         else:
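
Note: the export location now comes from the archive's own database row (the changelog column on Archive) instead of the Config()/Dir::Export setting, which is why the Config import and the cnf object disappear from this file. A hedged sketch of the resulting lookup, reusing the names visible in the diff ('ftp-master' is a made-up archive name):

    # assumes a daklib DBConn session and the Archive ORM mapping used above
    archive = session.query(Archive).filter_by(archive_name='ftp-master').one()
    if archive.changelog:
        export_files(session, archive, archive.changelog, progress)
        generate_export_filelist(archive.changelog)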