diff --git a/dak/generate_filelist.py b/dak/generate_filelist.py
index 2a6d218badf2e6e12a6d2b9a43151a4400d4906c..faf08bd7a8680e59802d4bdf8c12d4ec0a68a21b 100755
--- a/dak/generate_filelist.py
+++ b/dak/generate_filelist.py
@@ -39,7 +39,7 @@ Generate file lists for apt-ftparchive.
 from daklib.dbconn import *
 from daklib.config import Config
 from daklib import utils, daklog
-from multiprocessing import Pool
+from daklib.dakmultiprocessing import DakProcessPool, PROC_STATUS_SUCCESS, PROC_STATUS_SIGNALRAISED
 import apt_pkg, os, stat, sys
 
 from daklib.lists import getSources, getBinaries, getArchAll
@@ -72,11 +72,13 @@ def writeSourceList(suite_id, component_id, incremental_mode):
     (file, timestamp) = listPath(suite, component,
             incremental_mode = incremental_mode)
 
+    message = "sources list for %s %s" % (suite.suite_name, component.component_name)
+
     for _, filename in getSources(suite, component, session, timestamp):
         file.write(filename + '\n')
-    session.close()
+    session.rollback()
     file.close()
-    return "sources list for %s %s" % (suite.suite_name, component.component_name)
+    return (PROC_STATUS_SUCCESS, message)
 
 def writeAllList(suite_id, component_id, architecture_id, type, incremental_mode):
     session = DBConn().session()
@@ -86,12 +88,14 @@ def writeAllList(suite_id, component_id, architecture_id, type, incremental_mode
     (file, timestamp) = listPath(suite, component, architecture, type,
             incremental_mode)
 
+    message = "all list for %s %s (arch=%s, type=%s)" % (suite.suite_name, component.component_name, architecture.arch_string, type)
+
     for _, filename in getArchAll(suite, component, architecture, type,
             session, timestamp):
         file.write(filename + '\n')
-    session.close()
+    session.rollback()
     file.close()
-    return "all list for %s %s (arch=%s, type=%s)" % (suite.suite_name, component.component_name, architecture.arch_string, type)
+    return (PROC_STATUS_SUCCESS, message)
 
 def writeBinaryList(suite_id, component_id, architecture_id, type, incremental_mode):
     session = DBConn().session()
@@ -101,12 +105,14 @@ def writeBinaryList(suite_id, component_id, architecture_id, type, incremental_m
     (file, timestamp) = listPath(suite, component, architecture, type,
             incremental_mode)
 
+    message = "binary list for %s %s (arch=%s, type=%s)" % (suite.suite_name, component.component_name, architecture.arch_string, type)
+
     for _, filename in getBinaries(suite, component, architecture, type,
             session, timestamp):
         file.write(filename + '\n')
-    session.close()
+    session.rollback()
     file.close()
-    return "binary list for %s %s (arch=%s, type=%s)" % (suite.suite_name, component.component_name, architecture.arch_string, type)
+    return (PROC_STATUS_SUCCESS, message)
 
 def usage():
     print """Usage: dak generate_filelist [OPTIONS]
@@ -126,7 +132,7 @@ Incremental mode appends only newer files to existing lists."""
 
 def main():
     cnf = Config()
-    Logger = daklog.Logger(cnf, 'generate-filelist')
+    Logger = daklog.Logger('generate-filelist')
     Arguments = [('h', "help",         "Filelist::Options::Help"),
                  ('s', "suite",        "Filelist::Options::Suite", "HasArg"),
                  ('c', "component",    "Filelist::Options::Component", "HasArg"),
@@ -149,11 +155,11 @@ def main():
         cnf['Filelist::Options::Architecture'] = ','.join(architectures).encode()
     cnf['Filelist::Options::Help'] = ''
     cnf['Filelist::Options::Incremental'] = ''
-    apt_pkg.ParseCommandLine(cnf.Cnf, Arguments, sys.argv)
-    Options = cnf.SubTree("Filelist::Options")
+    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
+    Options = cnf.subtree("Filelist::Options")
     if Options['Help']:
         usage()
-    pool = Pool()
+    pool = DakProcessPool()
     query_suites = query_suites. \
         filter(Suite.suite_name.in_(utils.split_args(Options['Suite'])))
     query_components = query_components. \
@@ -161,8 +167,15 @@ def main():
     query_architectures = query_architectures. \
         filter(Architecture.arch_string.in_(utils.split_args(Options['Architecture'])))
 
-    def log(message):
-        Logger.log([message])
+    def parse_results(message):
+        # Split out into (code, msg)
+        code, msg = message
+        if code == PROC_STATUS_SUCCESS:
+            Logger.log([msg])
+        elif code == PROC_STATUS_SIGNALRAISED:
+            Logger.log(['E: Subprocess received signal ', msg])
+        else:
+            Logger.log(['E: ', msg])
 
     for suite in query_suites:
         suite_id = suite.suite_id
@@ -174,26 +187,31 @@ def main():
                     pass
                 elif architecture.arch_string == 'source':
                     pool.apply_async(writeSourceList,
-                        (suite_id, component_id, Options['Incremental']), callback=log)
+                        (suite_id, component_id, Options['Incremental']), callback=parse_results)
                 elif architecture.arch_string == 'all':
                     pool.apply_async(writeAllList,
                         (suite_id, component_id, architecture_id, 'deb',
-                            Options['Incremental']), callback=log)
+                            Options['Incremental']), callback=parse_results)
                     pool.apply_async(writeAllList,
                         (suite_id, component_id, architecture_id, 'udeb',
-                            Options['Incremental']), callback=log)
+                            Options['Incremental']), callback=parse_results)
                 else: # arch any
                     pool.apply_async(writeBinaryList,
                         (suite_id, component_id, architecture_id, 'deb',
-                            Options['Incremental']), callback=log)
+                            Options['Incremental']), callback=parse_results)
                     pool.apply_async(writeBinaryList,
                         (suite_id, component_id, architecture_id, 'udeb',
-                            Options['Incremental']), callback=log)
+                            Options['Incremental']), callback=parse_results)
     pool.close()
     pool.join()
+
     # this script doesn't change the database
     session.close()
 
+    Logger.close()
+
+    sys.exit(pool.overall_status())
+
 if __name__ == '__main__':
     main()
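
The substance of this patch is the move from the stock multiprocessing.Pool to DakProcessPool: each worker now returns a (status, message) tuple instead of a bare string, the parse_results callback logs according to the status code, and main() exits with pool.overall_status(). Below is a minimal sketch of that protocol using only the standard-library pool; the numeric values of PROC_STATUS_SUCCESS / PROC_STATUS_SIGNALRAISED, the overall-status bookkeeping, and the write_list stand-in with its example suite/component names are assumptions for illustration only, since the real definitions live in daklib.dakmultiprocessing and are not part of this diff.

from multiprocessing import Pool

# Assumed status codes; the real constants come from daklib.dakmultiprocessing.
PROC_STATUS_SUCCESS = 0
PROC_STATUS_SIGNALRAISED = 1

def write_list(name):
    # Stand-in for writeSourceList/writeAllList/writeBinaryList: do the work,
    # then hand back a (status, message) tuple for the parent's callback.
    return (PROC_STATUS_SUCCESS, "list for %s" % name)

# Rough stand-in for the bookkeeping DakProcessPool.overall_status() is
# assumed to do internally.
overall = [PROC_STATUS_SUCCESS]

def parse_results(result):
    # Runs in the parent process, like parse_results() in the patch.
    code, msg = result
    if code == PROC_STATUS_SUCCESS:
        print(msg)
    else:
        print('E: %s' % msg)
        overall[0] = max(overall[0], code)

if __name__ == '__main__':
    pool = Pool()
    for name in ('unstable/main', 'unstable/contrib'):
        pool.apply_async(write_list, (name,), callback=parse_results)
    pool.close()
    pool.join()
    # The patched main() calls sys.exit(pool.overall_status()); here we exit
    # with whatever status the callbacks accumulated.
    raise SystemExit(overall[0])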