class SourceContentsWriter(object):
    '''
    SourceContentsWriter writes the Contents-source.gz files.

    One instance covers a single (suite, component) pair; the heavy lifting
    is done in SQL via query().
    '''
    def __init__(self, suite, component):
        self.suite = suite
        self.component = component
        # borrow the session already attached to the suite object
        self.session = suite.session()

    def query(self):
        '''
        Returns a query object that is doing most of the work.

        Side effect: creates and populates a session-local temp table
        (newest_sources) holding, for each source package name, the id of
        its newest version in this suite/component.
        '''
        params = {
            'suite_id': self.suite.suite_id,
            'component_id': self.component.component_id,
        }

        # "distinct on (source) ... order by source, version desc" keeps only
        # the first row per source name, i.e. the newest version.
        sql_create_temp = '''
create temp table newest_sources (
    id integer primary key,
    source text);

create index sources_binaries_by_source on newest_sources (source);

insert into newest_sources (id, source)
    select distinct on (source) s.id, s.source from source s
        join files_archive_map af on s.file = af.file_id
        where s.id in (select source from src_associations where suite = :suite_id)
            and af.component_id = :component_id
        order by source, version desc;'''
        self.session.execute(sql_create_temp, params=params)

        # one row per shipped file: its name plus a sorted, comma-separated
        # list of the source packages that contain it
        sql = '''
select sc.file, string_agg(s.source, ',' order by s.source) as pkglist
    from newest_sources s, src_contents sc
    where s.id = sc.source_id group by sc.file'''

        return self.session.query("file", "pkglist").from_statement(sql). \
            params(params)

    def formatline(self, filename, package_list):
        '''
        Returns a formatted string for the filename argument.
        '''
        return "%s\t%s\n" % (filename, package_list)

    def fetch(self):
        '''
        Yields a new line of the Contents-source.gz file in filename order.
        '''
        for filename, package_list in self.query().yield_per(100):
            yield self.formatline(filename, package_list)
        # end transaction to return connection to pool
        self.session.rollback()

    def get_list(self):
        '''
        Returns a list of lines for the Contents-source.gz file.
        '''
        # list() instead of a copy-comprehension: same result, clearer intent
        return list(self.fetch())

    def writer(self):
        '''
        Returns a writer object.
        '''
        values = {
            'archive': self.suite.archive.path,
            'suite': self.suite.suite_name,
            'component': self.component.component_name
        }
        return SourceContentsFileWriter(**values)

    def write_file(self):
        '''
        Write the output file.
        '''
        writer = self.writer()
        # renamed from "file" to avoid shadowing the builtin
        output = writer.open()
        for item in self.fetch():
            output.write(item)
        writer.close()
+
+
def binary_helper(suite_id, arch_id, overridetype_id, component_id):
    '''
    Writes the Contents file for one (suite, architecture, overridetype,
    component) combination and returns a list of strings identifying that
    combination for logging.

    This function is called in a new subprocess and multiprocessing wants a top
    level function.
    '''
    session = DBConn().session(work_mem = 1000)
    try:
        suite = Suite.get(suite_id, session)
        architecture = Architecture.get(arch_id, session)
        overridetype = OverrideType.get(overridetype_id, session)
        component = Component.get(component_id, session)
        log_message = [suite.suite_name, architecture.arch_string,
            overridetype.overridetype, component.component_name]
        contents_writer = BinaryContentsWriter(suite, architecture, overridetype, component)
        contents_writer.write_file()
    finally:
        # return the connection to the pool even if write_file() raises
        session.close()
    return log_message
+
def source_helper(suite_id, component_id):
    '''
    Writes the Contents-source file for one (suite, component) combination
    and returns a list of strings identifying that combination for logging.

    This function is called in a new subprocess and multiprocessing wants a top
    level function.
    '''
    session = DBConn().session(work_mem = 1000)
    try:
        suite = Suite.get(suite_id, session)
        component = Component.get(component_id, session)
        log_message = [suite.suite_name, 'source', component.component_name]
        contents_writer = SourceContentsWriter(suite, component)
        contents_writer.write_file()
    finally:
        # return the connection to the pool even if write_file() raises
        session.close()
    return log_message
+
+class ContentsWriter(object):
+ '''
+ Loop over all suites, architectures, overridetypes, and components to write
+ all contents files.
+ '''