# Populate deb_contents from the override table: for every override, find the
# binaries of that package in the override's suite and copy each of their
# bin_contents file rows into deb_contents, tagged with the override's
# section/component.
for override in s.query(Override).all():
    # BUG FIX: the original SQL embedded the literal text
    # "override.package" instead of binding the value, so the query could
    # never match; :package is now a bound parameter.
    binaries = s.execute("""SELECT b.binary_id, ba.arch
                            FROM binaries b
                            JOIN bin_associations ba ON ba.binary_id=b.binary_id
                            WHERE ba.suite=:suite
                            AND b.package=:package""",
                         {'suite': override.suite, 'package': override.package})
    while True:
        binary = binaries.fetchone()
        if not binary:
            break

        filenames = s.execute("""SELECT file from bin_contents where binary_id=:id""",
                              {'id': binary.binary_id})
        while True:
            filename = filenames.fetchone()
            # BUG FIX: this previously re-tested "binary", which is always
            # truthy here, so the loop never terminated once the filename
            # cursor was exhausted (and then inserted None rows).
            if not filename:
                break

            # The original had two byte-identical INSERT branches for
            # override types 7 and 9; they are collapsed here.
            # NOTE(review): type 9 presumably means udeb and may have been
            # intended to target a udeb_contents table instead -- confirm.
            if override.type in (7, 9):
                s.execute("""INSERT INTO deb_contents (file,section,package,binary_id,arch,suite,component)
                             VALUES (:filename, :section, :package, :binary_id, :arch, :suite, :component);""",
                          {'filename': filename,
                           'section': override.section,
                           'package': override.package,
                           'binary_id': binary.binary_id,
                           'arch': binary.arch,
                           'suite': override.suite,
                           'component': override.component})
+
+# def bootstrap(self):
+# """
+# scan the existing debs in the pool to populate the contents database tables
+# """
+# pooldir = Config()[ 'Dir::Pool' ]
+
+# s = DBConn().session()
+
+# for suite in s.query(Suite).all():
+# for arch in get_suite_architectures(suite.suite_name, skipsrc=True, skipall=True, session=s):
+# q = s.query(BinAssociation).join(Suite)
+# q = q.join(Suite).filter_by(suite_name=suite.suite_name)
+# q = q.join(DBBinary).join(Architecture).filter_by(arch.arch_string)
+# for ba in q:
+# filename = ba.binary.poolfile.filename
+# # Check for existing contents
+# existingq = s.query(ContentAssociations).filter_by(binary_pkg=ba.binary_id).limit(1)
+# if existingq.count() > 0:
+# log.debug( "already imported: %s" % (filename))
+# else:
+# # We don't have existing contents so import them
+# log.debug( "scanning: %s" % (filename) )
+# debfile = os.path.join(pooldir, filename)
+# if os.path.exists(debfile):
+# Binary(debfile, self.reject).scan_package(ba.binary_id, True)
+# else:
+# log.error("missing .deb: %s" % filename)
+ def generate(self):
+ """
+ Generate contents files for both deb and udeb
+ """
+ # Register the "arches_q" prepared statement before generation; it is
+ # consumed later via self._arches() during deb_generate().
+ DBConn().prepare("arches_q", arches_q)
+ self.deb_generate()
+# NOTE(review): udeb generation is disabled here -- confirm this is
+# intentional before re-enabling.
+# self.udeb_generate()
+
def deb_generate(self):
    """
    Generate Contents-$arch.gz files for every available arch in each given suite.

    Work is streamed through a five-stage thread pipeline
    (query -> ingest -> sort -> output -> gzip) whose stages are linked
    by one-slot hand-off queues.
    """
    session = DBConn().session()
    debtype_id = DBConn().get_override_type_id("deb")
    suites = self._suites()

    # One-slot queues connecting consecutive pipeline stages.
    to_query, to_ingest, to_sort, to_output, to_gzip = \
        [OneAtATime() for _ in range(5)]

    # NOTE: extra IngestThreads were tried here previously and actually
    # made things worse, so a single ingest stage is used.
    pipeline = [
        QueryThread(to_query, to_ingest),
        IngestThread(to_ingest, to_sort),
        SortThread(to_sort, to_output),
        OutputThread(to_output, to_gzip),
        GzipThread(to_gzip, None),
    ]
    for stage in pipeline:
        stage.start()

    # Feed one ContentFile work unit per (suite, arch) into the pipeline.
    for suite_name in [name.lower() for name in suites]:
        suite_id = DBConn().get_suite_id(suite_name)

        for (arch_id, arch_str) in self._arches(session, suite_id):
            print( "suite: %s, arch: %s time: %s" %(suite_id, arch_id, datetime.datetime.now().isoformat()) )

            # Gzip compression happens in the final pipeline stage, hence
            # no ".gz" suffix on the name handed in here.
            filename = "dists/%s/Contents-%s" % (suite_name, arch_str)
            to_query.enqueue(ContentFile(filename, suite_name, suite_id, arch_str, arch_id))

    # Signal end-of-stream and wait for the last (gzip) stage to drain.
    to_query.enqueue(EndOfContents())
    pipeline[-1].join()
+
def udeb_generate(self):
    """
    Generate Contents-udeb-$arch files for every available arch in each
    given suite, for both the main and non-free installer sections.
    """
    session = DBConn().session()
    udebtype_id = DBConn().get_override_type_id("udeb")
    suites = self._suites()

    # BUG FIX: a deb_generate()-style five-thread pipeline used to be
    # constructed and started here but was never fed any work and never
    # joined, leaving the threads blocked forever on empty queues.  It has
    # been removed; udeb contents are written synchronously below.

    for section, fn_pattern in [("debian-installer", "dists/%s/Contents-udeb-%s"),
                                ("non-free/debian-installer", "dists/%s/Contents-udeb-nf-%s")]:

        section_id = DBConn().get_section_id(section)  # all udebs should be here
        if section_id == -1:
            continue

        # Get our suites, and the architectures
        for suite in [i.lower() for i in suites]:
            suite_id = DBConn().get_suite_id(suite)

            # BUG FIX: _arches() yields (arch_id, arch_str) pairs (see
            # deb_generate); the original failed to unpack them and then
            # fed the whole tuple to a %d placeholder.
            for (arch_id, arch_str) in self._arches(session, suite_id):

                writer = GzippedContentWriter(fn_pattern % (suite, arch_str))
                try:
                    # BUG FIX: the EXECUTE had three %d placeholders for
                    # four arguments, which raises TypeError.  The argument
                    # order matches the original call site.
                    # NOTE(review): confirm udeb_contents_q's parameter
                    # order is (suite, type, section, arch).
                    results = session.execute("EXECUTE udeb_contents_q(%d,%d,%d,%d)"
                                              % (suite_id, udebtype_id, section_id, arch_id))

                    # BUG FIX: rows must be fetched from the execute()
                    # result, not from the session itself.
                    while True:
                        r = results.fetchone()
                        if not r:
                            break

                        # renamed from "section" to avoid shadowing the
                        # outer loop variable
                        filename, section_name, package, arch = r
                        writer.write(filename, section_name, package)
                finally:
                    writer.close()