#!/usr/bin/env python
"""
Create all the contents files

@contact: Debian FTPMaster <ftpmaster@debian.org>
@copyright: 2008, 2009 Michael Casadevall <mcasadevall@debian.org>
@copyright: 2009 Mike O'Connor <stew@debian.org>
@license: GNU General Public License version 2 or later
"""
################################################################################

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

################################################################################
# <Ganneff> there is the idea to slowly replace contents files
# <Ganneff> with a new generation of such files.
# <Ganneff> having more info.

# <Ganneff> of course that wont help for now where we need to generate them :)

################################################################################
import sys
import os
import logging
import threading
import traceback
import datetime
import apt_pkg

from daklib import utils
from daklib.binary import Binary
from daklib.config import Config
from daklib.dbconn import *

################################################################################
def usage (exit_code=0):
    print """Usage: dak contents [options] command [arguments]

COMMANDS
    generate
        generate Contents-$arch.gz files

    bootstrap_bin
        scan the debs in the existing pool and load contents into the bin_contents table

    bootstrap
        copy data from the bin_contents table into the deb_contents / udeb_contents tables

    cruft
        remove files/paths which are no longer referenced by a binary

OPTIONS
     -h, --help
        show this help and exit

     -v, --verbose
        show verbose information messages

     -q, --quiet
        suppress all output but errors

     -s, --suite={stable,testing,unstable,...}
        only operate on a single suite
"""
    sys.exit(exit_code)

################################################################################
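# Example invocation (illustrative only; the suite name is just an example):
#
#     dak contents -s unstable generate
#
# The -s/--suite flag ends up in Contents::Options::Suite (see main() below),
# which _suites()/which_suites() use to restrict the run to that single suite.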
# where in dak.conf all of our configuration will be stowed
options_prefix = "Contents"
options_prefix = "%s::Options" % options_prefix

log = logging.getLogger()

################################################################################
class EndOfContents(object):
    """
    A sentinel object marking the end of the filename stream
    """
    pass
class OneAtATime(object):
    """
    a one-slot queue which sits between multiple possible producers
    and multiple possible consumers
    """
    def __init__(self):
        self.next_in_line = None
        self.read_lock = threading.Condition()
        self.write_lock = threading.Condition()

    def enqueue(self, next):
        self.write_lock.acquire()
        while self.next_in_line:
            self.write_lock.wait()

        assert( not self.next_in_line )
        self.next_in_line = next
        self.write_lock.release()
        self.read_lock.acquire()
        self.read_lock.notify()
        self.read_lock.release()

    def dequeue(self):
        self.read_lock.acquire()
        while not self.next_in_line:
            self.read_lock.wait()

        result = self.next_in_line
        self.next_in_line = None
        self.read_lock.release()
        self.write_lock.acquire()
        self.write_lock.notify()
        self.write_lock.release()
        return result
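# A minimal usage sketch for OneAtATime (illustrative, not part of the original
# code): a producer thread hands items to a consumer thread one at a time, and
# an EndOfContents instance marks the end of the stream, e.g.
#
#     handoff = OneAtATime()
#
#     # producer thread:
#     for cf in contents_files:          # 'contents_files' is hypothetical
#         handoff.enqueue(cf)
#     handoff.enqueue(EndOfContents())
#
#     # consumer thread:
#     while True:
#         item = handoff.dequeue()
#         if isinstance(item, EndOfContents):
#             break
#         item.query()                   # or whatever work this stage does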
class ContentsWorkThread(threading.Thread):
    """
    Base class for the threads which pass ContentFile objects down the pipeline.
    """
    def __init__(self, upstream, downstream):
        threading.Thread.__init__(self)
        self.upstream = upstream
        self.downstream = downstream

    def run(self):
        while True:
            try:
                contents_file = self.upstream.dequeue()
                if isinstance(contents_file, EndOfContents):
                    if self.downstream:
                        self.downstream.enqueue(contents_file)
                    break

                s = datetime.datetime.now()
                print("%s start: %s" % (self, contents_file))
                self._run(contents_file)
                print("%s finished: %s in %d seconds" % (self, contents_file, (datetime.datetime.now()-s).seconds))
                if self.downstream:
                    self.downstream.enqueue(contents_file)
            except:
                traceback.print_exc()
class QueryThread(ContentsWorkThread):
    def __init__(self, upstream, downstream):
        ContentsWorkThread.__init__(self, upstream, downstream)

    def __str__(self):
        return "QueryThread"
    __repr__ = __str__

    def _run(self, contents_file):
        contents_file.query()

class IngestThread(ContentsWorkThread):
    def __init__(self, upstream, downstream):
        ContentsWorkThread.__init__(self, upstream, downstream)

    def __str__(self):
        return "IngestThread"
    __repr__ = __str__

    def _run(self, contents_file):
        contents_file.ingest()

class SortThread(ContentsWorkThread):
    def __init__(self, upstream, downstream):
        ContentsWorkThread.__init__(self, upstream, downstream)

    def __str__(self):
        return "SortThread"
    __repr__ = __str__

    def _run(self, contents_file):
        contents_file.sorted_keys = sorted(contents_file.filenames.keys())

class OutputThread(ContentsWorkThread):
    def __init__(self, upstream, downstream):
        ContentsWorkThread.__init__(self, upstream, downstream)

    def __str__(self):
        return "OutputThread"
    __repr__ = __str__

    def _run(self, contents_file):
        contents_file.open_file()
        for fname in contents_file.sorted_keys:
            contents_file.filehandle.write("%s\t%s\n" % (fname, contents_file.filenames[fname]))
        contents_file.sorted_keys = None
        contents_file.filenames.clear()
        # make sure everything is flushed to disk before the gzip stage picks
        # the file up
        contents_file.filehandle.close()

class GzipThread(ContentsWorkThread):
    def __init__(self, upstream, downstream):
        ContentsWorkThread.__init__(self, upstream, downstream)

    def __str__(self):
        return "GzipThread"
    __repr__ = __str__

    def _run(self, contents_file):
        os.system("gzip -f %s" % contents_file.filename)
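# How the stages above fit together (summary comment, not original code):
# deb_generate() below chains five OneAtATime hand-offs so each ContentFile
# flows through
#
#     QueryThread -> IngestThread -> SortThread -> OutputThread -> GzipThread
#
# i.e. run the SQL query, read the rows into ContentFile.filenames, sort the
# keys, write "filename<TAB>section/package" lines to dists/<suite>/Contents-<arch>,
# and finally gzip the resulting file in place.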
class ContentFile(object):
    header = None

    def __init__(self, filename, suite_str, suite_id):
        self.filename = filename
        self.filenames = {}
        self.sorted_keys = None
        self.suite_str = suite_str
        self.suite_id = suite_id
        self.session = None
        self.filehandle = None
        self.results = None

    def __str__(self):
        return self.filename
    __repr__ = __str__

    def cleanup(self):
        self.filenames = None
        self.sorted_keys = None
        self.filehandle.close()
        self.session.close()

    def ingest(self):
        while True:
            r = self.results.fetchone()
            if not r:
                break
            filename, package = r
            self.filenames[filename] = package

    def open_file(self):
        """
        opens the contents file for writing (it is gzipped later by GzipThread)
        """
        # prefix the relative path with the configured root so that the
        # GzipThread later compresses the same file we write here
        self.filename = Config()["Contents::Root"] + self.filename
        filedir = os.path.dirname(self.filename)
        if not os.path.isdir(filedir):
            os.makedirs(filedir)
        self.filehandle = open(self.filename, "w")
        self._write_header()

    def _write_header(self):
        self._get_header()
        if ContentFile.header:
            self.filehandle.write(ContentFile.header)

    def _get_header(self):
        """
        Internal method to return the header for Contents.gz files

        This is boilerplate which explains the contents of the file and how
        it can be used.
        """
        if not ContentFile.header:
            if Config().has_key("Contents::Header"):
                try:
                    h = open(os.path.join( Config()["Dir::Templates"],
                                           Config()["Contents::Header"] ), "r")
                    ContentFile.header = h.read()
                    h.close()
                except:
                    log.error( "error opening header file: %s\n%s" % (Config()["Contents::Header"],
                                                                      traceback.format_exc() ))
                    ContentFile.header = None
            else:
                ContentFile.header = None

        return ContentFile.header
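# The class above relies on two dak.conf settings: Contents::Root (the
# directory prefix the per-suite Contents files are written under) and,
# optionally, Contents::Header (a template file under Dir::Templates whose
# text is written at the top of each file).  A purely illustrative snippet,
# assuming the usual apt_pkg-style dak.conf syntax and made-up paths:
#
#     Contents
#     {
#       Root "/srv/ftp.debian.org/ftp/";
#       Header "contents";
#     };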
class DebContentFile(ContentFile):
    def __init__(self, filename, suite_str, suite_id, arch_str, arch_id):
        ContentFile.__init__(self, filename, suite_str, suite_id)
        self.arch_str = arch_str
        self.arch_id = arch_id

    def query(self):
        self.session = DBConn().session()

        # arch=2 appears to hard-code the arch id of 'all'; GROUP BY is needed
        # because comma_separated_list() is an aggregate over the packages
        # sharing a filename.
        self.results = self.session.execute("""SELECT filename, comma_separated_list(section || '/' || package)
        FROM deb_contents
        WHERE ( arch=2 OR arch = :arch ) AND suite = :suite
        GROUP BY filename
        """, {'arch': self.arch_id, 'suite': self.suite_id})
class UdebContentFile(ContentFile):
    def __init__(self, filename, suite_str, suite_id):
        ContentFile.__init__(self, filename, suite_str, suite_id)

    def query(self):
        self.session = DBConn().session()

        self.results = self.session.execute("""SELECT filename, comma_separated_list(section || '/' || package)
        FROM udeb_contents
        WHERE suite = :suite
        GROUP BY filename
        """, {'suite': self.suite_id})
class Contents(object):
    """
    Class capable of generating Contents-$arch.gz files
    """
    def reject(self, message):
        log.error("E: %s" % message)

    def cruft(self):
        """
        remove files/paths from the DB which are no longer referenced
        by binaries and clean the temporary table
        """
        s = DBConn().session()

        # clear out all of the temporarily stored content associations
        # this should be run only after process-accepted (p-a) has run; after
        # a p-a run we should have either accepted or rejected every package,
        # so there should no longer be anything in the queue
        s.query(PendingContentAssociation).delete()

        # delete any filenames we are storing which have no binary associated
        # with them
        cafq = s.query(ContentAssociation.filename_id).distinct()
        cfq = s.query(ContentFilename)
        cfq = cfq.filter(~ContentFilename.cafilename_id.in_(cafq))
        cfq.delete(synchronize_session=False)

        # delete any paths we are storing which have no binary associated with
        # them
        capq = s.query(ContentAssociation.filepath_id).distinct()
        cpq = s.query(ContentFilepath)
        cpq = cpq.filter(~ContentFilepath.cafilepath_id.in_(capq))
        cpq.delete(synchronize_session=False)

        s.commit()
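    # For reference (not part of the original code), the two ORM anti-joins
    # above boil down to DELETE statements of roughly this shape:
    #
    #     DELETE FROM content_file_names
    #     WHERE id NOT IN (SELECT DISTINCT filename FROM content_associations);
    #
    # The exact table and column names depend on the daklib.dbconn mappings,
    # so treat this as a sketch of the pattern rather than the literal SQL.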
    def bootstrap_bin(self):
        """
        scan the existing debs in the pool to populate the bin_contents table
        """
        pooldir = Config()['Dir::Pool']

        s = DBConn().session()

        for binary in s.query(DBBinary).yield_per(100):
            print("binary: %s" % binary.package)
            filename = binary.poolfile.filename

            # Check for existing contents
            existingq = s.execute("select 1 from bin_contents where binary_id=:id", {'id': binary.binary_id})
            if existingq.fetchone():
                log.debug("already imported: %s" % (filename))
                continue

            # We don't have existing contents so import them
            log.debug("scanning: %s" % (filename))

            debfile = os.path.join(pooldir, filename)
            if os.path.exists(debfile):
                Binary(debfile, self.reject).scan_package(binary.binary_id, True)
            else:
                log.error("missing .deb: %s" % filename)
    def bootstrap(self):
        """
        scan the existing debs in the pool to populate the contents database tables
        """
        s = DBConn().session()

        # get a mapping of all the override types we care about (right now .deb and .udeb)
        override_type_map = {}
        for override_type in s.query(OverrideType).all():
            if override_type.overridetype.endswith('deb'):
                override_type_map[override_type.overridetype_id] = override_type.overridetype

        for override in s.query(Override).yield_per(100):
            if not override_type_map.has_key(override.overridetype_id):
                # this isn't an override we care about
                continue

            binaries = s.execute("""SELECT b.id, b.architecture
                                    FROM binaries b
                                    JOIN bin_associations ba ON ba.bin=b.id
                                    WHERE ba.suite=:suite
                                    AND b.package=:package""", {'suite': override.suite_id, 'package': override.package})
            while True:
                binary = binaries.fetchone()
                if not binary:
                    break

                exists = s.execute("SELECT 1 FROM %s_contents WHERE binary_id=:id LIMIT 1" % override_type_map[override.overridetype_id], {'id': binary.id})
                if exists.fetchone():
                    # contents for this binary have already been copied over
                    continue

                s.execute("""INSERT INTO %s_contents (filename,section,package,binary_id,arch,suite)
                             SELECT file, :section, :package, :binary_id, :arch, :suite
                             FROM bin_contents
                             WHERE binary_id=:binary_id""" % override_type_map[override.overridetype_id],
                          {'section': override.section_id,
                           'package': override.package,
                           'binary_id': binary.id,
                           'arch': binary.architecture,
                           'suite': override.suite_id})

        s.commit()
    def generate(self):
        """
        Generate contents files for both deb and udeb
        """
        self.deb_generate()
        # the udeb pipeline below is only partially wired up, so it is not
        # invoked here yet
        # self.udeb_generate()

    def deb_generate(self):
        """
        Generate Contents-$arch.gz files for every available arch in each given suite.
        """
        session = DBConn().session()
        debtype_id = get_override_type("deb", session)
        suites = self._suites()

        inputtoquery = OneAtATime()
        querytoingest = OneAtATime()
        ingesttosort = OneAtATime()
        sorttooutput = OneAtATime()
        outputtogzip = OneAtATime()

        qt = QueryThread(inputtoquery, querytoingest)
        it = IngestThread(querytoingest, ingesttosort)
        st = SortThread(ingesttosort, sorttooutput)
        ot = OutputThread(sorttooutput, outputtogzip)
        gt = GzipThread(outputtogzip, None)

        qt.start()
        it.start()
        st.start()
        ot.start()
        gt.start()

        # Get our suites, and the architectures
        for suite in [i.lower() for i in suites]:
            suite_id = get_suite(suite, session).suite_id
            print("got suite_id: %s for suite: %s" % (suite_id, suite))
            arch_list = self._arches(suite_id, session)

            for (arch_id, arch_str) in arch_list:
                print("suite: %s, arch: %s time: %s" % (suite_id, arch_id, datetime.datetime.now().isoformat()))

                filename = "dists/%s/Contents-%s" % (suite, arch_str)
                cf = DebContentFile(filename, suite, suite_id, arch_str, arch_id)
                inputtoquery.enqueue(cf)

        inputtoquery.enqueue(EndOfContents())

        # wait for the final stage to finish compressing the last file
        gt.join()
    def udeb_generate(self):
        """
        Generate Contents-$arch.gz files for every available arch in each given suite.
        """
        session = DBConn().session()
        udebtype_id = get_override_type("udeb", session)
        suites = self._suites()

        inputtoquery = OneAtATime()
        querytoingest = OneAtATime()
        ingesttosort = OneAtATime()
        sorttooutput = OneAtATime()
        outputtogzip = OneAtATime()

        qt = QueryThread(inputtoquery, querytoingest)
        it = IngestThread(querytoingest, ingesttosort)
        st = SortThread(ingesttosort, sorttooutput)
        ot = OutputThread(sorttooutput, outputtogzip)
        gt = GzipThread(outputtogzip, None)

        # unlike deb_generate() above, the stages are created here but not yet
        # started or fed with UdebContentFile objects
    # Older single-pass implementation kept alongside the threaded pipeline
    # above; the method name below is a reconstruction, not the original one.
    def writer_generate(self):
        """
        Generate Contents-$arch.gz files for every available arch in each given suite.
        """
        session = DBConn().session()

        arch_all_id = get_architecture("all", session).arch_id

        # The MORE fun part. Ok, udebs need their own contents files, udeb, and udeb-nf (not-free)
        # This is HORRIBLY debian specific :-/
        for dtype, section, fn_pattern in \
              [('deb',  None,                        "dists/%s/Contents-%s.gz"),
               ('udeb', "debian-installer",          "dists/%s/Contents-udeb-%s.gz"),
               ('udeb', "non-free/debian-installer", "dists/%s/Contents-udeb-nf-%s.gz")]:

            overridetype = get_override_type(dtype, session)

            # For udebs, we only look in certain sections (see the for loop above)
            if section is not None:
                section = get_section(section, session)

            # Get our suites
            for suite in which_suites(session):
                # Which architectures do we need to work on
                arch_list = get_suite_architectures(suite.suite_name, skipsrc=True, skipall=True, session=session)

                # Set up our file writer dictionary
                file_writers = {}
                try:
                    # One file writer per arch
                    for arch in arch_list:
                        file_writers[arch.arch_id] = GzippedContentWriter(fn_pattern % (suite, arch.arch_string))

                    for r in get_suite_contents(suite, overridetype, section, session=session).fetchall():
                        # use a fresh name here so the outer 'section' object is
                        # not clobbered for the next loop iteration
                        filename, section_name, package, arch_id = r

                        if arch_id == arch_all_id:
                            # It's arch all, so all contents files get it
                            for writer in file_writers.values():
                                writer.write(filename, section_name, package)
                        else:
                            if file_writers.has_key(arch_id):
                                file_writers[arch_id].write(filename, section_name, package)
                finally:
                    # close all the files
                    for writer in file_writers.values():
                        writer.finish()
    def _suites(self):
        """
        return a list of suites to operate on
        """
        if Config().has_key("%s::%s" % (options_prefix, "Suite")):
            suites = utils.split_args(Config()["%s::%s" % (options_prefix, "Suite")])
        else:
            suites = Config().SubTree("Suite").List()

        return suites

    def _arches(self, suite, session):
        """
        return a list of archs to operate on
        """
        arch_list = []
        arches = session.execute(
            """SELECT s.architecture, a.arch_string
            FROM suite_architectures s
            JOIN architecture a ON (s.architecture=a.id)
            WHERE suite = :suite_id""",
            {'suite_id': suite})

        while True:
            r = arches.fetchone()
            if not r:
                break

            if r[1] != "source" and r[1] != "all":
                arch_list.append((r[0], r[1]))

        return arch_list
################################################################################

def main():
    cnf = Config()

    arguments = [('h', "help",    "%s::%s" % (options_prefix, "Help")),
                 ('s', "suite",   "%s::%s" % (options_prefix, "Suite"), "HasArg"),
                 ('q', "quiet",   "%s::%s" % (options_prefix, "Quiet")),
                 ('v', "verbose", "%s::%s" % (options_prefix, "Verbose")),
                ]

    commands = {'generate'      : Contents.generate,
                'bootstrap_bin' : Contents.bootstrap_bin,
                'bootstrap'     : Contents.bootstrap,
                'cruft'         : Contents.cruft,
               }

    args = apt_pkg.ParseCommandLine(cnf.Cnf, arguments, sys.argv)

    if (len(args) < 1) or not commands.has_key(args[0]):
        usage()

    if cnf.has_key("%s::%s" % (options_prefix, "Help")):
        usage()

    level = logging.INFO
    if cnf.has_key("%s::%s" % (options_prefix, "Quiet")):
        level = logging.ERROR
    elif cnf.has_key("%s::%s" % (options_prefix, "Verbose")):
        level = logging.DEBUG

    logging.basicConfig(level=level,
                        format='%(asctime)s %(levelname)s %(message)s',
                        stream=sys.stderr)

    commands[args[0]](Contents())
def which_suites(session):
    """
    return a list of suites to operate on
    """
    if Config().has_key("%s::%s" % (options_prefix, "Suite")):
        suites = utils.split_args(Config()["%s::%s" % (options_prefix, "Suite")])
    else:
        suites = Config().SubTree("Suite").List()

    return [get_suite(s.lower(), session) for s in suites]
if __name__ == '__main__':
    main()