import sys
import os
import logging
-import math
import gzip
import threading
+import traceback
import Queue
import apt_pkg
from daklib import utils
from daklib.binary import Binary
from daklib.config import Config
-from daklib.dbconn import DBConn
+from daklib.dbconn import *
+
################################################################################
def usage (exit_code=0):
generate
generate Contents-$arch.gz files
- bootstrap
- scan the debs in the existing pool and load contents in the the database
+ bootstrap_bin
+ scan the debs in the existing pool and load contents into the bin_contents table
cruft
remove files/paths which are no longer referenced by a binary
################################################################################
-# get all the arches delivered for a given suite
-# this should probably exist somehere common
-arches_q = """PREPARE arches_q(int) as
- SELECT s.architecture, a.arch_string
- FROM suite_architectures s
- JOIN architecture a ON (s.architecture=a.id)
- WHERE suite = $1"""
-
-# find me the .deb for a given binary id
-debs_q = """PREPARE debs_q(int, int) as
- SELECT b.id, f.filename FROM bin_assoc_by_arch baa
- JOIN binaries b ON baa.bin=b.id
- JOIN files f ON b.file=f.id
- WHERE suite = $1
- AND arch = $2"""
-
-# ask if we already have contents associated with this binary
-olddeb_q = """PREPARE olddeb_q(int) as
- SELECT 1 FROM content_associations
- WHERE binary_pkg = $1
- LIMIT 1"""
-
-# find me all of the contents for a given .deb
-contents_q = """PREPARE contents_q(int,int,character varying(4)) as
- SELECT (p.path||'/'||n.file) AS fn,
- s.section,
- b.package,
- b.architecture
- from content_associations c join content_file_paths p ON (c.filepath=p.id)
- JOIN content_file_names n ON (c.filename=n.id)
- JOIN binaries b ON (b.id=c.binary_pkg)
- JOIN override o ON (o.package=b.package)
- JOIN section s ON (s.id=o.section)
- WHERE o.suite = $1 AND o.type = $2
- and b.type='$3'
- ORDER BY fn"""
-
-# find me all of the contents for a given .udeb
-udeb_contents_q = """PREPARE udeb_contents_q(int,int,int,int,int) as
- SELECT (p.path||'/'||n.file) AS fn,
- comma_separated_list(s.section||'/'||b.package)
- FROM content_file_paths p join content_associations c ON (c.filepath=p.id)
- JOIN content_file_names n ON (c.filename=n.id)
- JOIN binaries b ON (b.id=c.binary_pkg)
- JOIN override o ON (o.package=b.package)
- JOIN section s ON (s.id=o.section)
- WHERE o.suite = $1 AND o.type = $2
- AND s.id = $3
- AND b.id in (SELECT ba.bin from bin_associations ba join binaries b on b.id=ba.bin where (b.architecture=$3 or b.architecture=$4)and ba.suite=$1 and b.type='udeb')
- GROUP BY fn
- ORDER BY fn;"""
-
-
-
-# clear out all of the temporarily stored content associations
-# this should be run only after p-a has run. after a p-a
-# run we should have either accepted or rejected every package
-# so there should no longer be anything in the queue
-remove_pending_contents_cruft_q = """DELETE FROM pending_content_associations"""
-
-# delete any filenames we are storing which have no binary associated with them
-remove_filename_cruft_q = """DELETE FROM content_file_names
- WHERE id IN (SELECT cfn.id FROM content_file_names cfn
- LEFT JOIN content_associations ca
- ON ca.filename=cfn.id
- WHERE ca.id IS NULL)"""
-
-# delete any paths we are storing which have no binary associated with them
-remove_filepath_cruft_q = """DELETE FROM content_file_paths
- WHERE id IN (SELECT cfn.id FROM content_file_paths cfn
- LEFT JOIN content_associations ca
- ON ca.filepath=cfn.id
- WHERE ca.id IS NULL)"""
-
class EndOfContents(object):
+ """
+ A sentry object for the end of the filename stream
+ """
pass
class GzippedContentWriter(object):
- def __init__(self, suite, arch):
+ """
+ An object which will write contents out to a Contents-$arch.gz
+ file on a separate thread
+ """
+
+    header = None # class attribute caching the header section of the contents file
+
+ def __init__(self, filename):
+ """
+ @type filename: string
+ @param filename: the name of the file to write to
+ """
self.queue = Queue.Queue()
self.current_file = None
self.first_package = True
- self.output = self.open_file("dists/%s/Contents-%s.gz" % (suite, arch))
+ self.output = self.open_file(filename)
self.thread = threading.Thread(target=self.write_thread,
- name='Contents-%s writer'%arch)
-
+ name='Contents writer')
self.thread.start()
def open_file(self, filename):
+ """
+ opens a gzip stream to the contents file
+ """
filepath = Config()["Contents::Root"] + filename
filedir = os.path.dirname(filepath)
if not os.path.isdir(filedir):
return gzip.open(filepath, "w")
def write(self, filename, section, package):
- self.queue.put((file,section,package))
-
+ """
+ enqueue content to be written to the file on a separate thread
+ """
+ self.queue.put((filename,section,package))
def write_thread(self):
+ """
+ the target of a Thread which will do the actual writing
+ """
while True:
- print("hi, i'm a thread" );
next = self.queue.get()
- print("GOT SOMEHTING FROM THE QUEUE" );
if isinstance(next, EndOfContents):
self.output.write('\n')
self.output.close()
break
(filename,section,package)=next
- if next != current_file:
+ if next != self.current_file:
# this is the first file, so write the header first
- if not current_file:
- self.output.write(self._getHeader)
+ if not self.current_file:
+ self.output.write(self._getHeader())
self.output.write('\n%s\t' % filename)
self.first_package = True
- if not first_package:
+
+ self.current_file=filename
+
+ if not self.first_package:
self.output.write(',')
else:
self.first_package=False
self.output.write('%s/%s' % (section,package))
+ def finish(self):
+ """
+ enqueue the sentry object so that writers will know to terminate
+ """
+ self.queue.put(EndOfContents())
+
+ @classmethod
def _getHeader(self):
"""
Internal method to return the header for Contents.gz files
This is boilerplate which explains the contents of the file and how
it can be used.
"""
- if self.header == None:
+ if not GzippedContentWriter.header:
if Config().has_key("Contents::Header"):
try:
h = open(os.path.join( Config()["Dir::Templates"],
Config()["Contents::Header"] ), "r")
- self.header = h.read()
+ GzippedContentWriter.header = h.read()
h.close()
except:
log.error( "error opening header file: %d\n%s" % (Config()["Contents::Header"],
traceback.format_exc() ))
- self.header = False
+ GzippedContentWriter.header = None
else:
- self.header = False
-
- return self.header
-
-
- def _write_content_file(self, cursor, filename):
- """
- Internal method for writing all the results to a given file.
- The cursor should have a result set generated from a query already.
- """
- filepath = Config()["Contents::Root"] + filename
- filedir = os.path.dirname(filepath)
- if not os.path.isdir(filedir):
- os.makedirs(filedir)
- f = gzip.open(filepath, "w")
- try:
- header = self._getHeader()
-
- if header:
- f.write(header)
-
- while True:
- contents = cursor.fetchone()
- if not contents:
- return
-
- f.write("%s\t%s\n" % contents )
-
- finally:
- f.close()
-
+ GzippedContentWriter.header = None
+ return GzippedContentWriter.header
class Contents(object):
"""
Class capable of generating Contents-$arch.gz files
-
- Usage GenerateContents().generateContents( ["main","contrib","non-free"] )
"""
def __init__(self):
def reject(self, message):
log.error("E: %s" % message)
- # goal column for section column
- _goal_column = 54
-
def cruft(self):
"""
remove files/paths from the DB which are no longer referenced
by binaries and clean the temporary table
"""
- cursor = DBConn().cursor();
- cursor.execute( "BEGIN WORK" )
- cursor.execute( remove_pending_contents_cruft_q )
- cursor.execute( remove_filename_cruft_q )
- cursor.execute( remove_filepath_cruft_q )
- cursor.execute( "COMMIT" )
+ s = DBConn().session()
+
+ # clear out all of the temporarily stored content associations
+ # this should be run only after p-a has run. after a p-a
+ # run we should have either accepted or rejected every package
+ # so there should no longer be anything in the queue
+ s.query(PendingContentAssociation).delete()
+
+ # delete any filenames we are storing which have no binary associated
+ # with them
+ cafq = s.query(ContentAssociation.filename_id).distinct()
+ cfq = s.query(ContentFilename)
+ cfq = cfq.filter(~ContentFilename.cafilename_id.in_(cafq))
+ cfq.delete()
+
+ # delete any paths we are storing which have no binary associated with
+ # them
+ capq = s.query(ContentAssociation.filepath_id).distinct()
+ cpq = s.query(ContentFilepath)
+ cpq = cpq.filter(~ContentFilepath.cafilepath_id.in_(capq))
+ cpq.delete()
+
+ s.commit()
+
+
+ def bootstrap_bin(self):
+ """
+ scan the existing debs in the pool to populate the bin_contents table
+ """
+ pooldir = Config()[ 'Dir::Pool' ]
+
+ s = DBConn().session()
+
+        for binary in s.query(DBBinary).all():
+            filename = binary.poolfile.filename
+            # Check for existing contents
+            existingq = s.execute( "select 1 from bin_contents where binary_id=:id", {'id':binary.binary_id} )
+            if existingq.fetchone():
+                log.debug( "already imported: %s" % (filename))
+            else:
+                # We don't have existing contents so import them
+                log.debug( "scanning: %s" % (filename) )
+
+                debfile = os.path.join(pooldir, filename)
+                if os.path.exists(debfile):
+                    Binary(debfile, self.reject).scan_package(binary.binary_id, True)
+                else:
+                    log.error("missing .deb: %s" % filename)
+
def bootstrap(self):
"""
pooldir = Config()[ 'Dir::Pool' ]
- cursor = DBConn().cursor();
- DBConn().prepare("debs_q",debs_q)
- DBConn().prepare("olddeb_q",olddeb_q)
- DBConn().prepare("arches_q",arches_q)
-
- suites = self._suites()
- for suite in [i.lower() for i in suites]:
- suite_id = DBConn().get_suite_id(suite)
-
- arch_list = self._arches(cursor, suite_id)
- arch_all_id = DBConn().get_architecture_id("all")
- for arch_id in arch_list:
- cursor.execute( "EXECUTE debs_q(%d, %d)" % ( suite_id, arch_id[0] ) )
-
- count = 0
- while True:
- deb = cursor.fetchone()
- if not deb:
- break
- count += 1
- cursor1 = DBConn().cursor();
- cursor1.execute( "EXECUTE olddeb_q(%d)" % (deb[0] ) )
- old = cursor1.fetchone()
- if old:
- log.debug( "already imported: %s" % (deb[1]) )
+ s = DBConn().session()
+
+ for suite in s.query(Suite).all():
+ for arch in get_suite_architectures(suite.suite_name, skipsrc=True, skipall=True, session=s):
+                q = s.query(BinAssociation)
+                q = q.join(Suite).filter_by(suite_name=suite.suite_name)
+                q = q.join(DBBinary).join(Architecture).filter_by(arch_string=arch.arch_string)
+ for ba in q:
+ filename = ba.binary.poolfile.filename
+ # Check for existing contents
+                    existingq = s.query(ContentAssociation).filter_by(binary_pkg=ba.binary_id).limit(1)
+ if existingq.count() > 0:
+ log.debug( "already imported: %s" % (filename))
else:
- log.debug( "scanning: %s" % (deb[1]) )
- debfile = os.path.join( pooldir, deb[1] )
- if os.path.exists( debfile ):
- Binary(debfile, self.reject).scan_package(deb[0],True)
+ # We don't have existing contents so import them
+ log.debug( "scanning: %s" % (filename) )
+ debfile = os.path.join(pooldir, filename)
+ if os.path.exists(debfile):
+ Binary(debfile, self.reject).scan_package(ba.binary_id, True)
else:
- log.error("missing .deb: %s" % deb[1])
+ log.error("missing .deb: %s" % filename)
+
def generate(self):
"""
Generate Contents-$arch.gz files for every available arch in each given suite.
"""
- cursor = DBConn().cursor()
-
- DBConn().prepare("arches_q", arches_q)
- DBConn().prepare("contents_q", contents_q)
- DBConn().prepare("udeb_contents_q", udeb_contents_q)
+ session = DBConn().session()
- debtype_id=DBConn().get_override_type_id("deb")
- udebtype_id=DBConn().get_override_type_id("udeb")
+ arch_all_id = get_architecture("all", session).arch_id
- suites = self._suites()
+ # The MORE fun part. Ok, udebs need their own contents files, udeb, and udeb-nf (not-free)
+ # This is HORRIBLY debian specific :-/
+ for dtype, section, fn_pattern in \
+ [('deb', None, "dists/%s/Contents-%s.gz"),
+ ('udeb', "debian-installer", "dists/%s/Contents-udeb-%s.gz"),
+ ('udeb', "non-free/debian-installer", "dists/%s/Contents-udeb-nf-%s.gz")]:
+ overridetype = get_override_type(dtype, session)
- # Get our suites, and the architectures
- for suite in [i.lower() for i in suites]:
- suite_id = DBConn().get_suite_id(suite)
- arch_list = self._arches(cursor, suite_id)
+ # For udebs, we only look in certain sections (see the for loop above)
+ if section is not None:
+ section = get_section(section, session)
- arch_all_id = DBConn().get_architecture_id("all")
+ # Get our suites
+        for suite in which_suites(session):
+ # Which architectures do we need to work on
+ arch_list = get_suite_architectures(suite.suite_name, skipsrc=True, skipall=True, session=session)
- file_writers = {}
-
- for arch_id in arch_list:
- file_writers[arch_id] = GzippedContentWriter(suite, arch_id[1])
-
-
- print("EXECUTE contents_q(%d,%d,'%s')" % (suite_id, debtype_id, 'deb'))
-# cursor.execute("EXECUTE contents_q(%d,%d,'%s');" % (suite_id, debtype_id, 'deb'))
- cursor.execute("""SELECT (p.path||'/'||n.file) AS fn,
- s.section,
- b.package,
- b.architecture
- from content_associations c join content_file_paths p ON (c.filepath=p.id)
- JOIN content_file_names n ON (c.filename=n.id)
- JOIN binaries b ON (b.id=c.binary_pkg)
- JOIN override o ON (o.package=b.package)
- JOIN section s ON (s.id=o.section)
- WHERE o.suite = %d AND o.type = %d
- and b.type='deb'
- ORDER BY fn""" % (suite_id, debtype_id))
+ # Set up our file writer dictionary
+ file_writers = {}
+ try:
+ # One file writer per arch
+ for arch in arch_list:
+                        file_writers[arch.arch_id] = GzippedContentWriter(fn_pattern % (suite.suite_name, arch.arch_string))
- while True:
- r = cursor.fetchone()
- print( "got contents: %s" % r )
- if not r:
- print( "STU:END" );
- break
+                    for r in get_suite_contents(suite, overridetype, section, session=session).fetchall():
+                        # don't clobber the outer 'section' (a Section object reused
+                        # for every suite in this overridetype pass)
+                        filename, filesection, package, arch_id = r
-            print( "STU:NOT END" );
-            filename, section, package, arch = r
+                        if arch_id == arch_all_id:
+                            # It's arch all, so all contents files get it
+                            for writer in file_writers.values():
+                                writer.write(filename, filesection, package)
+                        else:
+                            if file_writers.has_key(arch_id):
+                                file_writers[arch_id].write(filename, filesection, package)
- if arch == arch_all_id:
- ## its arch all, so all contents files get it
+ finally:
+ # close all the files
for writer in file_writers.values():
- writer.write(filename, section, package)
-
- else:
- file_writers[arch].write(filename, section, package)
-
- # close all the files
- for writer in file_writers.values():
- writer.close()
-
-
-# self._write_content_file(cursor, "dists/%s/Contents-%s.gz" % (suite, arch_id[1]))
-
- # The MORE fun part. Ok, udebs need their own contents files, udeb, and udeb-nf (not-free)
- # This is HORRIBLY debian specific :-/
-# for section, fn_pattern in [("debian-installer","dists/%s/Contents-udeb-%s.gz"),
-# ("non-free/debian-installer", "dists/%s/Contents-udeb-nf-%s.gz")]:
-
-# for arch_id in arch_list:
-# section_id = DBConn().get_section_id(section) # all udebs should be here)
-# if section_id != -1:
-# cursor.execute("EXECUTE udeb_contents_q(%d,%d,%d,%d,%d)" % (suite_id, udebtype_id, section_id, arch_id[0], arch_all_id))
-
-# self._write_content_file(cursor, fn_pattern % (suite, arch_id[1]))
-
-
-################################################################################
-
- def _suites(self):
- """
- return a list of suites to operate on
- """
- if Config().has_key( "%s::%s" %(options_prefix,"Suite")):
- suites = utils.split_args(Config()[ "%s::%s" %(options_prefix,"Suite")])
- else:
- suites = Config().SubTree("Suite").List()
-
- return suites
-
- def _arches(self, cursor, suite):
- """
- return a list of archs to operate on
- """
- arch_list = []
- cursor.execute("EXECUTE arches_q(%d)" % (suite))
- while True:
- r = cursor.fetchone()
- if not r:
- break
-
- if r[1] != "source" and r[1] != "all":
- arch_list.append((r[0], r[1]))
-
- return arch_list
+ writer.finish()
################################################################################
-
def main():
cnf = Config()
]
commands = {'generate' : Contents.generate,
- 'bootstrap' : Contents.bootstrap,
+ 'bootstrap_bin' : Contents.bootstrap_bin,
'cruft' : Contents.cruft,
}
commands[args[0]](Contents())
+def which_suites(session):
+ """
+ return a list of suites to operate on
+ """
+ if Config().has_key( "%s::%s" %(options_prefix,"Suite")):
+ suites = utils.split_args(Config()[ "%s::%s" %(options_prefix,"Suite")])
+ else:
+ suites = Config().SubTree("Suite").List()
+
+ return [get_suite(s.lower(), session) for s in suites]
+
+
if __name__ == '__main__':
main()