import gzip
import apt_pkg
from daklib import utils
-from daklib.Config import Config
-from daklib.DBConn import DBConn
+from daklib.binary import Binary
+from daklib.config import Config
+from daklib.dbconn import DBConn
################################################################################
def usage (exit_code=0):
supress all output but errors
-s, --suite={stable,testing,unstable,...}
- only operate on a signle suite
+ only operate on a single suite
-a, --arch={i386,amd64}
- only operate on a signle architecture
+ only operate on a single architecture
"""
sys.exit(exit_code)
# where in dak.conf all of our configuration will be stowed
options_prefix = "Contents"
-options_prefix = "%s::Opitons" % options_prefix
-header_prefix = "%s::Header" % options_prefix
+options_prefix = "%s::Options" % options_prefix
log = logging.getLogger()
################################################################################
-latin1_q = """SET CLIENT_ENCODING TO 'LATIN1'"""
-
+# get all the arches delivered for a given suite
+# this should probably exist somewhere common
arches_q = """PREPARE arches_q as
SELECT s.architecture, a.arch_string
FROM suite_architectures s
JOIN architecture a ON (s.architecture=a.id)
WHERE suite = $1"""
+# find me the .deb for a given binary id
debs_q = """PREPARE debs_q as
SELECT b.id, f.filename FROM bin_assoc_by_arch baa
JOIN binaries b ON baa.bin=b.id
WHERE suite = $1
AND arch = $2"""
+# ask if we already have contents associated with this binary
olddeb_q = """PREPARE olddeb_q as
SELECT 1 FROM content_associations
WHERE binary_pkg = $1
LIMIT 1"""
+# find me all of the contents for a given .deb
contents_q = """PREPARE contents_q as
SELECT (p.path||'/'||n.file) AS fn,
comma_separated_list(s.section||'/'||b.package)
GROUP BY fn
ORDER BY fn"""
+# find me all of the contents for a given .udeb
udeb_contents_q = """PREPARE udeb_contents_q as
SELECT (p.path||'/'||n.file) as fn,
comma_separated_list(s.section||'/'||b.package)
GROUP BY fn
ORDER BY fn"""
+# clear out all of the temporarily stored content associations
+# this should be run only after p-a has run. after a p-a
+# run we should have either accepted or rejected every package
+# so there should no longer be anything in the queue
+remove_pending_contents_cruft_q = """DELETE FROM pending_content_associations"""
+
+# delete any filenames we are storing which have no binary associated with them
+remove_filename_cruft_q = """DELETE FROM content_file_names
+ WHERE id IN (SELECT cfn.id FROM content_file_names cfn
+ LEFT JOIN content_associations ca
+ ON ca.filename=cfn.id
+ WHERE ca.id IS NULL)"""
+
+# delete any paths we are storing which have no binary associated with them
+remove_filepath_cruft_q = """DELETE FROM content_file_paths
+ WHERE id IN (SELECT cfn.id FROM content_file_paths cfn
+ LEFT JOIN content_associations ca
+ ON ca.filepath=cfn.id
+ WHERE ca.id IS NULL)"""
class Contents(object):
"""
Class capable of generating Contents-$arch.gz files
self.header = None
def _getHeader(self):
- # Internal method to return the header for Contents.gz files
+ """
+ Internal method to return the header for Contents.gz files
+
+ This is boilerplate which explains the contents of the file and how
+ it can be used.
+ """
if self.header == None:
if Config().has_key("Contents::Header"):
try:
h = open(os.path.join( Config()["Dir::Templates"],
Config()["Contents::Header"] ), "r")
self.header = h.read()
+ print( "header: %s" % self.header )
h.close()
except:
- log.error( "error openeing header file: %d\n%s" % (Config()["Contents::Header"],
- traceback.format_exc() ))
+ log.error( "error opening header file: %d\n%s" % (Config()["Contents::Header"],
+ traceback.format_exc() ))
self.header = False
else:
+ print( "no header" )
self.header = False
return self.header
_goal_column = 54
def _write_content_file(self, cursor, filename):
- # Internal method for writing all the results to a given file
+ """
+ Internal method for writing all the results to a given file.
+ The cursor should have a result set generated from a query already.
+ """
f = gzip.open(Config()["Dir::Root"] + filename, "w")
try:
header = self._getHeader()
def cruft(self):
"""
- remove files/paths from the DB which are no longer referenced by binaries
+ remove files/paths from the DB which are no longer referenced
+ by binaries and clean the temporary table
"""
cursor = DBConn().cursor();
cursor.execute( "BEGIN WORK" )
- cursor.execute( """DELETE FROM content_file_names
- WHERE id IN (SELECT cfn.id FROM content_file_names cfn
- LEFT JOIN content_associations ca
- ON ca.filename=cfn.id
- WHERE ca.id IS NULL)""" );
- cursor.execute( """DELETE FROM content_file_paths
- WHERE id IN (SELECT cfn.id FROM content_file_paths cfn
- LEFT JOIN content_associations ca
- ON ca.filepath=cfn.id
- WHERE ca.id IS NULL)""" );
+ cursor.execute( remove_pending_contents_cruft_q )
+ cursor.execute( remove_filename_cruft_q )
+ cursor.execute( remove_filepath_cruft_q )
cursor.execute( "COMMIT" )
pooldir = Config()[ 'Dir::Pool' ]
cursor = DBConn().cursor();
- cursor.execute( latin1_q )
cursor.execute( debs_q )
cursor.execute( olddeb_q )
cursor.execute( arches_q )
for arch_id in arch_list:
cursor.execute( "EXECUTE debs_q(%d, %d)" % ( suite_id, arch_id[0] ) )
- debs = cursor.fetchall()
count = 0
- for deb in debs:
+ while True:
+ deb = cursor.fetchone()
+ if not deb:
+ break
count += 1
- cursor.execute( "EXECUTE olddeb_q(%d)" % (deb[0] ) )
- old = cursor.fetchone()
+ cursor1 = DBConn().cursor();
+ cursor1.execute( "EXECUTE olddeb_q(%d)" % (deb[0] ) )
+ old = cursor1.fetchone()
if old:
log.debug( "already imported: %s" % deb[1] )
else:
debfile = os.path.join( pooldir, deb[1] )
if os.path.exists( debfile ):
- contents = utils.generate_contents_information( debfile )
- DBConn().insert_content_paths(deb[0], contents)
- log.info( "imported (%d/%d): %s" % (count,len(debs),deb[1] ) )
+ Binary(debfile).scan_package( deb[0] )
else:
log.error( "missing .deb: %s" % deb[1] )
# Get our suites, and the architectures
for suite in [i.lower() for i in suites]:
suite_id = DBConn().get_suite_id(suite)
-
arch_list = self._arches(cursor, suite_id)
arch_all_id = DBConn().get_architecture_id("all")
# The MORE fun part. Ok, udebs need their own contents files, udeb, and udeb-nf (not-free)
# This is HORRIBLY debian specific :-/
- # First off, udeb
- section_id = DBConn().get_section_id('debian-installer') # all udebs should be here)
- if section_id != -1:
- cursor.execute("EXECUTE udeb_contents_q(%d,%d,%d)" % (section_id, suite_id, suite_id))
- self._write_content_file(cursor, "dists/%s/Contents-udeb.gz" % suite)
+ for section_id, fn_pattern in [("debian-installer","dists/%s/Contents-udeb.gz"),
+ ("non-free/debian-installer", "dists/%s/Contents-udeb-nf.gz")]:
- # Once more, with non-free
- section_id = DBConn().get_section_id('non-free/debian-installer') # all udebs should be here)
-
- if section_id != -1:
- cursor.execute("EXECUTE udeb_contents_q(%d,%d,%d)" % (section_id, suite_id, suite_id))
- self._write_content_file(cursor, "dists/%s/Contents-udeb-nf.gz" % suite)
            section_id = DBConn().get_section_id(section_id) # all udebs should be here
+ if section_id != -1:
+ cursor.execute("EXECUTE udeb_contents_q(%d,%d,%d)" % (section_id, suite_id, suite_id))
+ self._write_content_file(cursor, fn_pattern % suite)
################################################################################
def _suites(self):
- # return a list of suites to operate on
+ """
+ return a list of suites to operate on
+ """
if Config().has_key( "%s::%s" %(options_prefix,"Suite")):
suites = utils.split_args(Config()[ "%s::%s" %(options_prefix,"Suite")])
else:
return suites
def _arches(self, cursor, suite):
- # return a list of archs to operate on
+ """
+ return a list of archs to operate on
+ """
arch_list = [ ]
if Config().has_key( "%s::%s" %(options_prefix,"Arch")):
archs = utils.split_args(Config()[ "%s::%s" %(options_prefix,"Arch")])
level=logging.DEBUG
- logging.basicConfig( level=logging.DEBUG,
+ logging.basicConfig( level=level,
format='%(asctime)s %(levelname)s %(message)s',
stream = sys.stderr )