From: Mike O'Connor
Date: Wed, 28 Oct 2009 19:50:06 +0000 (+0100)
Subject: Merge remote branch 'flotow/master'
X-Git-Url: https://git.decadent.org.uk/gitweb/?a=commitdiff_plain;h=274fdac9a30c88bd6df70bd4e30b6a684775415a;hp=d0779ef69d429a210405002f252067806ddfa6f2;p=dak.git

Merge remote branch 'flotow/master'
---

diff --git a/dak/contents.py b/dak/contents.py
index c435afc5..834cbccf 100755
--- a/dak/contents.py
+++ b/dak/contents.py
@@ -55,8 +55,8 @@ COMMANDS
     generate
         generate Contents-$arch.gz files
 
-    bootstrap
-        scan the debs in the existing pool and load contents in the the database
+    bootstrap_bin
+        scan the debs in the existing pool and load contents into the bin_contents table
 
     cruft
         remove files/paths which are no longer referenced by a binary
@@ -230,6 +230,34 @@ class Contents(object):
 
         s.commit()
 
+    def bootstrap_bin(self):
+        """
+        scan the existing debs in the pool to populate the bin_contents table
+        """
+        pooldir = Config()[ 'Dir::Pool' ]
+
+        s = DBConn().session()
+
+        # for binary in s.query(DBBinary).all() ):
+        binary = s.query(DBBinary).first()
+        if binary:
+            filename = binary.poolfile.filename
+            # Check for existing contents
+            existingq = s.execute( "select 1 from bin_contents where binary_id=:id", {'id':binary.binary_id} );
+            if existingq.fetchone():
+                log.debug( "already imported: %s" % (filename))
+            else:
+                # We don't have existing contents so import them
+                log.debug( "scanning: %s" % (filename) )
+
+                debfile = os.path.join(pooldir, filename)
+                if os.path.exists(debfile):
+                    Binary(debfile, self.reject).scan_package(binary.binary_id, True)
+                else:
+                    log.error("missing .deb: %s" % filename)
+
+
+
     def bootstrap(self):
         """
         scan the existing debs in the pool to populate the contents database tables
@@ -320,7 +348,7 @@ def main():
                 ]
 
     commands = {'generate' : Contents.generate,
-                'bootstrap' : Contents.bootstrap,
+                'bootstrap_bin' : Contents.bootstrap_bin,
                 'cruft' : Contents.cruft,
                }
 
diff --git a/dak/dakdb/update17.py b/dak/dakdb/update17.py
new file mode 100644
index 00000000..0d7efa9e
--- /dev/null
+++ b/dak/dakdb/update17.py
@@ -0,0 +1,62 @@
+#!/usr/bin/env python
+# coding=utf8
+
+"""
+adding a bin_contents table to hold lists of files contained in .debs and .udebs
+
+@contact: Debian FTP Master
+@copyright: 2009 Mike O'Connor
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+################################################################################
+
+
+################################################################################
+
+import psycopg2
+import time
+from daklib.dak_exceptions import DBUpdateError
+
+################################################################################
+
+def do_update(self):
+
+    print "adding a bin_contents table to hold lists of files contained in .debs and .udebs"
+
+    try:
+        c = self.db.cursor()
+        c.execute("""CREATE TABLE bin_contents (
+        file text,
+        binary_id integer,
+        UNIQUE(file,binary_id))""" )
+
+        c.execute("""ALTER TABLE ONLY bin_contents
+        ADD CONSTRAINT bin_contents_bin_fkey
+        FOREIGN KEY (binary_id) REFERENCES binaries(id)
+        ON DELETE CASCADE;""")
+
+        c.execute("""CREATE INDEX ind_bin_contents_binary ON bin_contents(binary_id);""" )
+
+        self.db.commit()
+
+    except psycopg2.ProgrammingError, msg:
+        self.db.rollback()
+        raise DBUpdateError, "Unable to apply process-new update 17, rollback issued. Error message : %s" % (str(msg))
+
+
+
diff --git a/dak/update_db.py b/dak/update_db.py
index ecf5cd2a..a51c7c1b 100755
--- a/dak/update_db.py
+++ b/dak/update_db.py
@@ -44,7 +44,7 @@ from daklib.dak_exceptions import DBUpdateError
 ################################################################################
 
 Cnf = None
-required_database_schema = 15
+required_database_schema = 17
 
 ################################################################################
 
@@ -107,9 +107,10 @@ Updates dak's database schema to the lastest version. You should disable crontab
 
         try:
             # Build a connect string
-            connect_str = "dbname=%s"% (Cnf["DB::Name"])
-            if Cnf["DB::Host"] != '': connect_str += " host=%s" % (Cnf["DB::Host"])
-            if Cnf["DB::Port"] != '-1': connect_str += " port=%d" % (int(Cnf["DB::Port"]))
+#            connect_str = "dbname=%s"% (Cnf["DB::Name"])
+            connect_str = "dbname=%s"% "projectbstew"
+#            if Cnf["DB::Host"] != '': connect_str += " host=%s" % (Cnf["DB::Host"])
+#            if Cnf["DB::Port"] != '-1': connect_str += " port=%d" % (int(Cnf["DB::Port"]))
 
             self.db = psycopg2.connect(connect_str)
 
diff --git a/daklib/dbconn.py b/daklib/dbconn.py
index 9421b28f..3c0bc50d 100755
--- a/daklib/dbconn.py
+++ b/daklib/dbconn.py
@@ -230,6 +230,17 @@ __all__.append('BinAssociation')
 
 ################################################################################
 
+class BinContents(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<BinContents %s: %s>' % (self.binary, self.filename)
+
+__all__.append('BinContents')
+
+################################################################################
+
 class DBBinary(object):
     def __init__(self, *args, **kwargs):
         pass
@@ -435,15 +446,6 @@ __all__.append('DBConfig')
 
 ################################################################################
 
-class ContentFilename(object):
-    def __init__(self, *args, **kwargs):
-        pass
-
-    def __repr__(self):
-        return '<ContentFilename %s>' % self.filename
-
-__all__.append('ContentFilename')
-
 @session_wrapper
 def get_or_set_contents_file_id(filename, session=None):
     """
@@ -610,28 +612,14 @@ def insert_content_paths(binary_id, fullpaths, session=None):
         # Insert paths
         pathcache = {}
         for fullpath in fullpaths:
-            # Get the necessary IDs ...
-            (path, file) = os.path.split(fullpath)
+            if fullpath.startswith( './' ):
+                fullpath = fullpath[2:]
+
+            session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )", { 'filename': fullpath, 'id': binary_id} )
 
-            filepath_id = get_or_set_contents_path_id(path, session)
-            filename_id = get_or_set_contents_file_id(file, session)
-
-            pathcache[fullpath] = (filepath_id, filename_id)
-
-        for fullpath, dat in pathcache.items():
-            ca = ContentAssociation()
-            ca.binary_id = binary_id
-            ca.filepath_id = dat[0]
-            ca.filename_id = dat[1]
-            session.add(ca)
-
-        # Only commit if we set up the session ourself
+        session.commit()
         if privatetrans:
-            session.commit()
             session.close()
-        else:
-            session.flush()
-
         return True
 
     except:
@@ -2129,6 +2117,7 @@ class DBConn(Singleton):
                                  binary_id = self.tbl_bin_associations.c.bin,
                                  binary = relation(DBBinary)))
 
+
         mapper(DBBinary, self.tbl_binaries,
                properties = dict(binary_id = self.tbl_binaries.c.id,
                                  package = self.tbl_binaries.c.package,
@@ -2155,24 +2144,6 @@ class DBConn(Singleton):
         mapper(DBConfig, self.tbl_config,
                properties = dict(config_id = self.tbl_config.c.id))
 
-        mapper(ContentAssociation, self.tbl_content_associations,
-               properties = dict(ca_id = self.tbl_content_associations.c.id,
-                                 filename_id = self.tbl_content_associations.c.filename,
-                                 filename = relation(ContentFilename),
-                                 filepath_id = self.tbl_content_associations.c.filepath,
-                                 filepath = relation(ContentFilepath),
-                                 binary_id = self.tbl_content_associations.c.binary_pkg,
-                                 binary = relation(DBBinary)))
-
-
-        mapper(ContentFilename, self.tbl_content_file_names,
-               properties = dict(cafilename_id = self.tbl_content_file_names.c.id,
-                                 filename = self.tbl_content_file_names.c.file))
-
-        mapper(ContentFilepath, self.tbl_content_file_paths,
-               properties = dict(cafilepath_id = self.tbl_content_file_paths.c.id,
-                                 filepath = self.tbl_content_file_paths.c.path))
-
         mapper(DSCFile, self.tbl_dsc_files,
                properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
                                  source_id = self.tbl_dsc_files.c.source,
@@ -2227,13 +2198,6 @@ class DBConn(Singleton):
                properties = dict(overridetype = self.tbl_override_type.c.type,
                                  overridetype_id = self.tbl_override_type.c.id))
 
-        mapper(PendingContentAssociation, self.tbl_pending_content_associations,
-               properties = dict(pca_id = self.tbl_pending_content_associations.c.id,
-                                 filepath_id = self.tbl_pending_content_associations.c.filepath,
-                                 filepath = relation(ContentFilepath),
-                                 filename_id = self.tbl_pending_content_associations.c.filename,
-                                 filename = relation(ContentFilename)))
-
         mapper(Priority, self.tbl_priority,
                properties = dict(priority_id = self.tbl_priority.c.id))
 
diff --git a/daklib/utils.py b/daklib/utils.py
index b0b71c01..25b16125 100755
--- a/daklib/utils.py
+++ b/daklib/utils.py
@@ -1504,8 +1504,8 @@ apt_pkg.init()
 Cnf = apt_pkg.newConfiguration()
 apt_pkg.ReadConfigFileISC(Cnf,default_config)
 
-if which_conf_file() != default_config:
-    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
+#if which_conf_file() != default_config:
+#    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
 
###############################################################################
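
Note on the new bootstrap_bin command: as committed above it inspects only the first DBBinary row, and the pool-wide loop is still commented out ("# for binary in s.query(DBBinary).all() ):"). The sketch below is an illustration only, not code from this commit; it assumes the same names dak/contents.py already uses (Config, DBConn, DBBinary, Binary, log, self.reject) and simply shows what enabling that loop might look like.

# Illustrative sketch only -- not part of the commit above.  It assumes the
# helpers bootstrap_bin already relies on (Config, DBConn, DBBinary, Binary,
# log) and enables the pool-wide loop left commented out in the committed code.
import os

def bootstrap_bin_all(self):
    """
    scan every deb in the pool and load its file list into bin_contents,
    skipping binaries that already have rows
    """
    pooldir = Config()['Dir::Pool']
    s = DBConn().session()

    for binary in s.query(DBBinary).all():
        filename = binary.poolfile.filename

        # skip binaries whose contents were already imported
        existing = s.execute("SELECT 1 FROM bin_contents WHERE binary_id = :id",
                             {'id': binary.binary_id})
        if existing.fetchone():
            log.debug("already imported: %s" % filename)
            continue

        debfile = os.path.join(pooldir, filename)
        if os.path.exists(debfile):
            # scan_package() unpacks the deb and feeds its paths to
            # insert_content_paths(), which now writes straight into bin_contents
            Binary(debfile, self.reject).scan_package(binary.binary_id, True)
        else:
            log.error("missing .deb: %s" % filename)

Keeping the per-binary "already imported" check from the committed version means such a full scan stays safe to re-run after an interrupted bootstrap.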