generate
generate Contents-$arch.gz files
- bootstrap
- scan the debs in the existing pool and load contents in the the database
+ bootstrap_bin
+ scan the debs in the existing pool and load contents into the bin_contents table
cruft
remove files/paths which are no longer referenced by a binary
s.commit()
+ def bootstrap_bin(self):
+ """
+ scan the existing debs in the pool to populate the bin_contents table
+ """
+ pooldir = Config()[ 'Dir::Pool' ]
+
+ s = DBConn().session()
+
+ # for binary in s.query(DBBinary).all() ):
+ binary = s.query(DBBinary).first()
+ if binary:
+ filename = binary.poolfile.filename
+ # Check for existing contents
+ existingq = s.execute( "select 1 from bin_contents where binary_id=:id", {'id':binary.binary_id} );
+ if existingq.fetchone():
+ log.debug( "already imported: %s" % (filename))
+ else:
+ # We don't have existing contents so import them
+ log.debug( "scanning: %s" % (filename) )
+
+ debfile = os.path.join(pooldir, filename)
+ if os.path.exists(debfile):
+ Binary(debfile, self.reject).scan_package(binary.binary_id, True)
+ else:
+ log.error("missing .deb: %s" % filename)
+
+
+
def bootstrap(self):
"""
scan the existing debs in the pool to populate the contents database tables
]
commands = {'generate' : Contents.generate,
- 'bootstrap' : Contents.bootstrap,
+ 'bootstrap_bin' : Contents.bootstrap_bin,
'cruft' : Contents.cruft,
}
--- /dev/null
+#!/usr/bin/env python
+# coding=utf8
+
+"""
+adding a bin_contents table to hold lists of files contained in .debs and .udebs
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2009 Mike O'Connor <stew@debian.org>
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+################################################################################
+
+
+################################################################################
+
+import psycopg2
+import time
+from daklib.dak_exceptions import DBUpdateError
+
+################################################################################
+
+def do_update(self):
+ """Schema update 17: create the bin_contents table (one row per file per binary) with a cascading FK into binaries, plus a lookup index."""
+
+ print "adding a bin_contents table to hold lists of files contained in .debs and .udebs"
+
+ try:
+ c = self.db.cursor()
+ # (file, binary_id) pairs; UNIQUE prevents duplicate imports of the same file
+ c.execute("""CREATE TABLE bin_contents (
+ file text,
+ binary_id integer,
+ UNIQUE(file,binary_id))""" )
+
+ # ON DELETE CASCADE: removing a binary automatically purges its contents rows
+ c.execute("""ALTER TABLE ONLY bin_contents
+ ADD CONSTRAINT bin_contents_bin_fkey
+ FOREIGN KEY (binary_id) REFERENCES binaries(id)
+ ON DELETE CASCADE;""")
+
+ # index for the common "all files of binary X" lookup
+ c.execute("""CREATE INDEX ind_bin_contents_binary ON bin_contents(binary_id);""" )
+
+ # bump the schema revision so dak knows this update has been applied
+ c.execute("UPDATE config SET value = '17' WHERE name = 'db_revision'")
+ self.db.commit()
+
+ except psycopg2.ProgrammingError, msg:
+ # roll back the partial DDL transaction before reporting failure
+ self.db.rollback()
+ raise DBUpdateError, "Unable to apply process-new update 17, rollback issued. Error message : %s" % (str(msg))
+
+
+
+
+
+
################################################################################
Cnf = None
-required_database_schema = 16
+required_database_schema = 17
################################################################################
################################################################################
-default_config = "/etc/dak/dak.conf"
+default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
def which_conf_file(Cnf):
res = socket.gethostbyaddr(socket.gethostname())
################################################################################
+class BinContents(object):
+ """ORM-mapped row of the bin_contents table; attributes are populated by the SQLAlchemy mapper."""
+ def __init__(self, *args, **kwargs):
+ pass
+
+ def __repr__(self):
+ # NOTE(review): reads self.binary / self.filename, but no
+ # mapper(BinContents, ...) is visible in this patch and the table
+ # columns are 'file' and 'binary_id' -- confirm these attributes
+ # are actually defined by the mapping.
+ return '<BinContents (%s, %s)>' % (self.binary, self.filename)
+
+__all__.append('BinContents')
+
+################################################################################
+
class DBBinary(object):
def __init__(self, *args, **kwargs):
pass
################################################################################
-class ContentFilename(object):
- def __init__(self, *args, **kwargs):
- pass
-
- def __repr__(self):
- return '<ContentFilename %s>' % self.filename
-
-__all__.append('ContentFilename')
-
@session_wrapper
def get_or_set_contents_file_id(filename, session=None):
"""
# Insert paths
pathcache = {}
for fullpath in fullpaths:
- # Get the necessary IDs ...
- (path, file) = os.path.split(fullpath)
-
- filepath_id = get_or_set_contents_path_id(path, session)
- filename_id = get_or_set_contents_file_id(file, session)
-
- pathcache[fullpath] = (filepath_id, filename_id)
+ if fullpath.startswith( './' ):
+ fullpath = fullpath[2:]
- for fullpath, dat in pathcache.items():
- ca = ContentAssociation()
- ca.binary_id = binary_id
- ca.filepath_id = dat[0]
- ca.filename_id = dat[1]
- session.add(ca)
+ session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )", { 'filename': fullpath, 'id': binary_id} )
- # Only commit if we set up the session ourself
+ session.commit()
if privatetrans:
- session.commit()
session.close()
- else:
- session.flush()
-
return True
except:
binary_id = self.tbl_bin_associations.c.bin,
binary = relation(DBBinary)))
+
mapper(DBBinary, self.tbl_binaries,
properties = dict(binary_id = self.tbl_binaries.c.id,
package = self.tbl_binaries.c.package,
mapper(DBConfig, self.tbl_config,
properties = dict(config_id = self.tbl_config.c.id))
- mapper(ContentAssociation, self.tbl_content_associations,
- properties = dict(ca_id = self.tbl_content_associations.c.id,
- filename_id = self.tbl_content_associations.c.filename,
- filename = relation(ContentFilename),
- filepath_id = self.tbl_content_associations.c.filepath,
- filepath = relation(ContentFilepath),
- binary_id = self.tbl_content_associations.c.binary_pkg,
- binary = relation(DBBinary)))
-
-
- mapper(ContentFilename, self.tbl_content_file_names,
- properties = dict(cafilename_id = self.tbl_content_file_names.c.id,
- filename = self.tbl_content_file_names.c.file))
-
- mapper(ContentFilepath, self.tbl_content_file_paths,
- properties = dict(cafilepath_id = self.tbl_content_file_paths.c.id,
- filepath = self.tbl_content_file_paths.c.path))
-
mapper(DSCFile, self.tbl_dsc_files,
properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
source_id = self.tbl_dsc_files.c.source,
properties = dict(overridetype = self.tbl_override_type.c.type,
overridetype_id = self.tbl_override_type.c.id))
- mapper(PendingContentAssociation, self.tbl_pending_content_associations,
- properties = dict(pca_id = self.tbl_pending_content_associations.c.id,
- filepath_id = self.tbl_pending_content_associations.c.filepath,
- filepath = relation(ContentFilepath),
- filename_id = self.tbl_pending_content_associations.c.filename,
- filename = relation(ContentFilename)))
-
mapper(Priority, self.tbl_priority,
properties = dict(priority_id = self.tbl_priority.c.id))
from textutils import fix_maintainer
from regexes import re_html_escaping, html_escaping, re_single_line_field, \
re_multi_line_field, re_srchasver, re_taint_free, \
- re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource
+ re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource, \
+ re_is_orig_source
from formats import parse_format, validate_changes_format
from srcformats import get_format_from_string
try:
try:
file_handle = open_file(f)
-
+
# Check for the hash entry, to not trigger a KeyError.
if not files[f].has_key(hash_key(hashname)):
rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
where))
continue
-
+
# Actually check the hash for correctness.
if hashfunc(file_handle) != files[f][hash_key(hashname)]:
rejmsg.append("%s: %s check failed in %s" % (f, hashname,