X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=daklib%2Fdbconn.py;h=7221d243f226970ad91fcfce89992098d04dbe7a;hb=a8c91601641c5ae0fee611869c2cd742195a1d9f;hp=bfce1f2c8ebf184a6898328313eda700c07ad55c;hpb=48fc809473bac1a5fba47b03a62f37d1fe9b571e;p=dak.git

diff --git a/daklib/dbconn.py b/daklib/dbconn.py
index bfce1f2c..7221d243 100755
--- a/daklib/dbconn.py
+++ b/daklib/dbconn.py
@@ -38,6 +38,14 @@ import re
 import psycopg2
 import traceback
 import commands
+
+try:
+    # python >= 2.6
+    import json
+except ImportError:
+    # python <= 2.5
+    import simplejson as json
+
 from datetime import datetime, timedelta
 from errno import ENOENT
 from tempfile import mkstemp, mkdtemp
@@ -45,8 +53,10 @@ from tempfile import mkstemp, mkdtemp
 from inspect import getargspec
 
 import sqlalchemy
-from sqlalchemy import create_engine, Table, MetaData, Column, Integer
-from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, backref
+from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
+    Text, ForeignKey
+from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
+    backref, MapperExtension, EXT_CONTINUE, object_mapper
 from sqlalchemy import types as sqltypes
 
 # Don't remove this, we re-export the exceptions to scripts which import us
@@ -57,7 +67,7 @@ from sqlalchemy.orm.exc import NoResultFound
 # in the database
 from config import Config
 from textutils import fix_maintainer
-from dak_exceptions import NoSourceFieldError
+from dak_exceptions import DBUpdateError, NoSourceFieldError
 
 # suppress some deprecation warnings in squeeze related to sqlalchemy
 import warnings
@@ -156,7 +166,196 @@ __all__.append('session_wrapper')
 
 ################################################################################
 
-class Architecture(object):
+class ORMObject(object):
+    """
+    ORMObject is a base class for all ORM classes mapped by SQLAlchemy. All
+    derived classes must implement the properties() method.
+    """
+
+    def properties(self):
+        '''
+        This method should be implemented by all derived classes and returns a
+        list of the important properties. The properties 'created' and
+        'modified' will be added automatically. A suffix '_count' should be
+        added to properties that are lists or query objects. The most
+        important property name should be returned as the first element in
+        the list because it is used by repr().
+        '''
+        return []
+
+    def json(self):
+        '''
+        Returns a JSON representation of the object based on the properties
+        returned from the properties() method.
+        '''
+        data = {}
+        # add created and modified
+        all_properties = self.properties() + ['created', 'modified']
+        for property in all_properties:
+            # check for list or query
+            if property[-6:] == '_count':
+                real_property = property[:-6]
+                if not hasattr(self, real_property):
+                    continue
+                value = getattr(self, real_property)
+                if hasattr(value, '__len__'):
+                    # list
+                    value = len(value)
+                elif hasattr(value, 'count'):
+                    # query
+                    value = value.count()
+                else:
+                    raise KeyError('Do not understand property %s.' % property)
+            else:
+                if not hasattr(self, property):
+                    continue
+                # plain object
+                value = getattr(self, property)
+                if value is None:
+                    # skip None
+                    continue
+                elif isinstance(value, ORMObject):
+                    # use repr() for ORMObject types
+                    value = repr(value)
+                else:
+                    # we want a string for all other types because json cannot
+                    # encode everything
+                    value = str(value)
+            data[property] = value
+        return json.dumps(data)
+
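For illustration, a hypothetical subclass (not part of this patch) showing the properties() contract: the first entry drives repr(), and a '_count' suffix makes json() emit a length or query count instead of the value itself.

    from daklib.dbconn import ORMObject

    class Example(ORMObject):
        def __init__(self, name, items):
            self.name = name
            self.items = items
        def properties(self):
            # the first element is used by repr()
            return ['name', 'items_count']

    e = Example('foo', ['a', 'b'])
    print repr(e)    # <Example foo>
    print e.json()   # {"items_count": 2, "name": "foo"} (key order may vary)
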
+    def classname(self):
+        '''
+        Returns the name of the class.
+        '''
+        return type(self).__name__
+
+    def __repr__(self):
+        '''
+        Returns a short string representation of the object using the first
+        element from the properties() method.
+        '''
+        primary_property = self.properties()[0]
+        value = getattr(self, primary_property)
+        return '<%s %s>' % (self.classname(), str(value))
+
+    def __str__(self):
+        '''
+        Returns a human readable form of the object using the properties()
+        method.
+        '''
+        return '<%s %s>' % (self.classname(), self.json())
+
+    def not_null_constraints(self):
+        '''
+        Returns a list of properties that must be not NULL. Derived classes
+        should override this method if needed.
+        '''
+        return []
+
+    validation_message = \
+        "Validation failed because property '%s' must not be empty in object\n%s"
+
+    def validate(self):
+        '''
+        This function validates the not NULL constraints as returned by
+        not_null_constraints(). It raises the DBUpdateError exception if
+        validation fails.
+        '''
+        for property in self.not_null_constraints():
+            # TODO: It is a bit awkward that the mapper configuration allows
+            # directly setting the numeric _id columns. We should get rid of
+            # it in the long run.
+            if hasattr(self, property + '_id') and \
+                getattr(self, property + '_id') is not None:
+                continue
+            if not hasattr(self, property) or getattr(self, property) is None:
+                raise DBUpdateError(self.validation_message % \
+                    (property, str(self)))
+
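A sketch of what a failed validation looks like, using the Architecture class defined further down in this patch (its only NOT NULL constraint is arch_string); the import paths assume the usual daklib layout:

    from daklib.dbconn import Architecture
    from daklib.dak_exceptions import DBUpdateError

    arch = Architecture()            # arch_string is still None
    try:
        arch.validate()
    except DBUpdateError, e:
        print e                      # property 'arch_string' must not be empty ...
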
+    @classmethod
+    @session_wrapper
+    def get(cls, primary_key, session = None):
+        '''
+        This is a support function that allows getting an object by its
+        primary key.
+
+        Architecture.get(3[, session])
+
+        instead of the more verbose
+
+        session.query(Architecture).get(3)
+        '''
+        return session.query(cls).get(primary_key)
+
+    def session(self, replace = False):
+        '''
+        Returns the current session that is associated with the object. May
+        return None if the object is in detached state.
+        '''
+
+        return object_session(self)
+
+    def clone(self, session = None):
+        '''
+        Clones the current object in a new session and returns the new clone.
+        A fresh session is created if the optional session parameter is not
+        provided. The function will fail if a session is provided and has
+        unflushed changes.
+
+        RATIONALE: SQLAlchemy's session is not thread safe. This method clones
+        an existing object to allow several threads to work with their own
+        instances of an ORMObject.
+
+        WARNING: Only persistent (committed) objects can be cloned. Changes
+        made to the original object that are not committed yet will get lost.
+        The session of the new object will always be rolled back to avoid
+        resource leaks.
+        '''
+
+        if self.session() is None:
+            raise RuntimeError( \
+                'Method clone() failed for detached object:\n%s' % self)
+        self.session().flush()
+        mapper = object_mapper(self)
+        primary_key = mapper.primary_key_from_instance(self)
+        object_class = self.__class__
+        if session is None:
+            session = DBConn().session()
+        elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
+            raise RuntimeError( \
+                'Method clone() failed due to unflushed changes in session.')
+        new_object = session.query(object_class).get(primary_key)
+        session.rollback()
+        if new_object is None:
+            raise RuntimeError( \
+                'Method clone() failed for non-persistent object:\n%s' % self)
+        return new_object
+
+__all__.append('ORMObject')
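A usage sketch under the constraints above, assuming an open session and a committed Suite row (names illustrative):

    # give a worker thread its own private copy of a persistent object
    suite = session.query(Suite).filter_by(suite_name = 'unstable').one()
    thread_local_suite = suite.clone()    # lives in its own fresh session
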
+
+################################################################################
+
+class Validator(MapperExtension):
+    '''
+    This class calls the validate() method for each instance for the
+    'before_update' and 'before_insert' events. A global object validator is
+    used for configuring the individual mappers.
+    '''
+
+    def before_update(self, mapper, connection, instance):
+        instance.validate()
+        return EXT_CONTINUE
+
+    def before_insert(self, mapper, connection, instance):
+        instance.validate()
+        return EXT_CONTINUE
+
+validator = Validator()
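The mappers configured at the bottom of this patch pass extension = validator, so a flush runs validate() before a row reaches the database. A minimal sketch of the effect (assuming an open session from DBConn().session()):

    session = DBConn().session()
    session.add(Architecture())       # arch_string not set
    try:
        session.flush()               # Validator.before_insert -> validate()
    except DBUpdateError:
        session.rollback()
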
+
+################################################################################
+
+class Architecture(ORMObject):
     def __init__(self, arch_string = None, description = None):
         self.arch_string = arch_string
         self.description = description
@@ -173,8 +372,11 @@ class Architecture(object):
             # This signals to use the normal comparison operator
             return NotImplemented
 
-    def __repr__(self):
-        return '<Architecture %s>' % self.arch_string
+    def properties(self):
+        return ['arch_string', 'arch_id', 'suites_count']
+
+    def not_null_constraints(self):
+        return ['arch_string']
 
 __all__.append('Architecture')
 
@@ -264,34 +466,41 @@ __all__.append('get_archive')
 
 ################################################################################
 
-class BinAssociation(object):
-    def __init__(self, *args, **kwargs):
-        pass
+class BinContents(ORMObject):
+    def __init__(self, file = None, binary = None):
+        self.file = file
+        self.binary = binary
 
-    def __repr__(self):
-        return '<BinAssociation %s (%s, %s)>' % (self.ba_id, self.binary, self.suite)
+    def properties(self):
+        return ['file', 'binary']
 
-__all__.append('BinAssociation')
+__all__.append('BinContents')
 
 ################################################################################
 
-class BinContents(object):
-    def __init__(self, *args, **kwargs):
-        pass
-
-    def __repr__(self):
-        return '<BinContents (%s, %s)>' % (self.binary, self.filename)
-
-__all__.append('BinContents')
+class DBBinary(ORMObject):
+    def __init__(self, package = None, source = None, version = None, \
+        maintainer = None, architecture = None, poolfile = None, \
+        binarytype = 'deb'):
+        self.package = package
+        self.source = source
+        self.version = version
+        self.maintainer = maintainer
+        self.architecture = architecture
+        self.poolfile = poolfile
+        self.binarytype = binarytype
 
-################################################################################
+    def properties(self):
+        return ['package', 'version', 'maintainer', 'source', 'architecture', \
+            'poolfile', 'binarytype', 'fingerprint', 'install_date', \
+            'suites_count', 'binary_id', 'contents_count']
 
-class DBBinary(object):
-    def __init__(self, *args, **kwargs):
-        pass
+    def not_null_constraints(self):
+        return ['package', 'version', 'maintainer', 'source', 'poolfile', \
+            'binarytype']
 
-    def __repr__(self):
-        return '<DBBinary %s (%s, %s)>' % (self.package, self.version, self.architecture)
+    def get_component_name(self):
+        return self.poolfile.location.component.component_name
 
 __all__.append('DBBinary')
 
@@ -307,129 +516,42 @@ def get_suites_binary_in(package, session=None):
     @return: list of Suite objects for the given package
     """
 
-    return session.query(Suite).join(BinAssociation).join(DBBinary).filter_by(package=package).all()
+    return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()
 
 __all__.append('get_suites_binary_in')
 
 @session_wrapper
-def get_binary_from_id(binary_id, session=None):
-    """
-    Returns DBBinary object for given C{id}
-
-    @type binary_id: int
-    @param binary_id: Id of the required binary
-
-    @type session: Session
-    @param session: Optional SQLA session object (a temporary one will be
-    generated if not supplied)
-
-    @rtype: DBBinary
-    @return: DBBinary object for the given binary (None if not present)
-    """
-
-    q = session.query(DBBinary).filter_by(binary_id=binary_id)
-
-    try:
-        return q.one()
-    except NoResultFound:
-        return None
-
-__all__.append('get_binary_from_id')
-
-@session_wrapper
-def get_binaries_from_name(package, version=None, architecture=None, session=None):
-    """
-    Returns list of DBBinary objects for given C{package} name
+def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
+    '''
+    Returns the component name of the newest binary package in suite_list or
+    None if no package is found. The result can optionally be filtered by a
+    list of architecture names.
 
     @type package: str
     @param package: DBBinary package name to search for
 
-    @type version: str or None
-    @param version: Version to search for (or None)
-
-    @type architecture: str, list or None
-    @param architecture: Architectures to limit to (or None if no limit)
+    @type suite_list: list of str
+    @param suite_list: list of suite_name items
 
-    @type session: Session
-    @param session: Optional SQL session object (a temporary one will be
-    generated if not supplied)
-
-    @rtype: list
-    @return: list of DBBinary objects for the given name (may be empty)
-    """
+    @type arch_list: list of str
+    @param arch_list: optional list of arch_string items that defaults to []
 
-    q = session.query(DBBinary).filter_by(package=package)
+    @rtype: str or NoneType
+    @return: name of component or None
+    '''
 
-    if version is not None:
-        q = q.filter_by(version=version)
-
-    if architecture is not None:
-        if not isinstance(architecture, list):
-            architecture = [architecture]
-        q = q.join(Architecture).filter(Architecture.arch_string.in_(architecture))
-
-    ret = q.all()
-
-    return ret
-
-__all__.append('get_binaries_from_name')
-
-@session_wrapper
-def get_binaries_from_source_id(source_id, session=None):
-    """
-    Returns list of DBBinary objects for given C{source_id}
-
-    @type source_id: int
-    @param source_id: source_id to search for
-
-    @type session: Session
-    @param session: Optional SQL session object (a temporary one will be
-    generated if not supplied)
-
-    @rtype: list
-    @return: list of DBBinary objects for the given name (may be empty)
-    """
-
-    return session.query(DBBinary).filter_by(source_id=source_id).all()
-
-__all__.append('get_binaries_from_source_id')
-
-@session_wrapper
-def get_binary_from_name_suite(package, suitename, session=None):
-    ### For dak examine-package
-    ### XXX: Doesn't use object API yet
-
-    sql = """SELECT DISTINCT(b.package), b.version, c.name, su.suite_name
-             FROM binaries b, files fi, location l, component c, bin_associations ba, suite su
-             WHERE b.package='%(package)s'
-               AND b.file = fi.id
-               AND fi.location = l.id
-               AND l.component = c.id
-               AND ba.bin=b.id
-               AND ba.suite = su.id
-               AND su.suite_name %(suitename)s
-             ORDER BY b.version DESC"""
-
-    return session.execute(sql % {'package': package, 'suitename': suitename})
-
-__all__.append('get_binary_from_name_suite')
-
-@session_wrapper
-def get_binary_components(package, suitename, arch, session=None):
-    # Check for packages that have moved from one component to another
-    query = """SELECT c.name FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f
-    WHERE b.package=:package AND s.suite_name=:suitename
-      AND (a.arch_string = :arch OR a.arch_string = 'all')
-      AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
-      AND f.location = l.id
-      AND l.component = c.id
-      AND b.file = f.id"""
-
-    vals = {'package': package, 'suitename': suitename, 'arch': arch}
-
-    return session.execute(query, vals)
+    q = session.query(DBBinary).filter_by(package = package). \
+        join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
+    if len(arch_list) > 0:
+        q = q.join(DBBinary.architecture). \
+            filter(Architecture.arch_string.in_(arch_list))
+    binary = q.order_by(desc(DBBinary.version)).first()
+    if binary is None:
+        return None
+    else:
+        return binary.get_component_name()
 
-__all__.append('get_binary_components')
+__all__.append('get_component_by_package_suite')
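A usage sketch of the replacement helper (package, suite, and architecture names are illustrative; the session_wrapper creates a temporary session when none is passed):

    # which component ships the newest 'libc6' in squeeze or sid,
    # considering amd64 and arch-independent builds only?
    component = get_component_by_package_suite('libc6',
        ['squeeze', 'sid'], arch_list = ['amd64', 'all'])
    if component is None:
        print 'no such binary in those suites'
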
 
 ################################################################################
 
@@ -772,9 +894,9 @@ __all__.append('ChangePendingSource')
 
 ################################################################################
 
-class Component(object):
-    def __init__(self, *args, **kwargs):
-        pass
+class Component(ORMObject):
+    def __init__(self, component_name = None):
+        self.component_name = component_name
 
     def __eq__(self, val):
         if isinstance(val, str):
@@ -788,8 +910,12 @@ class Component(object):
         # This signals to use the normal comparison operator
         return NotImplemented
 
-    def __repr__(self):
-        return '<Component %s>' % self.component_name
+    def properties(self):
+        return ['component_name', 'component_id', 'description', \
+            'location_count', 'meets_dfsg', 'overrides_count']
+
+    def not_null_constraints(self):
+        return ['component_name']
 
 __all__.append('Component')
 
@@ -1069,24 +1195,35 @@ __all__.append('get_dscfiles')
 
 ################################################################################
 
-class PoolFile(object):
-    def __init__(self, *args, **kwargs):
-        pass
-
-    def __repr__(self):
-        return '<PoolFile %s>' % self.filename
+class PoolFile(ORMObject):
+    def __init__(self, filename = None, location = None, filesize = -1, \
+        md5sum = None):
+        self.filename = filename
+        self.location = location
+        self.filesize = filesize
+        self.md5sum = md5sum
 
     @property
     def fullpath(self):
         return os.path.join(self.location.path, self.filename)
 
+    def is_valid(self, filesize = -1, md5sum = None):
+        return self.filesize == long(filesize) and self.md5sum == md5sum
+
+    def properties(self):
+        return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
+            'sha256sum', 'location', 'source', 'binary', 'last_used']
+
+    def not_null_constraints(self):
+        return ['filename', 'md5sum', 'location']
+
 __all__.append('PoolFile')
 
 @session_wrapper
 def check_poolfile(filename, filesize, md5sum, location_id, session=None):
     """
     Returns a tuple:
-    (ValidFileFound [boolean or None], PoolFile object or None)
+    (ValidFileFound [boolean], PoolFile object or None)
 
     @type filename: string
     @param filename: the filename of the file to check against the DB
@@ -1102,34 +1239,24 @@ def check_poolfile(filename, filesize, md5sum, location_id, session=None):
 
     @rtype: tuple
     @return: Tuple of length 2.
-
-             - If more than one file found with that name: (C{None}, C{None})
              - If valid pool file found: (C{True}, C{PoolFile object})
              - If valid pool file not found:
                  - (C{False}, C{None}) if no file found
                  - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
     """
 
-    q = session.query(PoolFile).filter_by(filename=filename)
-    q = q.join(Location).filter_by(location_id=location_id)
-
-    ret = None
-
-    if q.count() > 1:
-        ret = (None, None)
-    elif q.count() < 1:
-        ret = (False, None)
-    else:
-        obj = q.one()
-        if obj.md5sum != md5sum or obj.filesize != int(filesize):
-            ret = (False, obj)
-
-    if ret is None:
-        ret = (True, obj)
+    poolfile = session.query(Location).get(location_id). \
+        files.filter_by(filename=filename).first()
+    valid = False
+    if poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum):
+        valid = True
 
-    return ret
+    return (valid, poolfile)
 
 __all__.append('check_poolfile')
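A usage sketch of the refactored check (path, size, checksum, and location_id are illustrative values, not real pool data):

    (valid, poolfile) = check_poolfile('pool/main/d/dak/dak_1.0-1.dsc',
        1234, '0123456789abcdef0123456789abcdef', location_id)
    if poolfile is None:
        print 'file not known at this location'
    elif not valid:
        print 'size or md5sum mismatch for %s' % poolfile.filename
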
 
+# TODO: the implementation can trivially be inlined at the place where the
+# function is called
 @session_wrapper
 def get_poolfile_by_id(file_id, session=None):
     """
@@ -1142,41 +1269,10 @@ def get_poolfile_by_id(file_id, session=None):
     @return: either the PoolFile object or None
     """
 
-    q = session.query(PoolFile).filter_by(file_id=file_id)
-
-    try:
-        return q.one()
-    except NoResultFound:
-        return None
+    return session.query(PoolFile).get(file_id)
 
 __all__.append('get_poolfile_by_id')
 
-@session_wrapper
-def get_poolfile_by_name(filename, location_id=None, session=None):
-    """
-    Returns an array of PoolFile objects for the given filename and
-    (optionally) location_id
-
-    @type filename: string
-    @param filename: the filename of the file to check against the DB
-
-    @type location_id: int
-    @param location_id: the id of the location to look in (optional)
-
-    @rtype: array
-    @return: array of PoolFile objects
-    """
-
-    q = session.query(PoolFile).filter_by(filename=filename)
-
-    if location_id is not None:
-        q = q.join(Location).filter_by(location_id=location_id)
-
-    return q.all()
-
-__all__.append('get_poolfile_by_name')
-
 @session_wrapper
 def get_poolfile_like_name(filename, session=None):
     """
@@ -1231,12 +1327,16 @@ __all__.append('add_poolfile')
 
 ################################################################################
 
-class Fingerprint(object):
+class Fingerprint(ORMObject):
     def __init__(self, fingerprint = None):
         self.fingerprint = fingerprint
 
-    def __repr__(self):
-        return '<Fingerprint %s>' % self.fingerprint
+    def properties(self):
+        return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
+            'binary_reject']
+
+    def not_null_constraints(self):
+        return ['fingerprint']
 
 __all__.append('Fingerprint')
 
@@ -1521,12 +1621,19 @@ __all__.append('get_dbchange')
 
 ################################################################################
 
-class Location(object):
-    def __init__(self, *args, **kwargs):
-        pass
+class Location(ORMObject):
+    def __init__(self, path = None, component = None):
+        self.path = path
+        self.component = component
+        # the column 'type' should go away, see comment at mapper
+        self.archive_type = 'pool'
 
-    def __repr__(self):
-        return '<Location %s (%s)>' % (self.path, self.location_id)
+    def properties(self):
+        return ['path', 'location_id', 'archive_type', 'component', \
+            'files_count']
+
+    def not_null_constraints(self):
+        return ['path', 'archive_type']
 
 __all__.append('Location')
 
@@ -1566,12 +1673,15 @@ __all__.append('get_location')
 
 ################################################################################
 
-class Maintainer(object):
-    def __init__(self, *args, **kwargs):
-        pass
+class Maintainer(ORMObject):
+    def __init__(self, name = None):
+        self.name = name
 
-    def __repr__(self):
-        return '''<Maintainer '%s' (%s)>''' % (self.name, self.maintainer_id)
+    def properties(self):
+        return ['name', 'maintainer_id']
+
+    def not_null_constraints(self):
+        return ['name']
 
     def get_split_maintainer(self):
         if not hasattr(self, 'name') or self.name is None:
@@ -1763,12 +1873,15 @@ __all__.append('get_override')
 
 ################################################################################
 
-class OverrideType(object):
-    def __init__(self, *args, **kwargs):
-        pass
+class OverrideType(ORMObject):
+    def __init__(self, overridetype = None):
+        self.overridetype = overridetype
 
-    def __repr__(self):
-        return '<OverrideType %s>' % self.overridetype
+    def properties(self):
+        return ['overridetype', 'overridetype_id', 'overrides_count']
+
+    def not_null_constraints(self):
+        return ['overridetype']
 
 __all__.append('OverrideType')
 
@@ -1965,9 +2078,16 @@ __all__.append('get_policy_queue_from_path')
 
 ################################################################################
 
-class Priority(object):
-    def __init__(self, *args, **kwargs):
-        pass
+class Priority(ORMObject):
+    def __init__(self, priority = None, level = None):
+        self.priority = priority
+        self.level = level
+
+    def properties(self):
+        return ['priority', 'priority_id', 'level', 'overrides_count']
+
+    def not_null_constraints(self):
+        return ['priority', 'level']
 
     def __eq__(self, val):
         if isinstance(val, str):
@@ -1981,9 +2101,6 @@ class Priority(object):
         # This signals to use the normal comparison operator
         return NotImplemented
 
-    def __repr__(self):
-        return '<Priority %s (%s)>' % (self.priority, self.priority_id)
-
 __all__.append('Priority')
 
 @session_wrapper
@@ -2035,9 +2152,15 @@ __all__.append('get_priorities')
 
 ################################################################################
 
-class Section(object):
-    def __init__(self, *args, **kwargs):
-        pass
+class Section(ORMObject):
+    def __init__(self, section = None):
+        self.section = section
+
+    def properties(self):
+        return ['section', 'section_id', 'overrides_count']
+
+    def not_null_constraints(self):
+        return ['section']
 
     def __eq__(self, val):
         if isinstance(val, str):
@@ -2051,9 +2174,6 @@ class Section(object):
         # This signals to use the normal comparison operator
         return NotImplemented
 
-    def __repr__(self):
-        return '<Section %s>' % self.section
-
 __all__.append('Section')
 
 @session_wrapper
@@ -2105,12 +2225,24 @@ __all__.append('get_sections')
 
 ################################################################################
 
-class DBSource(object):
-    def __init__(self, *args, **kwargs):
-        pass
+class DBSource(ORMObject):
+    def __init__(self, source = None, version = None, maintainer = None, \
+        changedby = None, poolfile = None, install_date = None):
+        self.source = source
+        self.version = version
+        self.maintainer = maintainer
+        self.changedby = changedby
+        self.poolfile = poolfile
+        self.install_date = install_date
 
-    def __repr__(self):
-        return '<DBSource %s (%s)>' % (self.source, self.version)
+    def properties(self):
+        return ['source', 'source_id', 'maintainer', 'changedby', \
+            'fingerprint', 'poolfile', 'version', 'suites_count', \
+            'install_date', 'binaries_count']
+
+    def not_null_constraints(self):
+        return ['source', 'version', 'install_date', 'maintainer', \
+            'changedby', 'poolfile']
 
 __all__.append('DBSource')
 
@@ -2141,10 +2273,14 @@ def source_exists(source, source_version, suites = ["any"], session=None):
     """
 
     cnf = Config()
-    ret = 1
+    ret = True
+
+    from daklib.regexes import re_bin_only_nmu
+    orig_source_version = re_bin_only_nmu.sub('', source_version)
 
     for suite in suites:
-        q = session.query(DBSource).filter_by(source=source)
+        q = session.query(DBSource).filter_by(source=source). \
+            filter(DBSource.version.in_([source_version, orig_source_version]))
         if suite != "any":
             # source must exist in suite X, or in some other suite that's
            # mapped to X, recursively... silent-maps are counted too,
@@ -2155,28 +2291,17 @@ def source_exists(source, source_version, suites = ["any"], session=None):
             maps = [ (x[1], x[2]) for x in maps
                      if x[0] == "map" or x[0] == "silent-map" ]
             s = [suite]
-            for x in maps:
-                if x[1] in s and x[0] not in s:
-                    s.append(x[0])
+            for (from_, to) in maps:
+                if from_ in s and to not in s:
+                    s.append(to)
 
-            q = q.join(SrcAssociation).join(Suite)
-            q = q.filter(Suite.suite_name.in_(s))
-
-        # Reduce the query results to a list of version numbers
-        ql = [ j.version for j in q.all() ]
-
-        # Try (1)
-        if source_version in ql:
-            continue
+            q = q.filter(DBSource.suites.any(Suite.suite_name.in_(s)))
 
-        # Try (2)
-        from daklib.regexes import re_bin_only_nmu
-        orig_source_version = re_bin_only_nmu.sub('', source_version)
-        if orig_source_version in ql:
+        if q.count() > 0:
             continue
 
         # No source found so return not ok
-        ret = 0
+        ret = False
 
     return ret
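A usage sketch of the rewritten check (package and version are illustrative; a binary-only NMU version such as '1.2-3+b1' now also matches the plain source version '1.2-3', because re_bin_only_nmu strips the '+bN' suffix up front):

    if source_exists('dak', '1.2-3+b1', suites = ['unstable']):
        print 'matching source found, accepting binary-only NMU'
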
 
@@ -2194,7 +2319,7 @@ def get_suites_source_in(source, session=None):
     @return: list of Suite objects for the given source
     """
 
-    return session.query(Suite).join(SrcAssociation).join(DBSource).filter_by(source=source).all()
+    return session.query(Suite).filter(Suite.sources.any(source=source)).all()
 
 __all__.append('get_suites_source_in')
 
@@ -2233,10 +2358,12 @@ def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=
 
 __all__.append('get_sources_from_name')
 
+# FIXME: This function fails badly if it finds more than 1 source package and
+# its implementation is trivial enough to be inlined.
 @session_wrapper
 def get_source_in_suite(source, suite, session=None):
     """
-    Returns list of DBSource objects for a combination of C{source} and C{suite}.
+    Returns a DBSource object for a combination of C{source} and C{suite}.
 
       - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}
 
@@ -2252,12 +2379,9 @@ def get_source_in_suite(source, suite, session=None):
 
     """
 
-    q = session.query(SrcAssociation)
-    q = q.join('source').filter_by(source=source)
-    q = q.join('suite').filter_by(suite_name=suite)
-
+    q = get_suite(suite, session).get_sources(source)
     try:
-        return q.one().source
+        return q.one()
     except NoResultFound:
         return None
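A usage sketch (source and suite names illustrative; per the FIXME above, q.one() raises if several versions of the source sit in the suite at once):

    source = get_source_in_suite('mailfilter', 'unstable')
    if source is not None:
        print '%s %s' % (source.source, source.version)
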
 
@@ -2293,15 +2417,10 @@ def add_dsc_to_db(u, filename, session=None):
     source.poolfile_id = entry["files id"]
     session.add(source)
-    session.flush()
 
-    for suite_name in u.pkg.changes["distribution"].keys():
-        sa = SrcAssociation()
-        sa.source_id = source.source_id
-        sa.suite_id = get_suite(suite_name).suite_id
-        session.add(sa)
-
-    session.flush()
+    suite_names = u.pkg.changes["distribution"].keys()
+    source.suites = session.query(Suite). \
+        filter(Suite.suite_name.in_(suite_names)).all()
 
     # Add the source files to the DB (files and dsc_files)
     dscfile = DSCFile()
@@ -2351,8 +2470,6 @@ def add_dsc_to_db(u, filename, session=None):
             df.poolfile_id = files_id
             session.add(df)
 
-    session.flush()
-
     # Add the src_uploaders to the DB
     uploader_ids = [source.maintainer_id]
     if u.pkg.dsc.has_key("uploaders"):
@@ -2422,14 +2539,10 @@ def add_deb_to_db(u, filename, session=None):
 
     # Add and flush object so it has an ID
     session.add(bin)
-    session.flush()
 
-    # Add BinAssociations
-    for suite_name in u.pkg.changes["distribution"].keys():
-        ba = BinAssociation()
-        ba.binary_id = bin.binary_id
-        ba.suite_id = get_suite(suite_name).suite_id
-        session.add(ba)
+    suite_names = u.pkg.changes["distribution"].keys()
+    bin.suites = session.query(Suite). \
+        filter(Suite.suite_name.in_(suite_names)).all()
 
     session.flush()
 
@@ -2457,17 +2570,6 @@ __all__.append('SourceACL')
 
 ################################################################################
 
-class SrcAssociation(object):
-    def __init__(self, *args, **kwargs):
-        pass
-
-    def __repr__(self):
-        return '<SrcAssociation %s (%s, %s)>' % (self.sa_id, self.source, self.suite)
-
-__all__.append('SrcAssociation')
-
-################################################################################
-
 class SrcFormat(object):
     def __init__(self, *args, **kwargs):
         pass
@@ -2506,13 +2608,19 @@ SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                  ('CopyChanges', 'copychanges'),
                  ('OverrideSuite', 'overridesuite')]
 
-class Suite(object):
+# Why the heck don't we have any UNIQUE constraints in table suite?
+# TODO: Add UNIQUE constraints for appropriate columns.
+class Suite(ORMObject):
     def __init__(self, suite_name = None, version = None):
         self.suite_name = suite_name
         self.version = version
 
-    def __repr__(self):
-        return '<Suite %s>' % self.suite_name
+    def properties(self):
+        return ['suite_name', 'version', 'sources_count', 'binaries_count', \
+            'overrides_count']
+
+    def not_null_constraints(self):
+        return ['suite_name', 'version']
 
     def __eq__(self, val):
         if isinstance(val, str):
@@ -2551,14 +2659,31 @@ class Suite(object):
         @return: list of Architecture objects for the given name (may be empty)
         """
 
-        q = object_session(self).query(Architecture). \
-            filter(Architecture.suites.contains(self))
+        q = object_session(self).query(Architecture).with_parent(self)
         if skipsrc:
             q = q.filter(Architecture.arch_string != 'source')
         if skipall:
             q = q.filter(Architecture.arch_string != 'all')
         return q.order_by(Architecture.arch_string).all()
 
+    def get_sources(self, source):
+        """
+        Returns a query object representing DBSource that is part of C{suite}.
+
+          - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
+
+        @type source: string
+        @param source: source package name
+
+        @rtype: sqlalchemy.orm.query.Query
+        @return: a query of DBSource
+        """
+
+        session = object_session(self)
+        return session.query(DBSource).filter_by(source = source). \
+            with_parent(self)
+
 __all__.append('Suite')
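A usage sketch of the new Suite helpers (suite and package names illustrative; assumes the suite exists):

    suite = get_suite('unstable')
    if suite is not None:
        print suite.get_architectures(skipsrc = True, skipall = True)
        # get_sources() returns a Query, so callers can refine it in SQL
        print suite.get_sources('glibc').count()
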
 
 @session_wrapper
@@ -2655,7 +2780,7 @@ __all__.append('get_suite_src_formats')
 
 ################################################################################
 
-class Uid(object):
+class Uid(ORMObject):
     def __init__(self, uid = None, name = None):
         self.uid = uid
         self.name = name
@@ -2672,8 +2797,11 @@ class Uid(object):
         # This signals to use the normal comparison operator
         return NotImplemented
 
-    def __repr__(self):
-        return '<Uid %s (%s)>' % (self.uid, self.name)
+    def properties(self):
+        return ['uid', 'name', 'fingerprint']
+
+    def not_null_constraints(self):
+        return ['uid']
 
 __all__.append('Uid')
 
@@ -2759,7 +2887,9 @@ class DBConn(object):
             'binary_acl',
             'binary_acl_map',
             'build_queue',
+            'build_queue_files',
             'changelogs_text',
+            'changes',
             'component',
             'config',
             'changes_pending_binaries',
@@ -2786,15 +2916,9 @@ class DBConn(object):
             'suite',
             'uid',
             'upload_blocks',
-            # The following tables have primary keys but sqlalchemy
-            # version 0.5 fails to reflect them correctly with database
-            # versions before upgrade #41.
-            #'changes',
-            #'build_queue_files',
         )
 
         tables_no_primary = (
-            'bin_contents',
             'changes_pending_files_map',
             'changes_pending_source_files',
             'changes_pool_files',
@@ -2804,9 +2928,6 @@ class DBConn(object):
             'suite_src_formats',
             'suite_build_queue_copy',
             'udeb_contents',
-            # see the comment above
-            'changes',
-            'build_queue_files',
        )
 
         views = (
@@ -2846,28 +2967,30 @@ class DBConn(object):
             table = Table(table_name, self.db_meta, autoload=True)
             setattr(self, 'tbl_%s' % table_name, table)
 
+        # bin_contents needs special attention until update #41 has been
+        # applied
+        self.tbl_bin_contents = Table('bin_contents', self.db_meta, \
+            Column('file', Text, primary_key = True),
+            Column('binary_id', Integer, ForeignKey('binaries.id'), \
+                primary_key = True),
+            autoload=True, useexisting=True)
+
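The pattern used here is generic SQLAlchemy: reflect the table with autoload and override only the key columns that the database does not declare yet. A standalone sketch with a hypothetical table and engine (not dak's schema):

    from sqlalchemy import create_engine, MetaData, Table, Column, Integer, Text

    engine = create_engine('postgres:///example')
    meta = MetaData(bind = engine)
    # reflection fills in the remaining columns; the explicit Column()
    # arguments supply the composite primary key the database lacks
    contents = Table('example_contents', meta,
        Column('file', Text, primary_key = True),
        Column('binary_id', Integer, primary_key = True),
        autoload = True, useexisting = True)
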
         for view_name in views:
             view = Table(view_name, self.db_meta, autoload=True)
             setattr(self, 'view_%s' % view_name, view)
 
     def __setupmappers(self):
         mapper(Architecture, self.tbl_architecture,
-           properties = dict(arch_id = self.tbl_architecture.c.id,
+            properties = dict(arch_id = self.tbl_architecture.c.id,
                suites = relation(Suite, secondary=self.tbl_suite_architectures,
                    order_by='suite_name',
-                   backref=backref('architectures', order_by='arch_string'))))
+                   backref=backref('architectures', order_by='arch_string'))),
+            extension = validator)
 
         mapper(Archive, self.tbl_archive,
                properties = dict(archive_id = self.tbl_archive.c.id,
                                  archive_name = self.tbl_archive.c.name))
 
-        mapper(BinAssociation, self.tbl_bin_associations,
-               properties = dict(ba_id = self.tbl_bin_associations.c.id,
-                                 suite_id = self.tbl_bin_associations.c.suite,
-                                 suite = relation(Suite),
-                                 binary_id = self.tbl_bin_associations.c.bin,
-                                 binary = relation(DBBinary)))
-
         mapper(PendingBinContents, self.tbl_pending_bin_contents,
                properties = dict(contents_id =self.tbl_pending_bin_contents.c.id,
                                  filename = self.tbl_pending_bin_contents.c.filename,
@@ -2906,17 +3029,18 @@ class DBConn(object):
                                  maintainer_id = self.tbl_binaries.c.maintainer,
                                  maintainer = relation(Maintainer),
                                  source_id = self.tbl_binaries.c.source,
-                                 source = relation(DBSource),
+                                 source = relation(DBSource, backref='binaries'),
                                  arch_id = self.tbl_binaries.c.architecture,
                                  architecture = relation(Architecture),
                                  poolfile_id = self.tbl_binaries.c.file,
-                                 poolfile = relation(PoolFile),
+                                 poolfile = relation(PoolFile, backref=backref('binary', uselist = False)),
                                  binarytype = self.tbl_binaries.c.type,
                                  fingerprint_id = self.tbl_binaries.c.sig_fpr,
                                  fingerprint = relation(Fingerprint),
                                  install_date = self.tbl_binaries.c.install_date,
-                                 binassociations = relation(BinAssociation,
-                                                            primaryjoin=(self.tbl_binaries.c.id==self.tbl_bin_associations.c.bin))))
+                                 suites = relation(Suite, secondary=self.tbl_bin_associations,
+                                     backref=backref('binaries', lazy='dynamic'))),
+               extension = validator)
 
         mapper(BinaryACL, self.tbl_binary_acl,
                properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))
@@ -2928,7 +3052,8 @@ class DBConn(object):
 
         mapper(Component, self.tbl_component,
                properties = dict(component_id = self.tbl_component.c.id,
-                                 component_name = self.tbl_component.c.name))
+                                 component_name = self.tbl_component.c.name),
+               extension = validator)
 
         mapper(DBConfig, self.tbl_config,
                properties = dict(config_id = self.tbl_config.c.id))
@@ -2944,7 +3069,12 @@ class DBConn(object):
                properties = dict(file_id = self.tbl_files.c.id,
                                  filesize = self.tbl_files.c.size,
                                  location_id = self.tbl_files.c.location,
-                                 location = relation(Location)))
+                                 location = relation(Location,
+                                     # using lazy='dynamic' in the back
+                                     # reference because we have A LOT of
+                                     # files in one location
+                                     backref=backref('files', lazy='dynamic'))),
+               extension = validator)
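A sketch of what the lazy='dynamic' back reference buys (assuming an open session): the collection is a Query rather than a list, so a large pool location is never loaded wholesale:

    loc = session.query(Location).first()
    if loc is not None:
        print loc.files.count()       # SELECT count(*), no object fetch
        dsc = loc.files.filter(PoolFile.filename.like('%.dsc')).first()
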
 
         mapper(Fingerprint, self.tbl_fingerprint,
                properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
@@ -2953,7 +3083,8 @@ class DBConn(object):
                                  keyring_id = self.tbl_fingerprint.c.keyring,
                                  keyring = relation(Keyring),
                                  source_acl = relation(SourceACL),
-                                 binary_acl = relation(BinaryACL)))
+                                 binary_acl = relation(BinaryACL)),
+               extension = validator)
 
         mapper(Keyring, self.tbl_keyrings,
                properties = dict(keyring_name = self.tbl_keyrings.c.name,
@@ -3014,29 +3145,42 @@ class DBConn(object):
         mapper(Location, self.tbl_location,
                properties = dict(location_id = self.tbl_location.c.id,
                                  component_id = self.tbl_location.c.component,
-                                 component = relation(Component),
+                                 component = relation(Component, backref='location'),
                                  archive_id = self.tbl_location.c.archive,
                                  archive = relation(Archive),
-                                 archive_type = self.tbl_location.c.type))
+                                 # FIXME: the 'type' column is old cruft and
+                                 # should be removed in the future.
+                                 archive_type = self.tbl_location.c.type),
+               extension = validator)
 
         mapper(Maintainer, self.tbl_maintainer,
-               properties = dict(maintainer_id = self.tbl_maintainer.c.id))
+               properties = dict(maintainer_id = self.tbl_maintainer.c.id,
+                   maintains_sources = relation(DBSource, backref='maintainer',
+                       primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
+                   changed_sources = relation(DBSource, backref='changedby',
+                       primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
+               extension = validator)
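Because the source table references maintainer twice, the two relations need explicit primaryjoins; usage might look like this (name illustrative, assuming an open session):

    m = session.query(Maintainer). \
        filter_by(name = 'Jane Doe <jane@example.org>').first()
    if m is not None:
        print len(m.maintains_sources), len(m.changed_sources)
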
 
         mapper(NewComment, self.tbl_new_comments,
                properties = dict(comment_id = self.tbl_new_comments.c.id))
 
         mapper(Override, self.tbl_override,
                properties = dict(suite_id = self.tbl_override.c.suite,
-                                 suite = relation(Suite),
+                                 suite = relation(Suite, \
+                                    backref=backref('overrides', lazy='dynamic')),
                                  package = self.tbl_override.c.package,
                                  component_id = self.tbl_override.c.component,
-                                 component = relation(Component),
+                                 component = relation(Component, \
+                                    backref=backref('overrides', lazy='dynamic')),
                                  priority_id = self.tbl_override.c.priority,
-                                 priority = relation(Priority),
+                                 priority = relation(Priority, \
+                                    backref=backref('overrides', lazy='dynamic')),
                                  section_id = self.tbl_override.c.section,
-                                 section = relation(Section),
+                                 section = relation(Section, \
+                                    backref=backref('overrides', lazy='dynamic')),
                                  overridetype_id = self.tbl_override.c.type,
-                                 overridetype = relation(OverrideType)))
+                                 overridetype = relation(OverrideType, \
+                                    backref=backref('overrides', lazy='dynamic'))))
 
         mapper(OverrideType, self.tbl_override_type,
                properties = dict(overridetype = self.tbl_override_type.c.type,
@@ -3056,31 +3200,21 @@ class DBConn(object):
                properties = dict(source_id = self.tbl_source.c.id,
                                  version = self.tbl_source.c.version,
                                  maintainer_id = self.tbl_source.c.maintainer,
-                                 maintainer = relation(Maintainer,
-                                                       primaryjoin=(self.tbl_source.c.maintainer==self.tbl_maintainer.c.id)),
                                  poolfile_id = self.tbl_source.c.file,
-                                 poolfile = relation(PoolFile),
+                                 poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
                                  fingerprint_id = self.tbl_source.c.sig_fpr,
                                  fingerprint = relation(Fingerprint),
                                  changedby_id = self.tbl_source.c.changedby,
-                                 changedby = relation(Maintainer,
-                                                      primaryjoin=(self.tbl_source.c.changedby==self.tbl_maintainer.c.id)),
                                  srcfiles = relation(DSCFile,
                                                      primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
                                  suites = relation(Suite, secondary=self.tbl_src_associations,
-                                     backref='sources'),
-                                 srcuploaders = relation(SrcUploader)))
+                                     backref=backref('sources', lazy='dynamic')),
+                                 srcuploaders = relation(SrcUploader)),
+               extension = validator)
 
         mapper(SourceACL, self.tbl_source_acl,
                properties = dict(source_acl_id = self.tbl_source_acl.c.id))
 
-        mapper(SrcAssociation, self.tbl_src_associations,
-               properties = dict(sa_id = self.tbl_src_associations.c.id,
-                                 suite_id = self.tbl_src_associations.c.suite,
-                                 suite = relation(Suite),
-                                 source_id = self.tbl_src_associations.c.source,
-                                 source = relation(DBSource)))
-
         mapper(SrcFormat, self.tbl_src_format,
                properties = dict(src_format_id = self.tbl_src_format.c.id,
                                  format_name = self.tbl_src_format.c.format_name))
@@ -3097,7 +3231,9 @@ class DBConn(object):
         mapper(Suite, self.tbl_suite,
                properties = dict(suite_id = self.tbl_suite.c.id,
                                  policy_queue = relation(PolicyQueue),
-                                 copy_queues = relation(BuildQueue, secondary=self.tbl_suite_build_queue_copy)))
+                                 copy_queues = relation(BuildQueue,
+                                     secondary=self.tbl_suite_build_queue_copy)),
+               extension = validator)
 
         mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
                properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
@@ -3107,13 +3243,20 @@ class DBConn(object):
 
         mapper(Uid, self.tbl_uid,
                properties = dict(uid_id = self.tbl_uid.c.id,
-                                 fingerprint = relation(Fingerprint)))
+                                 fingerprint = relation(Fingerprint)),
+               extension = validator)
 
         mapper(UploadBlock, self.tbl_upload_blocks,
                properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
                                  fingerprint = relation(Fingerprint, backref="uploadblocks"),
                                  uid = relation(Uid, backref="uploadblocks")))
 
+        mapper(BinContents, self.tbl_bin_contents,
+            properties = dict(
+                binary = relation(DBBinary,
+                    backref=backref('contents', lazy='dynamic')),
+                file = self.tbl_bin_contents.c.file))
+
     ## Connection functions
     def __createconn(self):
         from config import Config
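Taken together, a quick interactive smoke test of the new mappings might look like this (hypothetical, assuming a populated database; the comments note which new feature each line exercises):

    session = DBConn().session()
    binary = session.query(DBBinary).filter_by(package = 'dak').first()
    if binary is not None:
        print binary                   # JSON dump via ORMObject.__str__
        print binary.contents.count()  # lazy='dynamic' backref from BinContents
        print binary.suites            # plain list via bin_associations
    session.rollback()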