Convert class Component to ORMObject.
diff --git a/daklib/dbconn.py b/daklib/dbconn.py
index 1364539422961acf89e3a422e9cbf66a5f864e18..e1e21d2b2b1f10a386dd42c50460287d7e6c8f4d 100755
--- a/daklib/dbconn.py
+++ b/daklib/dbconn.py
@@ -54,7 +54,8 @@ from inspect import getargspec
 
 import sqlalchemy
 from sqlalchemy import create_engine, Table, MetaData, Column, Integer
-from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, backref
+from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
+    backref, MapperExtension, EXT_CONTINUE
 from sqlalchemy import types as sqltypes
 
 # Don't remove this, we re-export the exceptions to scripts which import us
@@ -65,7 +66,7 @@ from sqlalchemy.orm.exc import NoResultFound
 # in the database
 from config import Config
 from textutils import fix_maintainer
-from dak_exceptions import NoSourceFieldError
+from dak_exceptions import DBUpdateError, NoSourceFieldError
 
 # suppress some deprecation warnings in squeeze related to sqlalchemy
 import warnings
@@ -167,7 +168,7 @@ __all__.append('session_wrapper')
 class ORMObject(object):
     """
     ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
-    derived classes must implement the summary() method.
+    derived classes must implement the properties() method.
     """
 
     def properties(self):
@@ -192,7 +193,10 @@ class ORMObject(object):
         for property in all_properties:
             # check for list or query
             if property[-6:] == '_count':
-                value = getattr(self, property[:-6])
+                real_property = property[:-6]
+                if not hasattr(self, real_property):
+                    continue
+                value = getattr(self, real_property)
                 if hasattr(value, '__len__'):
                     # list
                     value = len(value)
@@ -202,17 +206,19 @@ class ORMObject(object):
                 else:
                     raise KeyError('Do not understand property %s.' % property)
             else:
+                if not hasattr(self, property):
+                    continue
                 # plain object
                 value = getattr(self, property)
                 if value is None:
                     # skip None
-                    pass
+                    continue
                 elif isinstance(value, ORMObject):
                     # use repr() for ORMObject types
                     value = repr(value)
                 else:
                     # we want a string for all other types because json cannot
-                    # everything
+                    # encode everything
                     value = str(value)
             data[property] = value
         return json.dumps(data)
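
The json() method simply walks whatever list properties() returns, so a derived class only has to name the attributes it wants serialized. A minimal sketch (FooBar and its attribute are made up for illustration, not part of dak):

    # Hypothetical example: properties() drives the json()/repr() output.
    class FooBar(ORMObject):
        def __init__(self, name = None):
            self.name = name

        def properties(self):
            # plain attributes are serialized directly; an entry ending in
            # '_count' would instead be resolved to the size of the
            # underlying list or query
            return ['name']

    # repr(FooBar('x')) yields something like: <FooBar {"name": "x"}>
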
@@ -239,10 +245,71 @@ class ORMObject(object):
         '''
         return '<%s %s>' % (self.classname(), self.json())
 
+    def not_null_constraints(self):
+        '''
+        Returns a list of properties that must not be NULL. Derived classes
+        should override this method if needed.
+        '''
+        return []
+
+    validation_message = \
+        "Validation failed because property '%s' must not be empty in object\n%s"
+
+    def validate(self):
+        '''
+        This function validates the not NULL constraints as returned by
+        not_null_constraints(). It raises the DBUpdateError exception if
+        validation fails.
+        '''
+        for property in self.not_null_constraints():
+            # TODO: It is a bit awkward that the mapper configuration allows
+            # directly setting the numeric _id columns. We should get rid of it
+            # in the long run.
+            if hasattr(self, property + '_id') and \
+                getattr(self, property + '_id') is not None:
+                continue
+            if not hasattr(self, property) or getattr(self, property) is None:
+                raise DBUpdateError(self.validation_message % \
+                    (property, str(self)))
+
+    @classmethod
+    @session_wrapper
+    def get(cls, primary_key, session = None):
+        '''
+        This is a support function that allows getting an object by its primary
+        key.
+
+        Architecture.get(3[, session])
+
+        instead of the more verbose
+
+        session.query(Architecture).get(3)
+        '''
+        return session.query(cls).get(primary_key)
+
 __all__.append('ORMObject')
 
 ################################################################################
 
+class Validator(MapperExtension):
+    '''
+    This class calls the validate() method of each instance on the
+    'before_update' and 'before_insert' mapper events. The global object
+    'validator' is used for configuring the individual mappers.
+    '''
+
+    def before_update(self, mapper, connection, instance):
+        instance.validate()
+        return EXT_CONTINUE
+
+    def before_insert(self, mapper, connection, instance):
+        instance.validate()
+        return EXT_CONTINUE
+
+validator = Validator()
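
The single validator instance is handed to the individual mapper() calls below via their extension argument, so SQLAlchemy runs validate() right before each INSERT or UPDATE. A sketch of the net effect, assuming a session from DBConn().session() as used elsewhere in daklib:

    # Sketch: flushing an invalid object now fails early with DBUpdateError.
    session = DBConn().session()
    session.add(Architecture())        # arch_string is still None
    session.flush()                    # Validator.before_insert() calls validate(),
                                       # which raises DBUpdateError before the INSERT
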
+
+################################################################################
+
 class Architecture(ORMObject):
     def __init__(self, arch_string = None, description = None):
         self.arch_string = arch_string
@@ -263,6 +330,9 @@ class Architecture(ORMObject):
     def properties(self):
         return ['arch_string', 'arch_id', 'suites_count']
 
+    def not_null_constraints(self):
+        return ['arch_string']
+
 __all__.append('Architecture')
 
 @session_wrapper
@@ -351,17 +421,6 @@ __all__.append('get_archive')
 
 ################################################################################
 
-class BinAssociation(object):
-    def __init__(self, *args, **kwargs):
-        pass
-
-    def __repr__(self):
-        return '<BinAssociation %s (%s, %s)>' % (self.ba_id, self.binary, self.suite)
-
-__all__.append('BinAssociation')
-
-################################################################################
-
 class BinContents(object):
     def __init__(self, *args, **kwargs):
         pass
@@ -373,12 +432,26 @@ __all__.append('BinContents')
 
 ################################################################################
 
-class DBBinary(object):
-    def __init__(self, *args, **kwargs):
-        pass
+class DBBinary(ORMObject):
+    def __init__(self, package = None, source = None, version = None, \
+        maintainer = None, architecture = None, poolfile = None, \
+        binarytype = 'deb'):
+        self.package = package
+        self.source = source
+        self.version = version
+        self.maintainer = maintainer
+        self.architecture = architecture
+        self.poolfile = poolfile
+        self.binarytype = binarytype
 
-    def __repr__(self):
-        return '<DBBinary %s (%s, %s)>' % (self.package, self.version, self.architecture)
+    def properties(self):
+        return ['package', 'version', 'maintainer', 'source', 'architecture', \
+            'poolfile', 'binarytype', 'fingerprint', 'install_date', \
+            'suites_count', 'binary_id']
+
+    def not_null_constraints(self):
+        return ['package', 'version', 'maintainer', 'source', 'poolfile', \
+            'binarytype']
 
 __all__.append('DBBinary')
 
@@ -394,93 +467,10 @@ def get_suites_binary_in(package, session=None):
     @return: list of Suite objects for the given package
     """
 
-    return session.query(Suite).join(BinAssociation).join(DBBinary).filter_by(package=package).all()
+    return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()
 
 __all__.append('get_suites_binary_in')
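
The rewritten query goes through the new Suite.binaries relation (set up in __setupmappers() below) instead of joining the dropped BinAssociation class. The same .any() idiom extends to more specific membership tests, for example (package and version values are illustrative):

    # Sketch: which suites carry a particular package version?
    suites = session.query(Suite).filter(Suite.binaries.any( \
        (DBBinary.package == 'hello') & (DBBinary.version == '2.4-1'))).all()
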
 
-@session_wrapper
-def get_binary_from_id(binary_id, session=None):
-    """
-    Returns DBBinary object for given C{id}
-
-    @type binary_id: int
-    @param binary_id: Id of the required binary
-
-    @type session: Session
-    @param session: Optional SQLA session object (a temporary one will be
-    generated if not supplied)
-
-    @rtype: DBBinary
-    @return: DBBinary object for the given binary (None if not present)
-    """
-
-    q = session.query(DBBinary).filter_by(binary_id=binary_id)
-
-    try:
-        return q.one()
-    except NoResultFound:
-        return None
-
-__all__.append('get_binary_from_id')
-
-@session_wrapper
-def get_binaries_from_name(package, version=None, architecture=None, session=None):
-    """
-    Returns list of DBBinary objects for given C{package} name
-
-    @type package: str
-    @param package: DBBinary package name to search for
-
-    @type version: str or None
-    @param version: Version to search for (or None)
-
-    @type architecture: str, list or None
-    @param architecture: Architectures to limit to (or None if no limit)
-
-    @type session: Session
-    @param session: Optional SQL session object (a temporary one will be
-    generated if not supplied)
-
-    @rtype: list
-    @return: list of DBBinary objects for the given name (may be empty)
-    """
-
-    q = session.query(DBBinary).filter_by(package=package)
-
-    if version is not None:
-        q = q.filter_by(version=version)
-
-    if architecture is not None:
-        if not isinstance(architecture, list):
-            architecture = [architecture]
-        q = q.join(Architecture).filter(Architecture.arch_string.in_(architecture))
-
-    ret = q.all()
-
-    return ret
-
-__all__.append('get_binaries_from_name')
-
-@session_wrapper
-def get_binaries_from_source_id(source_id, session=None):
-    """
-    Returns list of DBBinary objects for given C{source_id}
-
-    @type source_id: int
-    @param source_id: source_id to search for
-
-    @type session: Session
-    @param session: Optional SQL session object (a temporary one will be
-    generated if not supplied)
-
-    @rtype: list
-    @return: list of DBBinary objects for the given name (may be empty)
-    """
-
-    return session.query(DBBinary).filter_by(source_id=source_id).all()
-
-__all__.append('get_binaries_from_source_id')
-
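
The three removed helpers have no direct replacement in this file; callers are presumably expected to use plain queries (or ORMObject.get()) instead. Hedged sketches of the equivalents, with illustrative argument values:

    # Sketches of direct queries standing in for the removed helpers.
    binary = session.query(DBBinary).get(12345)                  # was get_binary_from_id()
    named  = session.query(DBBinary). \
        filter_by(package = 'hello', version = '2.4-1').all()    # was get_binaries_from_name()
    built  = session.query(DBBinary). \
        filter_by(source_id = 678).all()                         # was get_binaries_from_source_id()
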
 @session_wrapper
 def get_binary_from_name_suite(package, suitename, session=None):
     ### For dak examine-package
@@ -859,9 +849,9 @@ __all__.append('ChangePendingSource')
 
 ################################################################################
 
-class Component(object):
-    def __init__(self, *args, **kwargs):
-        pass
+class Component(ORMObject):
+    def __init__(self, component_name = None):
+        self.component_name = component_name
 
     def __eq__(self, val):
         if isinstance(val, str):
@@ -875,8 +865,12 @@ class Component(object):
         # This signals to use the normal comparison operator
         return NotImplemented
 
-    def __repr__(self):
-        return '<Component %s>' % self.component_name
+    def properties(self):
+        return ['component_name', 'component_id', 'description', 'location', \
+            'meets_dfsg']
+
+    def not_null_constraints(self):
+        return ['component_name']
 
 
 __all__.append('Component')
@@ -1173,7 +1167,10 @@ class PoolFile(ORMObject):
 
     def properties(self):
         return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
-            'sha256sum', 'location', 'source', 'last_used']
+            'sha256sum', 'location', 'source', 'binary', 'last_used']
+
+    def not_null_constraints(self):
+        return ['filename', 'md5sum', 'location']
 
 __all__.append('PoolFile')
 
@@ -1285,12 +1282,16 @@ __all__.append('add_poolfile')
 
 ################################################################################
 
-class Fingerprint(object):
+class Fingerprint(ORMObject):
     def __init__(self, fingerprint = None):
         self.fingerprint = fingerprint
 
-    def __repr__(self):
-        return '<Fingerprint %s>' % self.fingerprint
+    def properties(self):
+        return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
+            'binary_reject']
+
+    def not_null_constraints(self):
+        return ['fingerprint']
 
 __all__.append('Fingerprint')
 
@@ -1575,14 +1576,20 @@ __all__.append('get_dbchange')
 
 ################################################################################
 
-class Location(object):
-    def __init__(self, path = None):
+# TODO: Why do we have a separate Location class? Can't it be fully integrated
+# into class Component?
+class Location(ORMObject):
+    def __init__(self, path = None, component = None):
         self.path = path
+        self.component = component
         # the column 'type' should go away, see comment at mapper
         self.archive_type = 'pool'
 
-    def __repr__(self):
-        return '<Location %s (%s)>' % (self.path, self.location_id)
+    def properties(self):
+        return ['path', 'archive_type', 'component', 'files_count']
+
+    def not_null_constraints(self):
+        return ['path', 'archive_type']
 
 __all__.append('Location')
 
@@ -1622,12 +1629,15 @@ __all__.append('get_location')
 
 ################################################################################
 
-class Maintainer(object):
+class Maintainer(ORMObject):
     def __init__(self, name = None):
         self.name = name
 
-    def __repr__(self):
-        return '''<Maintainer '%s' (%s)>''' % (self.name, self.maintainer_id)
+    def properties(self):
+        return ['name', 'maintainer_id']
+
+    def not_null_constraints(self):
+        return ['name']
 
     def get_split_maintainer(self):
         if not hasattr(self, 'name') or self.name is None:
@@ -2161,7 +2171,7 @@ __all__.append('get_sections')
 
 ################################################################################
 
-class DBSource(object):
+class DBSource(ORMObject):
     def __init__(self, source = None, version = None, maintainer = None, \
         changedby = None, poolfile = None, install_date = None):
         self.source = source
@@ -2171,8 +2181,14 @@ class DBSource(object):
         self.poolfile = poolfile
         self.install_date = install_date
 
-    def __repr__(self):
-        return '<DBSource %s (%s)>' % (self.source, self.version)
+    def properties(self):
+        return ['source', 'source_id', 'maintainer', 'changedby', \
+            'fingerprint', 'poolfile', 'version', 'suites_count', \
+            'install_date', 'binaries_count']
+
+    def not_null_constraints(self):
+        return ['source', 'version', 'install_date', 'maintainer', \
+            'changedby', 'poolfile']
 
 __all__.append('DBSource')
 
@@ -2469,14 +2485,10 @@ def add_deb_to_db(u, filename, session=None):
 
     # Add and flush object so it has an ID
     session.add(bin)
-    session.flush()
 
-    # Add BinAssociations
-    for suite_name in u.pkg.changes["distribution"].keys():
-        ba = BinAssociation()
-        ba.binary_id = bin.binary_id
-        ba.suite_id = get_suite(suite_name).suite_id
-        session.add(ba)
+    suite_names = u.pkg.changes["distribution"].keys()
+    bin.suites = session.query(Suite). \
+        filter(Suite.suite_name.in_(suite_names)).all()
 
     session.flush()
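
Assigning a list of Suite objects to bin.suites lets the ORM maintain the bin_associations table itself, replacing the hand-written BinAssociation rows. The pattern in isolation (suite names illustrative, and bin assumed to be an otherwise fully populated DBBinary):

    # Sketch of the ORM-managed many-to-many assignment used above.
    suite_names = ['unstable', 'experimental']
    bin.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()
    session.flush()    # the rows in bin_associations are written by the ORM
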
 
@@ -2544,13 +2556,16 @@ SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
 
 # Why the heck don't we have any UNIQUE constraints in table suite?
 # TODO: Add UNIQUE constraints for appropriate columns.
-class Suite(object):
+class Suite(ORMObject):
     def __init__(self, suite_name = None, version = None):
         self.suite_name = suite_name
         self.version = version
 
-    def __repr__(self):
-        return '<Suite %s>' % self.suite_name
+    def properties(self):
+        return ['suite_name', 'version', 'sources_count', 'binaries_count']
+
+    def not_null_constraints(self):
+        return ['suite_name', 'version']
 
     def __eq__(self, val):
         if isinstance(val, str):
@@ -2710,7 +2725,7 @@ __all__.append('get_suite_src_formats')
 
 ################################################################################
 
-class Uid(object):
+class Uid(ORMObject):
     def __init__(self, uid = None, name = None):
         self.uid = uid
         self.name = name
@@ -2727,8 +2742,11 @@ class Uid(object):
         # This signals to use the normal comparison operator
         return NotImplemented
 
-    def __repr__(self):
-        return '<Uid %s (%s)>' % (self.uid, self.name)
+    def properties(self):
+        return ['uid', 'name', 'fingerprint']
+
+    def not_null_constraints(self):
+        return ['uid']
 
 __all__.append('Uid')
 
@@ -2907,22 +2925,16 @@ class DBConn(object):
 
     def __setupmappers(self):
         mapper(Architecture, self.tbl_architecture,
-           properties = dict(arch_id = self.tbl_architecture.c.id,
+            properties = dict(arch_id = self.tbl_architecture.c.id,
                suites = relation(Suite, secondary=self.tbl_suite_architectures,
                    order_by='suite_name',
-                   backref=backref('architectures', order_by='arch_string'))))
+                   backref=backref('architectures', order_by='arch_string'))),
+            extension = validator)
 
         mapper(Archive, self.tbl_archive,
                properties = dict(archive_id = self.tbl_archive.c.id,
                                  archive_name = self.tbl_archive.c.name))
 
-        mapper(BinAssociation, self.tbl_bin_associations,
-               properties = dict(ba_id = self.tbl_bin_associations.c.id,
-                                 suite_id = self.tbl_bin_associations.c.suite,
-                                 suite = relation(Suite),
-                                 binary_id = self.tbl_bin_associations.c.bin,
-                                 binary = relation(DBBinary)))
-
         mapper(PendingBinContents, self.tbl_pending_bin_contents,
                properties = dict(contents_id =self.tbl_pending_bin_contents.c.id,
                                  filename = self.tbl_pending_bin_contents.c.filename,
@@ -2961,17 +2973,18 @@ class DBConn(object):
                                  maintainer_id = self.tbl_binaries.c.maintainer,
                                  maintainer = relation(Maintainer),
                                  source_id = self.tbl_binaries.c.source,
-                                 source = relation(DBSource),
+                                 source = relation(DBSource, backref='binaries'),
                                  arch_id = self.tbl_binaries.c.architecture,
                                  architecture = relation(Architecture),
                                  poolfile_id = self.tbl_binaries.c.file,
-                                 poolfile = relation(PoolFile),
+                                 poolfile = relation(PoolFile, backref=backref('binary', uselist = False)),
                                  binarytype = self.tbl_binaries.c.type,
                                  fingerprint_id = self.tbl_binaries.c.sig_fpr,
                                  fingerprint = relation(Fingerprint),
                                  install_date = self.tbl_binaries.c.install_date,
-                                 binassociations = relation(BinAssociation,
-                                                            primaryjoin=(self.tbl_binaries.c.id==self.tbl_bin_associations.c.bin))))
+                                 suites = relation(Suite, secondary=self.tbl_bin_associations,
+                                     backref=backref('binaries', lazy='dynamic'))),
+                extension = validator)
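
Because the 'binaries' backref on Suite is declared lazy='dynamic', it behaves like a query rather than a loaded list, so large suites can be filtered or counted in SQL. A sketch using the existing get_suite() helper (suite and architecture names illustrative):

    # Sketch: the dynamic Suite.binaries backref returns a query object.
    suite = get_suite('unstable', session)
    amd64 = suite.binaries.join(Architecture). \
        filter(Architecture.arch_string == 'amd64').count()

The PoolFile side gains a matching one-to-one 'binary' backref (uselist=False), so poolfile.binary hands back the single DBBinary built from that file.
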
 
         mapper(BinaryACL, self.tbl_binary_acl,
                properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))
@@ -2983,7 +2996,8 @@ class DBConn(object):
 
         mapper(Component, self.tbl_component,
                properties = dict(component_id = self.tbl_component.c.id,
-                                 component_name = self.tbl_component.c.name))
+                                 component_name = self.tbl_component.c.name),
+               extension = validator)
 
         mapper(DBConfig, self.tbl_config,
                properties = dict(config_id = self.tbl_config.c.id))
@@ -3003,7 +3017,8 @@ class DBConn(object):
                                      # using lazy='dynamic' in the back
                                      # reference because we have A LOT of
                                      # files in one location
-                                     backref=backref('files', lazy='dynamic'))))
+                                     backref=backref('files', lazy='dynamic'))),
+                extension = validator)
 
         mapper(Fingerprint, self.tbl_fingerprint,
                properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
@@ -3012,7 +3027,8 @@ class DBConn(object):
                                  keyring_id = self.tbl_fingerprint.c.keyring,
                                  keyring = relation(Keyring),
                                  source_acl = relation(SourceACL),
-                                 binary_acl = relation(BinaryACL)))
+                                 binary_acl = relation(BinaryACL)),
+               extension = validator)
 
         mapper(Keyring, self.tbl_keyrings,
                properties = dict(keyring_name = self.tbl_keyrings.c.name,
@@ -3073,19 +3089,22 @@ class DBConn(object):
         mapper(Location, self.tbl_location,
                properties = dict(location_id = self.tbl_location.c.id,
                                  component_id = self.tbl_location.c.component,
-                                 component = relation(Component),
+                                 component = relation(Component, \
+                                     backref=backref('location', uselist = False)),
                                  archive_id = self.tbl_location.c.archive,
                                  archive = relation(Archive),
                                  # FIXME: the 'type' column is old cruft and
                                  # should be removed in the future.
-                                 archive_type = self.tbl_location.c.type))
+                                 archive_type = self.tbl_location.c.type),
+               extension = validator)
 
         mapper(Maintainer, self.tbl_maintainer,
                properties = dict(maintainer_id = self.tbl_maintainer.c.id,
                    maintains_sources = relation(DBSource, backref='maintainer',
                        primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
                    changed_sources = relation(DBSource, backref='changedby',
-                       primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))))
+                       primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
+                extension = validator)
 
         mapper(NewComment, self.tbl_new_comments,
                properties = dict(comment_id = self.tbl_new_comments.c.id))
@@ -3129,8 +3148,9 @@ class DBConn(object):
                                  srcfiles = relation(DSCFile,
                                                      primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
                                  suites = relation(Suite, secondary=self.tbl_src_associations,
-                                     backref='sources'),
-                                 srcuploaders = relation(SrcUploader)))
+                                     backref=backref('sources', lazy='dynamic')),
+                                 srcuploaders = relation(SrcUploader)),
+               extension = validator)
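
The 'sources' backref turning dynamic mirrors the change for binaries above and is what keeps the 'sources_count' entry in Suite.properties() affordable: ORMObject.json() can presumably fall back to the query's count() instead of loading every row. A short sketch (suite and source names illustrative):

    # Sketch: Suite.sources is now a query, not a list.
    suite = get_suite('unstable', session)
    print suite.sources.count()
    print suite.sources.filter(DBSource.source == 'hello').first()
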
 
         mapper(SourceACL, self.tbl_source_acl,
                properties = dict(source_acl_id = self.tbl_source_acl.c.id))
@@ -3151,7 +3171,9 @@ class DBConn(object):
         mapper(Suite, self.tbl_suite,
                properties = dict(suite_id = self.tbl_suite.c.id,
                                  policy_queue = relation(PolicyQueue),
-                                 copy_queues = relation(BuildQueue, secondary=self.tbl_suite_build_queue_copy)))
+                                 copy_queues = relation(BuildQueue,
+                                     secondary=self.tbl_suite_build_queue_copy)),
+                extension = validator)
 
         mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
                properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
@@ -3161,7 +3183,8 @@ class DBConn(object):
 
         mapper(Uid, self.tbl_uid,
                properties = dict(uid_id = self.tbl_uid.c.id,
-                                 fingerprint = relation(Fingerprint)))
+                                 fingerprint = relation(Fingerprint)),
+               extension = validator)
 
         mapper(UploadBlock, self.tbl_upload_blocks,
                properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,