X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=daklib%2Fdbconn.py;h=73b1b48d0ae770d96075744c9192f8051acbf2bf;hb=0856760278c13dfe42ddffa54a7cdd1df5db9d1d;hp=e8a68390c5989bfdffbc655785c413c1d550f33f;hpb=c94d8bb3dc23bf45f88fa9e53e6dd0ca767d8725;p=dak.git

diff --git a/daklib/dbconn.py b/daklib/dbconn.py
index e8a68390..73b1b48d 100755
--- a/daklib/dbconn.py
+++ b/daklib/dbconn.py
@@ -53,9 +53,10 @@ from tempfile import mkstemp, mkdtemp
 from inspect import getargspec
 
 import sqlalchemy
-from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc
+from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
+    Text, ForeignKey
 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
-    backref, MapperExtension, EXT_CONTINUE
+    backref, MapperExtension, EXT_CONTINUE, object_mapper
 from sqlalchemy import types as sqltypes
 
 # Don't remove this, we re-export the exceptions to scripts which import us
@@ -287,6 +288,50 @@ class ORMObject(object):
         '''
         return session.query(cls).get(primary_key)
 
+    def session(self, replace = False):
+        '''
+        Returns the current session that is associated with the object. May
+        return None if the object is in detached state.
+        '''
+
+        return object_session(self)
+
+    def clone(self, session = None):
+        '''
+        Clones the current object in a new session and returns the new clone. A
+        fresh session is created if the optional session parameter is not
+        provided. The function will fail if a session is provided and has
+        unflushed changes.
+
+        RATIONALE: SQLAlchemy's session is not thread safe. This method clones
+        an existing object to allow several threads to work with their own
+        instances of an ORMObject.
+
+        WARNING: Only persistent (committed) objects can be cloned. Changes
+        made to the original object that are not committed yet will get lost.
+        The session of the new object will always be rolled back to avoid
+        resource leaks.
+        '''
+
+        if self.session() is None:
+            raise RuntimeError( \
+                'Method clone() failed for detached object:\n%s' % self)
+        self.session().flush()
+        mapper = object_mapper(self)
+        primary_key = mapper.primary_key_from_instance(self)
+        object_class = self.__class__
+        if session is None:
+            session = DBConn().session()
+        elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
+            raise RuntimeError( \
+                'Method clone() failed due to unflushed changes in session.')
+        new_object = session.query(object_class).get(primary_key)
+        session.rollback()
+        if new_object is None:
+            raise RuntimeError( \
+                'Method clone() failed for non-persistent object:\n%s' % self)
+        return new_object
+
 __all__.append('ORMObject')
 
 ################################################################################
 
@@ -421,12 +466,13 @@ __all__.append('get_archive')
 
 ################################################################################
 
-class BinContents(object):
-    def __init__(self, *args, **kwargs):
-        pass
+class BinContents(ORMObject):
+    def __init__(self, file = None, binary = None):
+        self.file = file
+        self.binary = binary
 
-    def __repr__(self):
-        return '<BinContents (%s, %s)>' % (self.binary, self.filename)
+    def properties(self):
+        return ['file', 'binary']
 
 __all__.append('BinContents')
 
@@ -447,7 +493,7 @@ class DBBinary(ORMObject):
     def properties(self):
         return ['package', 'version', 'maintainer', 'source', 'architecture', \
             'poolfile', 'binarytype', 'fingerprint', 'install_date', \
-            'suites_count', 'binary_id']
+            'suites_count', 'binary_id', 'contents_count']
 
     def not_null_constraints(self):
         return ['package', 'version', 'maintainer', 'source', 'poolfile', \
@@ -1161,7 +1207,7 @@ class PoolFile(ORMObject):
     def fullpath(self):
         return os.path.join(self.location.path, self.filename)
 
-    def is_valid(self, filesize = -1, md5sum = None):\
+    def is_valid(self, filesize = -1, md5sum = None):
         return self.filesize == long(filesize) and self.md5sum == md5sum
 
     def properties(self):
@@ -1583,7 +1629,8 @@ class Location(ORMObject):
         self.archive_type = 'pool'
 
     def properties(self):
-        return ['path', 'archive_type', 'component', 'files_count']
+        return ['path', 'location_id', 'archive_type', 'component', \
+            'files_count']
 
     def not_null_constraints(self):
         return ['path', 'archive_type']
@@ -1826,12 +1873,15 @@ __all__.append('get_override')
 
 ################################################################################
 
-class OverrideType(object):
-    def __init__(self, *args, **kwargs):
-        pass
+class OverrideType(ORMObject):
+    def __init__(self, overridetype = None):
+        self.overridetype = overridetype
 
-    def __repr__(self):
-        return '<OverrideType %s>' % self.overridetype
+    def properties(self):
+        return ['overridetype', 'overridetype_id']
+
+    def not_null_constraints(self):
+        return ['overridetype']
 
 __all__.append('OverrideType')
 
@@ -2829,7 +2879,9 @@ class DBConn(object):
             'binary_acl',
             'binary_acl_map',
             'build_queue',
+            'build_queue_files',
             'changelogs_text',
+            'changes',
             'component',
             'config',
             'changes_pending_binaries',
@@ -2856,15 +2908,9 @@ class DBConn(object):
             'suite',
             'uid',
             'upload_blocks',
-            # The following tables have primary keys but sqlalchemy
-            # version 0.5 fails to reflect them correctly with database
-            # versions before upgrade #41.
-            #'changes',
-            #'build_queue_files',
         )
 
         tables_no_primary = (
-            'bin_contents',
             'changes_pending_files_map',
             'changes_pending_source_files',
             'changes_pool_files',
@@ -2874,9 +2920,6 @@ class DBConn(object):
             'suite_src_formats',
             'suite_build_queue_copy',
             'udeb_contents',
-            # see the comment above
-            'changes',
-            'build_queue_files',
         )
 
         views = (
@@ -2916,6 +2959,14 @@ class DBConn(object):
             table = Table(table_name, self.db_meta, autoload=True)
             setattr(self, 'tbl_%s' % table_name, table)
 
+        # bin_contents needs special attention until update #41 has been
+        # applied
+        self.tbl_bin_contents = Table('bin_contents', self.db_meta, \
+            Column('file', Text, primary_key = True),
+            Column('binary_id', Integer, ForeignKey('binaries.id'), \
+                primary_key = True),
+            autoload=True, useexisting=True)
+
         for view_name in views:
             view = Table(view_name, self.db_meta, autoload=True)
             setattr(self, 'view_%s' % view_name, view)
@@ -3117,7 +3168,8 @@ class DBConn(object):
                section_id = self.tbl_override.c.section,
                section = relation(Section),
                overridetype_id = self.tbl_override.c.type,
-               overridetype = relation(OverrideType)))
+               overridetype = relation(OverrideType, \
+                   backref=backref('overrides', lazy='dynamic'))))
 
         mapper(OverrideType, self.tbl_override_type,
                properties = dict(overridetype = self.tbl_override_type.c.type,
@@ -3188,6 +3240,12 @@ class DBConn(object):
                fingerprint = relation(Fingerprint, backref="uploadblocks"),
                uid = relation(Uid, backref="uploadblocks")))
 
+        mapper(BinContents, self.tbl_bin_contents,
+            properties = dict(
+                binary = relation(DBBinary,
+                    backref=backref('contents', lazy='dynamic')),
+                file = self.tbl_bin_contents.c.file))
+
     ## Connection functions
     def __createconn(self):
         from config import Config
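
Note (not part of the patch): the new ORMObject.clone() exists because an SQLAlchemy
session must not be shared between threads; clone() gives each thread its own copy of a
persistent object bound to its own session. Below is a minimal sketch of how it might be
used. The DBBinary query and the worker body are assumptions made for this example, not
code taken from the patch.

from threading import Thread

from daklib.dbconn import DBConn, DBBinary

def examine(binary):
    # the worker only touches its own thread-local clone
    description = '%s %s' % (binary.package, binary.version)

session = DBConn().session()
original = session.query(DBBinary).first()
session.commit()    # clone() only works for committed (persistent) objects

# clone in the main thread; every clone is bound to its own fresh session
workers = [Thread(target=examine, args=(original.clone(),)) for i in range(4)]
for worker in workers:
    worker.start()
for worker in workers:
    worker.join()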
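Note (not part of the patch): the new BinContents mapping and the 'overrides' backref are
configured with lazy='dynamic', so the related collection is exposed as a query that can be
filtered or counted without loading every row; the new 'contents_count' entry in
DBBinary.properties() presumably relies on that. A small illustrative sketch follows; the
LIKE pattern is an example, not taken from the patch.

from daklib.dbconn import DBConn, DBBinary, BinContents

session = DBConn().session()
binary = session.query(DBBinary).first()

# binary.contents behaves like a query thanks to lazy='dynamic'
manpages = binary.contents.filter(BinContents.file.like('usr/share/man/%')).all()
how_many = binary.contents.count()    # counts in the database without fetching rows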