5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
38 from os.path import normpath
50 import simplejson as json
52 from datetime import datetime, timedelta
53 from errno import ENOENT
54 from tempfile import mkstemp, mkdtemp
55 from subprocess import Popen, PIPE
56 from tarfile import TarFile
58 from inspect import getargspec
61 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
63 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
64 backref, MapperExtension, EXT_CONTINUE, object_mapper, clear_mappers
65 from sqlalchemy import types as sqltypes
66 from sqlalchemy.orm.collections import attribute_mapped_collection
67 from sqlalchemy.ext.associationproxy import association_proxy
69 # Don't remove this, we re-export the exceptions to scripts which import us
70 from sqlalchemy.exc import *
71 from sqlalchemy.orm.exc import NoResultFound
73 # Only import Config until Queue stuff is changed to store its config
75 from config import Config
76 from textutils import fix_maintainer
77 from dak_exceptions import DBUpdateError, NoSourceFieldError, FileExistsError
79 # suppress some deprecation warnings in squeeze related to sqlalchemy
81 warnings.filterwarnings('ignore', \
82 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
84 warnings.filterwarnings('ignore', \
85 "Predicate of partial index .* ignored during reflection", \
89 ################################################################################
91 # Patch in support for the debversion field type so that it works during
95 # that is for sqlalchemy 0.6
96 UserDefinedType = sqltypes.UserDefinedType
98 # this one for sqlalchemy 0.5
99 UserDefinedType = sqltypes.TypeEngine
101 class DebVersion(UserDefinedType):
102 def get_col_spec(self):
105 def bind_processor(self, dialect):
108 # ' = None' is needed for sqlalchemy 0.5:
109 def result_processor(self, dialect, coltype = None):
112 sa_major_version = sqlalchemy.__version__[0:3]
113 if sa_major_version in ["0.5", "0.6", "0.7"]:
114 from sqlalchemy.databases import postgres
115 postgres.ischema_names['debversion'] = DebVersion
117 raise Exception("dak only ported to SQLA versions 0.5 to 0.7. See daklib/dbconn.py")
119 ################################################################################
121 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
123 ################################################################################
125 def session_wrapper(fn):
127 Wrapper around common ".., session=None):" handling. If the wrapped
128 function is called without passing 'session', we create a local one
129 and destroy it when the function ends.
131 Also attaches a commit_or_flush method to the session; if we created a
132 local session, this is a synonym for session.commit(), otherwise it is a
133 synonym for session.flush().
136 def wrapped(*args, **kwargs):
137 private_transaction = False
139 # Find the session object
140 session = kwargs.get('session')
143 if len(args) <= len(getargspec(fn)[0]) - 1:
144 # No session specified as last argument or in kwargs
145 private_transaction = True
146 session = kwargs['session'] = DBConn().session()
148 # Session is last argument in args
152 session = args[-1] = DBConn().session()
153 private_transaction = True
155 if private_transaction:
156 session.commit_or_flush = session.commit
158 session.commit_or_flush = session.flush
161 return fn(*args, **kwargs)
163 if private_transaction:
164 # We created a session; close it.
167 wrapped.__doc__ = fn.__doc__
168 wrapped.func_name = fn.func_name
172 __all__.append('session_wrapper')
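# Illustrative sketch (not part of the module itself): any helper decorated
# with @session_wrapper can be called with or without an explicit session.
# The calls below assume get_architecture() as defined further down in this
# file; the surrounding usage is hypothetical.
#
#     arch = get_architecture('amd64')             # private session is created,
#                                                   # commit_or_flush() commits
#
#     session = DBConn().session()
#     arch = get_architecture('amd64', session)    # caller owns the session,
#     session.rollback()                            # commit_or_flush() only flushes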
174 ################################################################################
176 class ORMObject(object):
178 ORMObject is a base class for all ORM classes mapped by SQLAlchemy. All
179 derived classes must implement the properties() method.
182 def properties(self):
184 This method should be implemented by all derived classes and returns a
185 list of the important properties. The properties 'created' and
186 'modified' will be added automatically. A suffix '_count' should be
187 added to properties that are lists or query objects. The most important
188 property name should be returned as the first element in the list
189 because it is used by repr().
195 Returns a JSON representation of the object based on the properties
196 returned from the properties() method.
199 # add created and modified
200 all_properties = self.properties() + ['created', 'modified']
201 for property in all_properties:
202 # check for list or query
203 if property[-6:] == '_count':
204 real_property = property[:-6]
205 if not hasattr(self, real_property):
207 value = getattr(self, real_property)
208 if hasattr(value, '__len__'):
211 elif hasattr(value, 'count'):
212 # query (but not during validation)
213 if self.in_validation:
215 value = value.count()
217 raise KeyError('Do not understand property %s.' % property)
219 if not hasattr(self, property):
222 value = getattr(self, property)
226 elif isinstance(value, ORMObject):
227 # use repr() for ORMObject types
230 # we want a string for all other types because json cannot
233 data[property] = value
234 return json.dumps(data)
238 Returns the name of the class.
240 return type(self).__name__
244 Returns a short string representation of the object using the first
245 element from the properties() method.
247 primary_property = self.properties()[0]
248 value = getattr(self, primary_property)
249 return '<%s %s>' % (self.classname(), str(value))
253 Returns a human readable form of the object using the properties()
256 return '<%s %s>' % (self.classname(), self.json())
258 def not_null_constraints(self):
260 Returns a list of properties that must be not NULL. Derived classes
261 should override this method if needed.
265 validation_message = \
266 "Validation failed because property '%s' must not be empty in object\n%s"
268 in_validation = False
272 This function validates the not NULL constraints as returned by
273 not_null_constraints(). It raises the DBUpdateError exception if
276 for property in self.not_null_constraints():
277 # TODO: It is a bit awkward that the mapper configuration allows
278 # directly setting the numeric _id columns. We should get rid of it
280 if hasattr(self, property + '_id') and \
281 getattr(self, property + '_id') is not None:
283 if not hasattr(self, property) or getattr(self, property) is None:
284 # str() might lead to races due to a 2nd flush
285 self.in_validation = True
286 message = self.validation_message % (property, str(self))
287 self.in_validation = False
288 raise DBUpdateError(message)
292 def get(cls, primary_key, session = None):
294 This is a support function that allows getting an object by its primary
297 Architecture.get(3[, session])
299 instead of the more verbose
301 session.query(Architecture).get(3)
303 return session.query(cls).get(primary_key)
305 def session(self, replace = False):
307 Returns the current session that is associated with the object. May
308 return None if the object is in a detached state.
311 return object_session(self)
313 def clone(self, session = None):
315 Clones the current object in a new session and returns the new clone. A
316 fresh session is created if the optional session parameter is not
317 provided. The function will fail if a session is provided and has unflushed changes.
320 RATIONALE: SQLAlchemy's session is not thread safe. This method clones
321 an existing object to allow several threads to work with their own
322 instances of an ORMObject.
324 WARNING: Only persistent (committed) objects can be cloned. Changes
325 made to the original object that are not committed yet will get lost.
326 The session of the new object will always be rolled back to avoid resource leaks.
330 if self.session() is None:
331 raise RuntimeError( \
332 'Method clone() failed for detached object:\n%s' % self)
333 self.session().flush()
334 mapper = object_mapper(self)
335 primary_key = mapper.primary_key_from_instance(self)
336 object_class = self.__class__
338 session = DBConn().session()
339 elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
340 raise RuntimeError( \
341 'Method clone() failed due to unflushed changes in session.')
342 new_object = session.query(object_class).get(primary_key)
344 if new_object is None:
345 raise RuntimeError( \
346 'Method clone() failed for non-persistent object:\n%s' % self)
349 __all__.append('ORMObject')
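# Minimal sketch of an ORMObject subclass (hypothetical, for illustration
# only -- the real mapped classes follow below). It shows the two methods
# derived classes are expected to provide and a typical clone() call.
#
#     class Example(ORMObject):
#         def properties(self):
#             # the first element is used by repr()
#             return ['name', 'example_id', 'suites_count']
#
#         def not_null_constraints(self):
#             return ['name']
#
#     # work with a thread-private copy of a persistent object:
#     # my_copy = some_persistent_object.clone()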
351 ################################################################################
353 class Validator(MapperExtension):
355 This class calls the validate() method for each instance for the
356 'before_update' and 'before_insert' events. A global object validator is
357 used for configuring the individual mappers.
360 def before_update(self, mapper, connection, instance):
364 def before_insert(self, mapper, connection, instance):
368 validator = Validator()
370 ################################################################################
372 class Architecture(ORMObject):
373 def __init__(self, arch_string = None, description = None):
374 self.arch_string = arch_string
375 self.description = description
377 def __eq__(self, val):
378 if isinstance(val, str):
379 return (self.arch_string == val)
380 # This signals to use the normal comparison operator
381 return NotImplemented
383 def __ne__(self, val):
384 if isinstance(val, str):
385 return (self.arch_string != val)
386 # This signals to use the normal comparison operator
387 return NotImplemented
389 def properties(self):
390 return ['arch_string', 'arch_id', 'suites_count']
392 def not_null_constraints(self):
393 return ['arch_string']
395 __all__.append('Architecture')
398 def get_architecture(architecture, session=None):
400 Returns the Architecture object for the given C{architecture} name.
402 @type architecture: string
403 @param architecture: The name of the architecture
405 @type session: Session
406 @param session: Optional SQLA session object (a temporary one will be
407 generated if not supplied)
410 @return: Architecture object for the given arch (None if not present)
413 q = session.query(Architecture).filter_by(arch_string=architecture)
417 except NoResultFound:
420 __all__.append('get_architecture')
422 # TODO: should be removed because the implementation is too trivial
424 def get_architecture_suites(architecture, session=None):
426 Returns list of Suite objects for given C{architecture} name
428 @type architecture: str
429 @param architecture: Architecture name to search for
431 @type session: Session
432 @param session: Optional SQL session object (a temporary one will be
433 generated if not supplied)
436 @return: list of Suite objects for the given name (may be empty)
439 return get_architecture(architecture, session).suites
441 __all__.append('get_architecture_suites')
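# Usage sketch (assumes an 'amd64' row exists in the architecture table;
# values are examples, not dak configuration):
#
#     arch = get_architecture('amd64')             # Architecture object or None
#     suites = get_architecture_suites('amd64')    # e.g. [<Suite unstable>, ...]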
443 ################################################################################
445 class Archive(object):
446 def __init__(self, *args, **kwargs):
450 return '<Archive %s>' % self.archive_name
452 __all__.append('Archive')
455 def get_archive(archive, session=None):
457 Returns the Archive object for the given C{archive} name.
459 @type archive: string
460 @param archive: the name of the archive
462 @type session: Session
463 @param session: Optional SQLA session object (a temporary one will be
464 generated if not supplied)
467 @return: Archive object for the given name (None if not present)
470 archive = archive.lower()
472 q = session.query(Archive).filter_by(archive_name=archive)
476 except NoResultFound:
479 __all__.append('get_archive')
481 ################################################################################
483 class ArchiveFile(object):
484 def __init__(self, archive=None, component=None, file=None):
485 self.archive = archive
486 self.component = component
490 return os.path.join(self.archive.path, 'pool', self.component.component_name, self.file.filename)
492 __all__.append('ArchiveFile')
494 ################################################################################
496 class BinContents(ORMObject):
497 def __init__(self, file = None, binary = None):
501 def properties(self):
502 return ['file', 'binary']
504 __all__.append('BinContents')
506 ################################################################################
508 def subprocess_setup():
509 # Python installs a SIGPIPE handler by default. This is usually not what
510 # non-Python subprocesses expect.
511 signal.signal(signal.SIGPIPE, signal.SIG_DFL)
513 class DBBinary(ORMObject):
514 def __init__(self, package = None, source = None, version = None, \
515 maintainer = None, architecture = None, poolfile = None, \
516 binarytype = 'deb', fingerprint=None):
517 self.package = package
519 self.version = version
520 self.maintainer = maintainer
521 self.architecture = architecture
522 self.poolfile = poolfile
523 self.binarytype = binarytype
524 self.fingerprint = fingerprint
528 return self.binary_id
530 def properties(self):
531 return ['package', 'version', 'maintainer', 'source', 'architecture', \
532 'poolfile', 'binarytype', 'fingerprint', 'install_date', \
533 'suites_count', 'binary_id', 'contents_count', 'extra_sources']
535 def not_null_constraints(self):
536 return ['package', 'version', 'maintainer', 'source', 'poolfile', \
539 metadata = association_proxy('key', 'value')
541 def scan_contents(self):
543 Yields the contents of the package. Only regular files are yielded and
544 the path names are normalized after converting them from either utf-8
545 or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the
546 package does not contain any regular file.
548 fullpath = self.poolfile.fullpath
549 dpkg = Popen(['dpkg-deb', '--fsys-tarfile', fullpath], stdout = PIPE,
550 preexec_fn = subprocess_setup)
551 tar = TarFile.open(fileobj = dpkg.stdout, mode = 'r|')
552 for member in tar.getmembers():
553 if not member.isdir():
554 name = normpath(member.name)
555 # enforce proper utf-8 encoding
558 except UnicodeDecodeError:
559 name = name.decode('iso8859-1').encode('utf-8')
565 def read_control(self):
567 Reads the control information from a binary.
570 @return: stanza text of the control section.
573 fullpath = self.poolfile.fullpath
574 deb_file = open(fullpath, 'r')
575 stanza = utils.deb_extract_control(deb_file)
580 def read_control_fields(self):
582 Reads the control information from a binary and return
586 @return: fields of the control section as a dictionary.
589 stanza = self.read_control()
590 return apt_pkg.TagSection(stanza)
592 __all__.append('DBBinary')
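# Usage sketch for the DBBinary helpers above; 'binary' is assumed to be a
# DBBinary instance whose pool file is present on disk.
#
#     for path in binary.scan_contents():          # normalized member paths
#         print path
#     fields = binary.read_control_fields()        # apt_pkg.TagSection
#     print fields['Description']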
595 def get_suites_binary_in(package, session=None):
597 Returns list of Suite objects which the given C{package} name is in
600 @param package: DBBinary package name to search for
603 @return: list of Suite objects for the given package
606 return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()
608 __all__.append('get_suites_binary_in')
611 def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
613 Returns the component name of the newest binary package in suite_list or
614 None if no package is found. The result can be optionally filtered by a list
615 of architecture names.
618 @param package: DBBinary package name to search for
620 @type suite_list: list of str
621 @param suite_list: list of suite_name items
623 @type arch_list: list of str
624 @param arch_list: optional list of arch_string items that defaults to []
626 @rtype: str or NoneType
627 @return: name of component or None
630 q = session.query(DBBinary).filter_by(package = package). \
631 join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
632 if len(arch_list) > 0:
633 q = q.join(DBBinary.architecture). \
634 filter(Architecture.arch_string.in_(arch_list))
635 binary = q.order_by(desc(DBBinary.version)).first()
639 return binary.poolfile.component.component_name
641 __all__.append('get_component_by_package_suite')
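# Usage sketch: find the component of the newest 'hello' binary in unstable,
# optionally restricted to amd64 (the suite, package and architecture names
# are made-up examples).
#
#     component_name = get_component_by_package_suite(
#         'hello', ['unstable'], arch_list=['amd64'])
#     # -> 'main', for example, or None if no matching binary exists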
643 ################################################################################
645 class BinaryACL(object):
646 def __init__(self, *args, **kwargs):
650 return '<BinaryACL %s>' % self.binary_acl_id
652 __all__.append('BinaryACL')
654 ################################################################################
656 class BinaryACLMap(object):
657 def __init__(self, *args, **kwargs):
661 return '<BinaryACLMap %s>' % self.binary_acl_map_id
663 __all__.append('BinaryACLMap')
665 ################################################################################
667 class BuildQueue(object):
668 def __init__(self, *args, **kwargs):
672 return '<BuildQueue %s>' % self.queue_name
674 __all__.append('BuildQueue')
676 ################################################################################
678 class Component(ORMObject):
679 def __init__(self, component_name = None):
680 self.component_name = component_name
682 def __eq__(self, val):
683 if isinstance(val, str):
684 return (self.component_name == val)
685 # This signals to use the normal comparison operator
686 return NotImplemented
688 def __ne__(self, val):
689 if isinstance(val, str):
690 return (self.component_name != val)
691 # This signals to use the normal comparison operator
692 return NotImplemented
694 def properties(self):
695 return ['component_name', 'component_id', 'description', \
696 'meets_dfsg', 'overrides_count']
698 def not_null_constraints(self):
699 return ['component_name']
702 __all__.append('Component')
705 def get_component(component, session=None):
707 Returns the Component object for the given C{component} name.
709 @type component: string
710 @param component: The name of the component
713 @return: the Component object for the given name (None if not present)
716 component = component.lower()
718 q = session.query(Component).filter_by(component_name=component)
722 except NoResultFound:
725 __all__.append('get_component')
728 def get_mapped_component(component_name, session=None):
729 """get component after mappings
731 Evaluate component mappings from ComponentMappings in dak.conf for the
732 given component name.
734 @todo: ansgar wants to get rid of this. It's currently only used for
737 @type component_name: str
738 @param component_name: component name
740 @param session: database session
742 @rtype: L{daklib.dbconn.Component} or C{None}
743 @return: component after applying maps or C{None}
746 for m in cnf.value_list("ComponentMappings"):
747 (src, dst) = m.split()
748 if component_name == src:
750 component = session.query(Component).filter_by(component_name=component_name).first()
753 __all__.append('get_mapped_component')
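# Usage sketch: with a ComponentMappings entry such as "contrib non-free"
# (a hypothetical example; each entry is a "source destination" pair split on
# whitespace), the lookup below would be redirected to 'non-free'.
#
#     component = get_mapped_component('contrib')
#     # -> Component object for the mapped name, or None if it does not exist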
756 def get_component_names(session=None):
758 Returns list of strings of component names.
761 @return: list of strings of component names
764 return [ x.component_name for x in session.query(Component).all() ]
766 __all__.append('get_component_names')
768 ################################################################################
770 class DBConfig(object):
771 def __init__(self, *args, **kwargs):
775 return '<DBConfig %s>' % self.name
777 __all__.append('DBConfig')
779 ################################################################################
782 def get_or_set_contents_file_id(filename, session=None):
784 Returns database id for given filename.
786 If no matching file is found, a row is inserted.
788 @type filename: string
789 @param filename: The filename
790 @type session: SQLAlchemy
791 @param session: Optional SQL session object (a temporary one will be
792 generated if not supplied). If not passed, a commit will be performed at
793 the end of the function, otherwise the caller is responsible for committing.
796 @return: the database id for the given filename
799 q = session.query(ContentFilename).filter_by(filename=filename)
802 ret = q.one().cafilename_id
803 except NoResultFound:
804 cf = ContentFilename()
805 cf.filename = filename
807 session.commit_or_flush()
808 ret = cf.cafilename_id
812 __all__.append('get_or_set_contents_file_id')
815 def get_contents(suite, overridetype, section=None, session=None):
817 Returns contents for a suite / overridetype combination, limiting
818 to a section if not None.
821 @param suite: Suite object
823 @type overridetype: OverrideType
824 @param overridetype: OverrideType object
826 @type section: Section
827 @param section: Optional section object to limit results to
829 @type session: SQLAlchemy
830 @param session: Optional SQL session object (a temporary one will be
831 generated if not supplied)
834 @return: ResultsProxy object set up to return tuples of (filename, section,
838 # find me all of the contents for a given suite
839 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
843 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
844 JOIN content_file_names n ON (c.filename=n.id)
845 JOIN binaries b ON (b.id=c.binary_pkg)
846 JOIN override o ON (o.package=b.package)
847 JOIN section s ON (s.id=o.section)
848 WHERE o.suite = :suiteid AND o.type = :overridetypeid
849 AND b.type=:overridetypename"""
851 vals = {'suiteid': suite.suite_id,
852 'overridetypeid': overridetype.overridetype_id,
853 'overridetypename': overridetype.overridetype}
855 if section is not None:
856 contents_q += " AND s.id = :sectionid"
857 vals['sectionid'] = section.section_id
859 contents_q += " ORDER BY fn"
861 return session.execute(contents_q, vals)
863 __all__.append('get_contents')
865 ################################################################################
867 class ContentFilepath(object):
868 def __init__(self, *args, **kwargs):
872 return '<ContentFilepath %s>' % self.filepath
874 __all__.append('ContentFilepath')
877 def get_or_set_contents_path_id(filepath, session=None):
879 Returns database id for given path.
881 If no matching file is found, a row is inserted.
883 @type filepath: string
884 @param filepath: The filepath
886 @type session: SQLAlchemy
887 @param session: Optional SQL session object (a temporary one will be
888 generated if not supplied). If not passed, a commit will be performed at
889 the end of the function, otherwise the caller is responsible for committing.
892 @return: the database id for the given path
895 q = session.query(ContentFilepath).filter_by(filepath=filepath)
898 ret = q.one().cafilepath_id
899 except NoResultFound:
900 cf = ContentFilepath()
901 cf.filepath = filepath
903 session.commit_or_flush()
904 ret = cf.cafilepath_id
908 __all__.append('get_or_set_contents_path_id')
910 ################################################################################
912 class ContentAssociation(object):
913 def __init__(self, *args, **kwargs):
917 return '<ContentAssociation %s>' % self.ca_id
919 __all__.append('ContentAssociation')
921 def insert_content_paths(binary_id, fullpaths, session=None):
923 Make sure given path is associated with given binary id
926 @param binary_id: the id of the binary
927 @type fullpaths: list
928 @param fullpaths: the list of paths of the file being associated with the binary
929 @type session: SQLAlchemy session
930 @param session: Optional SQLAlchemy session. If this is passed, the caller
931 is responsible for ensuring a transaction has begun and for committing the
932 results or rolling back based on the result code. If not passed, a commit
933 will be performed at the end of the function.
936 @return: True upon success
941 session = DBConn().session()
946 def generate_path_dicts():
947 for fullpath in fullpaths:
948 if fullpath.startswith( './' ):
949 fullpath = fullpath[2:]
951 yield {'filename':fullpath, 'id': binary_id }
953 for d in generate_path_dicts():
954 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
963 traceback.print_exc()
965 # Only rollback if we set up the session ourself
972 __all__.append('insert_content_paths')
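# Usage sketch (the binary id and the path list are made-up values): leading
# './' prefixes are stripped before the rows are inserted into bin_contents.
#
#     ok = insert_content_paths(42,
#         ['./usr/bin/hello', 'usr/share/doc/hello/copyright'])
#     # -> True on success; on failure a privately created session is rolled back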
974 ################################################################################
976 class DSCFile(object):
977 def __init__(self, *args, **kwargs):
981 return '<DSCFile %s>' % self.dscfile_id
983 __all__.append('DSCFile')
986 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
988 Returns a list of DSCFiles which may be empty
990 @type dscfile_id: int (optional)
991 @param dscfile_id: the dscfile_id of the DSCFiles to find
993 @type source_id: int (optional)
994 @param source_id: the source id related to the DSCFiles to find
996 @type poolfile_id: int (optional)
997 @param poolfile_id: the poolfile id related to the DSCFiles to find
1000 @return: Possibly empty list of DSCFiles
1003 q = session.query(DSCFile)
1005 if dscfile_id is not None:
1006 q = q.filter_by(dscfile_id=dscfile_id)
1008 if source_id is not None:
1009 q = q.filter_by(source_id=source_id)
1011 if poolfile_id is not None:
1012 q = q.filter_by(poolfile_id=poolfile_id)
1016 __all__.append('get_dscfiles')
1018 ################################################################################
1020 class ExternalOverride(ORMObject):
1021 def __init__(self, *args, **kwargs):
1025 return '<ExternalOverride %s = %s: %s>' % (self.package, self.key, self.value)
1027 __all__.append('ExternalOverride')
1029 ################################################################################
1031 class PoolFile(ORMObject):
1032 def __init__(self, filename = None, filesize = -1, \
1034 self.filename = filename
1035 self.filesize = filesize
1036 self.md5sum = md5sum
1040 session = DBConn().session().object_session(self)
1041 af = session.query(ArchiveFile).join(Archive).filter(ArchiveFile.file == self).first()
1045 def component(self):
1046 session = DBConn().session().object_session(self)
1047 component_id = session.query(ArchiveFile.component_id).filter(ArchiveFile.file == self) \
1048 .group_by(ArchiveFile.component_id).one()
1049 return session.query(Component).get(component_id)
1053 return os.path.basename(self.filename)
1055 def is_valid(self, filesize = -1, md5sum = None):
1056 return self.filesize == long(filesize) and self.md5sum == md5sum
1058 def properties(self):
1059 return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
1060 'sha256sum', 'source', 'binary', 'last_used']
1062 def not_null_constraints(self):
1063 return ['filename', 'md5sum']
1065 def identical_to(self, filename):
1067 compare size and hash with the given file
1070 @return: true if the given file has the same size and hash as this object; false otherwise
1072 st = os.stat(filename)
1073 if self.filesize != st.st_size:
1076 f = open(filename, "r")
1077 sha256sum = apt_pkg.sha256sum(f)
1078 if sha256sum != self.sha256sum:
1083 __all__.append('PoolFile')
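# Usage sketch for the PoolFile checks above; 'poolfile' is assumed to be a
# PoolFile instance and 'local' a path to a file on disk (both hypothetical).
#
#     if not poolfile.is_valid(filesize=os.path.getsize(local), md5sum=md5):
#         pass   # recorded size/md5sum do not match the upload
#     if poolfile.identical_to(local):
#         pass   # same size and SHA256 as the file already in the pool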
1086 def get_poolfile_like_name(filename, session=None):
1088 Returns an array of PoolFile objects which are like the given name
1090 @type filename: string
1091 @param filename: the filename of the file to check against the DB
1094 @return: array of PoolFile objects
1097 # TODO: There must be a way of properly using bind parameters with %FOO%
1098 q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1102 __all__.append('get_poolfile_like_name')
1104 ################################################################################
1106 class Fingerprint(ORMObject):
1107 def __init__(self, fingerprint = None):
1108 self.fingerprint = fingerprint
1110 def properties(self):
1111 return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
1114 def not_null_constraints(self):
1115 return ['fingerprint']
1117 __all__.append('Fingerprint')
1120 def get_fingerprint(fpr, session=None):
1122 Returns Fingerprint object for given fpr.
1125 @param fpr: The fpr to find / add
1127 @type session: SQLAlchemy
1128 @param session: Optional SQL session object (a temporary one will be
1129 generated if not supplied).
1132 @return: the Fingerprint object for the given fpr or None
1135 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1139 except NoResultFound:
1144 __all__.append('get_fingerprint')
1147 def get_or_set_fingerprint(fpr, session=None):
1149 Returns Fingerprint object for given fpr.
1151 If no matching fpr is found, a row is inserted.
1154 @param fpr: The fpr to find / add
1156 @type session: SQLAlchemy
1157 @param session: Optional SQL session object (a temporary one will be
1158 generated if not supplied). If not passed, a commit will be performed at
1159 the end of the function, otherwise the caller is responsible for committing.
1160 A flush will be performed either way.
1163 @return: the Fingerprint object for the given fpr
1166 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1170 except NoResultFound:
1171 fingerprint = Fingerprint()
1172 fingerprint.fingerprint = fpr
1173 session.add(fingerprint)
1174 session.commit_or_flush()
1179 __all__.append('get_or_set_fingerprint')
1181 ################################################################################
1183 # Helper routine for Keyring class
1184 def get_ldap_name(entry):
1186 for k in ["cn", "mn", "sn"]:
1188 if ret and ret[0] != "" and ret[0] != "-":
1190 return " ".join(name)
1192 ################################################################################
1194 class Keyring(object):
1195 gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
1196 " --with-colons --fingerprint --fingerprint"
1201 def __init__(self, *args, **kwargs):
1205 return '<Keyring %s>' % self.keyring_name
1207 def de_escape_gpg_str(self, txt):
1208 esclist = re.split(r'(\\x..)', txt)
1209 for x in range(1,len(esclist),2):
1210 esclist[x] = "%c" % (int(esclist[x][2:],16))
1211 return "".join(esclist)
1213 def parse_address(self, uid):
1214 """parses uid and returns a tuple of real name and email address"""
1216 (name, address) = email.Utils.parseaddr(uid)
1217 name = re.sub(r"\s*[(].*[)]", "", name)
1218 name = self.de_escape_gpg_str(name)
1221 return (name, address)
1223 def load_keys(self, keyring):
1224 if not self.keyring_id:
1225 raise Exception('Must be initialized with database information')
1227 k = os.popen(self.gpg_invocation % keyring, "r")
1232 field = line.split(":")
1233 if field[0] == "pub":
1236 (name, addr) = self.parse_address(field[9])
1238 self.keys[key]["email"] = addr
1239 self.keys[key]["name"] = name
1240 self.keys[key]["fingerprints"] = []
1242 elif key and field[0] == "sub" and len(field) >= 12:
1243 signingkey = ("s" in field[11])
1244 elif key and field[0] == "uid":
1245 (name, addr) = self.parse_address(field[9])
1246 if "email" not in self.keys[key] and "@" in addr:
1247 self.keys[key]["email"] = addr
1248 self.keys[key]["name"] = name
1249 elif signingkey and field[0] == "fpr":
1250 self.keys[key]["fingerprints"].append(field[9])
1251 self.fpr_lookup[field[9]] = key
1253 def import_users_from_ldap(self, session):
1257 LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
1258 LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
1260 l = ldap.open(LDAPServer)
1261 l.simple_bind_s("","")
1262 Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
1263 "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
1264 ["uid", "keyfingerprint", "cn", "mn", "sn"])
1266 ldap_fin_uid_id = {}
1273 uid = entry["uid"][0]
1274 name = get_ldap_name(entry)
1275 fingerprints = entry["keyFingerPrint"]
1277 for f in fingerprints:
1278 key = self.fpr_lookup.get(f, None)
1279 if key not in self.keys:
1281 self.keys[key]["uid"] = uid
1285 keyid = get_or_set_uid(uid, session).uid_id
1286 byuid[keyid] = (uid, name)
1287 byname[uid] = (keyid, name)
1289 return (byname, byuid)
1291 def generate_users_from_keyring(self, format, session):
1295 for x in self.keys.keys():
1296 if "email" not in self.keys[x]:
1298 self.keys[x]["uid"] = format % "invalid-uid"
1300 uid = format % self.keys[x]["email"]
1301 keyid = get_or_set_uid(uid, session).uid_id
1302 byuid[keyid] = (uid, self.keys[x]["name"])
1303 byname[uid] = (keyid, self.keys[x]["name"])
1304 self.keys[x]["uid"] = uid
1307 uid = format % "invalid-uid"
1308 keyid = get_or_set_uid(uid, session).uid_id
1309 byuid[keyid] = (uid, "ungeneratable user id")
1310 byname[uid] = (keyid, "ungeneratable user id")
1312 return (byname, byuid)
1314 __all__.append('Keyring')
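# Usage sketch: load a keyring file and derive uid mappings from it. The
# keyring path and the format string are examples, not dak configuration
# values; get_keyring() is defined just below.
#
#     kr = get_keyring('/srv/keyring.debian.org/keyrings/debian-keyring.gpg', session)
#     kr.load_keys(kr.keyring_name)
#     (byname, byuid) = kr.generate_users_from_keyring('%s', session)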
1317 def get_keyring(keyring, session=None):
1319 If C{keyring} does not have an entry in the C{keyrings} table yet, return None.
1320 If C{keyring} already has an entry, simply return the existing Keyring.
1322 @type keyring: string
1323 @param keyring: the keyring name
1326 @return: the Keyring object for this keyring
1329 q = session.query(Keyring).filter_by(keyring_name=keyring)
1333 except NoResultFound:
1336 __all__.append('get_keyring')
1339 def get_active_keyring_paths(session=None):
1342 @return: list of active keyring paths
1344 return [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).order_by(desc(Keyring.priority)).all() ]
1346 __all__.append('get_active_keyring_paths')
1349 def get_primary_keyring_path(session=None):
1351 Get the full path to the highest priority active keyring
1354 @return: path to the active keyring with the highest priority or None if no
1355 keyring is configured
1357 keyrings = get_active_keyring_paths()
1359 if len(keyrings) > 0:
1364 __all__.append('get_primary_keyring_path')
1366 ################################################################################
1368 class KeyringACLMap(object):
1369 def __init__(self, *args, **kwargs):
1373 return '<KeyringACLMap %s>' % self.keyring_acl_map_id
1375 __all__.append('KeyringACLMap')
1377 ################################################################################
1379 class DBChange(object):
1380 def __init__(self, *args, **kwargs):
1384 return '<DBChange %s>' % self.changesname
1386 __all__.append('DBChange')
1389 def get_dbchange(filename, session=None):
1391 returns DBChange object for given C{filename}.
1393 @type filename: string
1394 @param filename: the name of the file
1396 @type session: Session
1397 @param session: Optional SQLA session object (a temporary one will be
1398 generated if not supplied)
1401 @return: DBChange object for the given filename (C{None} if not present)
1404 q = session.query(DBChange).filter_by(changesname=filename)
1408 except NoResultFound:
1411 __all__.append('get_dbchange')
1413 ################################################################################
1415 class Maintainer(ORMObject):
1416 def __init__(self, name = None):
1419 def properties(self):
1420 return ['name', 'maintainer_id']
1422 def not_null_constraints(self):
1425 def get_split_maintainer(self):
1426 if not hasattr(self, 'name') or self.name is None:
1427 return ('', '', '', '')
1429 return fix_maintainer(self.name.strip())
1431 __all__.append('Maintainer')
1434 def get_or_set_maintainer(name, session=None):
1436 Returns Maintainer object for given maintainer name.
1438 If no matching maintainer name is found, a row is inserted.
1441 @param name: The maintainer name to add
1443 @type session: SQLAlchemy
1444 @param session: Optional SQL session object (a temporary one will be
1445 generated if not supplied). If not passed, a commit will be performed at
1446 the end of the function, otherwise the caller is responsible for committing.
1447 A flush will be performed either way.
1450 @return: the Maintainer object for the given maintainer
1453 q = session.query(Maintainer).filter_by(name=name)
1456 except NoResultFound:
1457 maintainer = Maintainer()
1458 maintainer.name = name
1459 session.add(maintainer)
1460 session.commit_or_flush()
1465 __all__.append('get_or_set_maintainer')
1468 def get_maintainer(maintainer_id, session=None):
1470 Return the Maintainer object behind C{maintainer_id} or None if that
1471 maintainer_id is invalid.
1473 @type maintainer_id: int
1474 @param maintainer_id: the id of the maintainer
1477 @return: the Maintainer with this C{maintainer_id}
1480 return session.query(Maintainer).get(maintainer_id)
1482 __all__.append('get_maintainer')
1484 ################################################################################
1486 class NewComment(object):
1487 def __init__(self, *args, **kwargs):
1491 return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
1493 __all__.append('NewComment')
1496 def has_new_comment(package, version, session=None):
1498 Returns true if the given combination of C{package}, C{version} has a comment.
1500 @type package: string
1501 @param package: name of the package
1503 @type version: string
1504 @param version: package version
1506 @type session: Session
1507 @param session: Optional SQLA session object (a temporary one will be
1508 generated if not supplied)
1514 q = session.query(NewComment)
1515 q = q.filter_by(package=package)
1516 q = q.filter_by(version=version)
1518 return bool(q.count() > 0)
1520 __all__.append('has_new_comment')
1523 def get_new_comments(package=None, version=None, comment_id=None, session=None):
1525 Returns (possibly empty) list of NewComment objects for the given
1528 @type package: string (optional)
1529 @param package: name of the package
1531 @type version: string (optional)
1532 @param version: package version
1534 @type comment_id: int (optional)
1535 @param comment_id: An id of a comment
1537 @type session: Session
1538 @param session: Optional SQLA session object (a temporary one will be
1539 generated if not supplied)
1542 @return: A (possibly empty) list of NewComment objects will be returned
1545 q = session.query(NewComment)
1546 if package is not None: q = q.filter_by(package=package)
1547 if version is not None: q = q.filter_by(version=version)
1548 if comment_id is not None: q = q.filter_by(comment_id=comment_id)
1552 __all__.append('get_new_comments')
1554 ################################################################################
1556 class Override(ORMObject):
1557 def __init__(self, package = None, suite = None, component = None, overridetype = None, \
1558 section = None, priority = None):
1559 self.package = package
1561 self.component = component
1562 self.overridetype = overridetype
1563 self.section = section
1564 self.priority = priority
1566 def properties(self):
1567 return ['package', 'suite', 'component', 'overridetype', 'section', \
1570 def not_null_constraints(self):
1571 return ['package', 'suite', 'component', 'overridetype', 'section']
1573 __all__.append('Override')
1576 def get_override(package, suite=None, component=None, overridetype=None, session=None):
1578 Returns Override objects for the given parameters
1580 @type package: string
1581 @param package: The name of the package
1583 @type suite: string, list or None
1584 @param suite: The name of the suite (or suites if a list) to limit to. If
1585 None, don't limit. Defaults to None.
1587 @type component: string, list or None
1588 @param component: The name of the component (or components if a list) to
1589 limit to. If None, don't limit. Defaults to None.
1591 @type overridetype: string, list or None
1592 @param overridetype: The name of the overridetype (or overridetypes if a list) to
1593 limit to. If None, don't limit. Defaults to None.
1595 @type session: Session
1596 @param session: Optional SQLA session object (a temporary one will be
1597 generated if not supplied)
1600 @return: A (possibly empty) list of Override objects will be returned
1603 q = session.query(Override)
1604 q = q.filter_by(package=package)
1606 if suite is not None:
1607 if not isinstance(suite, list): suite = [suite]
1608 q = q.join(Suite).filter(Suite.suite_name.in_(suite))
1610 if component is not None:
1611 if not isinstance(component, list): component = [component]
1612 q = q.join(Component).filter(Component.component_name.in_(component))
1614 if overridetype is not None:
1615 if not isinstance(overridetype, list): overridetype = [overridetype]
1616 q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))
1620 __all__.append('get_override')
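# Usage sketch: the suite/component/overridetype filters accept either a
# single name or a list of names (the values below are examples).
#
#     overrides = get_override('hello', suite=['unstable', 'experimental'],
#                              component='main', overridetype='deb')
#     # -> possibly empty list of Override objects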
1623 ################################################################################
1625 class OverrideType(ORMObject):
1626 def __init__(self, overridetype = None):
1627 self.overridetype = overridetype
1629 def properties(self):
1630 return ['overridetype', 'overridetype_id', 'overrides_count']
1632 def not_null_constraints(self):
1633 return ['overridetype']
1635 __all__.append('OverrideType')
1638 def get_override_type(override_type, session=None):
1640 Returns OverrideType object for given C{override type}.
1642 @type override_type: string
1643 @param override_type: The name of the override type
1645 @type session: Session
1646 @param session: Optional SQLA session object (a temporary one will be
1647 generated if not supplied)
1650 @return: the OverrideType object for the given override type (None if not present)
1653 q = session.query(OverrideType).filter_by(overridetype=override_type)
1657 except NoResultFound:
1660 __all__.append('get_override_type')
1662 ################################################################################
1664 class PolicyQueue(object):
1665 def __init__(self, *args, **kwargs):
1669 return '<PolicyQueue %s>' % self.queue_name
1671 __all__.append('PolicyQueue')
1674 def get_policy_queue(queuename, session=None):
1676 Returns PolicyQueue object for given C{queue name}
1678 @type queuename: string
1679 @param queuename: The name of the queue
1681 @type session: Session
1682 @param session: Optional SQLA session object (a temporary one will be
1683 generated if not supplied)
1686 @return: PolicyQueue object for the given queue
1689 q = session.query(PolicyQueue).filter_by(queue_name=queuename)
1693 except NoResultFound:
1696 __all__.append('get_policy_queue')
1698 ################################################################################
1700 class PolicyQueueUpload(object):
1701 def __cmp__(self, other):
1702 ret = cmp(self.changes.source, other.changes.source)
1704 ret = apt_pkg.version_compare(self.changes.version, other.changes.version)
1706 if self.source is not None and other.source is None:
1708 elif self.source is None and other.source is not None:
1711 ret = cmp(self.changes.changesname, other.changes.changesname)
1714 __all__.append('PolicyQueueUpload')
1716 ################################################################################
1718 class PolicyQueueByhandFile(object):
1721 __all__.append('PolicyQueueByhandFile')
1723 ################################################################################
1725 class Priority(ORMObject):
1726 def __init__(self, priority = None, level = None):
1727 self.priority = priority
1730 def properties(self):
1731 return ['priority', 'priority_id', 'level', 'overrides_count']
1733 def not_null_constraints(self):
1734 return ['priority', 'level']
1736 def __eq__(self, val):
1737 if isinstance(val, str):
1738 return (self.priority == val)
1739 # This signals to use the normal comparison operator
1740 return NotImplemented
1742 def __ne__(self, val):
1743 if isinstance(val, str):
1744 return (self.priority != val)
1745 # This signals to use the normal comparison operator
1746 return NotImplemented
1748 __all__.append('Priority')
1751 def get_priority(priority, session=None):
1753 Returns Priority object for given C{priority name}.
1755 @type priority: string
1756 @param priority: The name of the priority
1758 @type session: Session
1759 @param session: Optional SQLA session object (a temporary one will be
1760 generated if not supplied)
1763 @return: Priority object for the given priority
1766 q = session.query(Priority).filter_by(priority=priority)
1770 except NoResultFound:
1773 __all__.append('get_priority')
1776 def get_priorities(session=None):
1778 Returns dictionary of priority names -> id mappings
1780 @type session: Session
1781 @param session: Optional SQL session object (a temporary one will be
1782 generated if not supplied)
1785 @return: dictionary of priority names -> id mappings
1789 q = session.query(Priority)
1791 ret[x.priority] = x.priority_id
1795 __all__.append('get_priorities')
1797 ################################################################################
1799 class Section(ORMObject):
1800 def __init__(self, section = None):
1801 self.section = section
1803 def properties(self):
1804 return ['section', 'section_id', 'overrides_count']
1806 def not_null_constraints(self):
1809 def __eq__(self, val):
1810 if isinstance(val, str):
1811 return (self.section == val)
1812 # This signals to use the normal comparison operator
1813 return NotImplemented
1815 def __ne__(self, val):
1816 if isinstance(val, str):
1817 return (self.section != val)
1818 # This signals to use the normal comparison operator
1819 return NotImplemented
1821 __all__.append('Section')
1824 def get_section(section, session=None):
1826 Returns Section object for given C{section name}.
1828 @type section: string
1829 @param section: The name of the section
1831 @type session: Session
1832 @param session: Optional SQLA session object (a temporary one will be
1833 generated if not supplied)
1836 @return: Section object for the given section name
1839 q = session.query(Section).filter_by(section=section)
1843 except NoResultFound:
1846 __all__.append('get_section')
1849 def get_sections(session=None):
1851 Returns dictionary of section names -> id mappings
1853 @type session: Session
1854 @param session: Optional SQL session object (a temporary one will be
1855 generated if not supplied)
1858 @return: dictionary of section names -> id mappings
1862 q = session.query(Section)
1864 ret[x.section] = x.section_id
1868 __all__.append('get_sections')
1870 ################################################################################
1872 class SrcContents(ORMObject):
1873 def __init__(self, file = None, source = None):
1875 self.source = source
1877 def properties(self):
1878 return ['file', 'source']
1880 __all__.append('SrcContents')
1882 ################################################################################
1884 from debian.debfile import Deb822
1886 # Temporary Deb822 subclass to fix bugs with : handling; see #597249
1887 class Dak822(Deb822):
1888 def _internal_parser(self, sequence, fields=None):
1889 # The key is non-whitespace, non-colon characters before any colon.
1890 key_part = r"^(?P<key>[^: \t\n\r\f\v]+)\s*:\s*"
1891 single = re.compile(key_part + r"(?P<data>\S.*?)\s*$")
1892 multi = re.compile(key_part + r"$")
1893 multidata = re.compile(r"^\s(?P<data>.+?)\s*$")
1895 wanted_field = lambda f: fields is None or f in fields
1897 if isinstance(sequence, basestring):
1898 sequence = sequence.splitlines()
1902 for line in self.gpg_stripped_paragraph(sequence):
1903 m = single.match(line)
1906 self[curkey] = content
1908 if not wanted_field(m.group('key')):
1912 curkey = m.group('key')
1913 content = m.group('data')
1916 m = multi.match(line)
1919 self[curkey] = content
1921 if not wanted_field(m.group('key')):
1925 curkey = m.group('key')
1929 m = multidata.match(line)
1931 content += '\n' + line # XXX not m.group('data')?
1935 self[curkey] = content
1938 class DBSource(ORMObject):
1939 def __init__(self, source = None, version = None, maintainer = None, \
1940 changedby = None, poolfile = None, install_date = None, fingerprint = None):
1941 self.source = source
1942 self.version = version
1943 self.maintainer = maintainer
1944 self.changedby = changedby
1945 self.poolfile = poolfile
1946 self.install_date = install_date
1947 self.fingerprint = fingerprint
1951 return self.source_id
1953 def properties(self):
1954 return ['source', 'source_id', 'maintainer', 'changedby', \
1955 'fingerprint', 'poolfile', 'version', 'suites_count', \
1956 'install_date', 'binaries_count', 'uploaders_count']
1958 def not_null_constraints(self):
1959 return ['source', 'version', 'install_date', 'maintainer', \
1960 'changedby', 'poolfile']
1962 def read_control_fields(self):
1964 Reads the control information from a dsc
1967 @return: the fields of the dsc information in dictionary form
1969 fullpath = self.poolfile.fullpath
1970 fields = Dak822(open(self.poolfile.fullpath, 'r'))
1973 metadata = association_proxy('key', 'value')
1975 def scan_contents(self):
1977 Returns a set of names for non directories. The path names are
1978 normalized after converting them from either utf-8 or iso8859-1 encoding.
1981 fullpath = self.poolfile.fullpath
1982 from daklib.contents import UnpackedSource
1983 unpacked = UnpackedSource(fullpath)
1985 for name in unpacked.get_all_filenames():
1986 # enforce proper utf-8 encoding
1988 name.decode('utf-8')
1989 except UnicodeDecodeError:
1990 name = name.decode('iso8859-1').encode('utf-8')
1994 __all__.append('DBSource')
1997 def source_exists(source, source_version, suites = ["any"], session=None):
1999 Ensure that source exists somewhere in the archive for the binary
2000 upload being processed.
2001 1. exact match => 1.0-3
2002 2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
2004 @type source: string
2005 @param source: source name
2007 @type source_version: string
2008 @param source_version: expected source version
2011 @param suites: list of suites to check in, default I{any}
2013 @type session: Session
2014 @param session: Optional SQLA session object (a temporary one will be
2015 generated if not supplied)
2018 @return: 1 if a source with the expected version is found, otherwise 0
2025 from daklib.regexes import re_bin_only_nmu
2026 orig_source_version = re_bin_only_nmu.sub('', source_version)
2028 for suite in suites:
2029 q = session.query(DBSource).filter_by(source=source). \
2030 filter(DBSource.version.in_([source_version, orig_source_version]))
2032 # source must exist in 'suite' or a suite that is enhanced by 'suite'
2033 s = get_suite(suite, session)
2035 enhances_vcs = session.query(VersionCheck).filter(VersionCheck.suite==s).filter_by(check='Enhances')
2036 considered_suites = [ vc.reference for vc in enhances_vcs ]
2037 considered_suites.append(s)
2039 q = q.filter(DBSource.suites.any(Suite.suite_id.in_([s.suite_id for s in considered_suites])))
2044 # No source found so return not ok
2049 __all__.append('source_exists')
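# Usage sketch: for a binNMU upload versioned 1.0-3+b1 the '+b1' part is
# stripped, so a source package at 1.0-3 in the checked suites is enough
# (the package name and versions are made-up).
#
#     if not source_exists('hello', '1.0-3+b1', suites=['unstable']):
#         pass   # reject: no matching source package found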
2052 def get_suites_source_in(source, session=None):
2054 Returns list of Suite objects which the given C{source} name is in
2057 @param source: DBSource package name to search for
2060 @return: list of Suite objects for the given source
2063 return session.query(Suite).filter(Suite.sources.any(source=source)).all()
2065 __all__.append('get_suites_source_in')
2068 def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
2070 Returns list of DBSource objects for given C{source} name and other parameters
2073 @param source: DBSource package name to search for
2075 @type version: str or None
2076 @param version: DBSource version name to search for or None if not applicable
2078 @type dm_upload_allowed: bool
2079 @param dm_upload_allowed: If None, no effect. If True or False, only
2080 return packages with that dm_upload_allowed setting
2082 @type session: Session
2083 @param session: Optional SQL session object (a temporary one will be
2084 generated if not supplied)
2087 @return: list of DBSource objects for the given name (may be empty)
2090 q = session.query(DBSource).filter_by(source=source)
2092 if version is not None:
2093 q = q.filter_by(version=version)
2095 if dm_upload_allowed is not None:
2096 q = q.filter_by(dm_upload_allowed=dm_upload_allowed)
2100 __all__.append('get_sources_from_name')
2102 # FIXME: This function fails badly if it finds more than 1 source package and
2103 # its implementation is trivial enough to be inlined.
2105 def get_source_in_suite(source, suite, session=None):
2107 Returns a DBSource object for a combination of C{source} and C{suite}.
2109 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2110 - B{suite} - a suite name, eg. I{unstable}
2112 @type source: string
2113 @param source: source package name
2116 @param suite: the suite name
2119 @return: the DBSource object for I{source} in I{suite}
2123 q = get_suite(suite, session).get_sources(source)
2126 except NoResultFound:
2129 __all__.append('get_source_in_suite')
2132 def import_metadata_into_db(obj, session=None):
2134 This routine works on either DBBinary or DBSource objects and imports
2135 their metadata into the database
2137 fields = obj.read_control_fields()
2138 for k in fields.keys():
2141 val = str(fields[k])
2142 except UnicodeEncodeError:
2143 # Fall back to UTF-8
2145 val = fields[k].encode('utf-8')
2146 except UnicodeEncodeError:
2147 # Finally try iso8859-1
2148 val = fields[k].encode('iso8859-1')
2149 # Otherwise we allow the exception to percolate up and we cause
2150 # a reject as someone is playing silly buggers
2152 obj.metadata[get_or_set_metadatakey(k, session)] = val
2154 session.commit_or_flush()
2156 __all__.append('import_metadata_into_db')
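# Usage sketch: 'source' is assumed to be a freshly added DBSource (a DBBinary
# works the same way); its control fields end up in the metadata association
# keyed by the rows returned from get_or_set_metadatakey().
#
#     import_metadata_into_db(source, session=session)
#     print source.metadata[get_or_set_metadatakey('Maintainer', session)]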
2158 ################################################################################
2160 class SourceACL(object):
2161 def __init__(self, *args, **kwargs):
2165 return '<SourceACL %s>' % self.source_acl_id
2167 __all__.append('SourceACL')
2169 ################################################################################
2171 class SrcFormat(object):
2172 def __init__(self, *args, **kwargs):
2176 return '<SrcFormat %s>' % (self.format_name)
2178 __all__.append('SrcFormat')
2180 ################################################################################
2182 SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
2183 ('SuiteID', 'suite_id'),
2184 ('Version', 'version'),
2185 ('Origin', 'origin'),
2187 ('Description', 'description'),
2188 ('Untouchable', 'untouchable'),
2189 ('Announce', 'announce'),
2190 ('Codename', 'codename'),
2191 ('OverrideCodename', 'overridecodename'),
2192 ('ValidTime', 'validtime'),
2193 ('Priority', 'priority'),
2194 ('NotAutomatic', 'notautomatic'),
2195 ('CopyChanges', 'copychanges'),
2196 ('OverrideSuite', 'overridesuite')]
2198 # Why the heck don't we have any UNIQUE constraints in table suite?
2199 # TODO: Add UNIQUE constraints for appropriate columns.
2200 class Suite(ORMObject):
2201 def __init__(self, suite_name = None, version = None):
2202 self.suite_name = suite_name
2203 self.version = version
2205 def properties(self):
2206 return ['suite_name', 'version', 'sources_count', 'binaries_count', \
2209 def not_null_constraints(self):
2210 return ['suite_name']
2212 def __eq__(self, val):
2213 if isinstance(val, str):
2214 return (self.suite_name == val)
2215 # This signals to use the normal comparison operator
2216 return NotImplemented
2218 def __ne__(self, val):
2219 if isinstance(val, str):
2220 return (self.suite_name != val)
2221 # This signals to use the normal comparison operator
2222 return NotImplemented
2226 for disp, field in SUITE_FIELDS:
2227 val = getattr(self, field, None)
2229 ret.append("%s: %s" % (disp, val))
2231 return "\n".join(ret)
2233 def get_architectures(self, skipsrc=False, skipall=False):
2235 Returns list of Architecture objects
2237 @type skipsrc: boolean
2238 @param skipsrc: Whether to skip returning the 'source' architecture entry
2241 @type skipall: boolean
2242 @param skipall: Whether to skip returning the 'all' architecture entry
2246 @return: list of Architecture objects for the given name (may be empty)
2249 q = object_session(self).query(Architecture).with_parent(self)
2251 q = q.filter(Architecture.arch_string != 'source')
2253 q = q.filter(Architecture.arch_string != 'all')
2254 return q.order_by(Architecture.arch_string).all()
    def get_sources(self, source):
        """
        Returns a query object representing DBSource that is part of C{suite}.

          - B{source} - source package name, e.g. I{mailfilter}, I{bbdb}, I{glibc}

        @type source: string
        @param source: source package name

        @rtype: sqlalchemy.orm.query.Query
        @return: a query of DBSource
        """
        session = object_session(self)
        return session.query(DBSource).filter_by(source = source). \
            with_parent(self)

    def get_overridesuite(self):
        if self.overridesuite is None:
            return self
        else:
            return object_session(self).query(Suite).filter_by(suite_name=self.overridesuite).one()

    @property
    def path(self):
        return os.path.join(self.archive.path, 'dists', self.suite_name)
2284 __all__.append('Suite')
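# Usage sketch (illustrative only; 'unstable' and 'hello' are assumed names):
#
#   session = DBConn().session()
#   suite = get_suite('unstable', session)
#   archs = suite.get_architectures(skipsrc=True, skipall=True)
#   hello_versions = [s.version for s in suite.get_sources('hello')]
#   print suite      # formatted via SUITE_FIELDS in Suite.__str__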
@session_wrapper
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Suite
    @return: Suite object for the requested suite name (None if not present)
    """
    q = session.query(Suite).filter_by(suite_name=suite)
    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_suite')
2311 ################################################################################
@session_wrapper
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name. The list is
    empty if suite does not exist.

    @type suite: str
    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry
    (Default False)

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry
    (Default False)

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Architecture objects for the given suite (may be empty)
    """
    try:
        return get_suite(suite, session).get_architectures(skipsrc, skipall)
    except AttributeError:
        return []

__all__.append('get_suite_architectures')
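# Usage sketch (illustrative only; 'unstable' is an assumed suite name):
#
#   arch_names = [a.arch_string
#                 for a in get_suite_architectures('unstable', skipsrc=True, skipall=True)]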
2345 ################################################################################
class Uid(ORMObject):
    def __init__(self, uid = None, name = None):
        self.uid = uid
        self.name = name

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.uid == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.uid != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        return ['uid', 'name', 'fingerprint']

    def not_null_constraints(self):
        return ['uid']
2370 __all__.append('Uid')
@session_wrapper
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.

    @rtype: Uid
    @return: the uid object for the given uidname
    """
    q = session.query(Uid).filter_by(uid=uidname)

    try:
        ret = q.one()
    except NoResultFound:
        uid = Uid()
        uid.uid = uidname
        session.add(uid)
        session.commit_or_flush()
        ret = uid

    return ret

__all__.append('get_or_set_uid')
@session_wrapper
def get_uid_from_fingerprint(fpr, session=None):
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)
    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_uid_from_fingerprint')
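# Usage sketch (illustrative only; the uid and fingerprint values are made up):
#
#   session = DBConn().session()
#   uid = get_or_set_uid('jdoe', session)
#   owner = get_uid_from_fingerprint('0123456789ABCDEF0123456789ABCDEF01234567', session)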
2418 ################################################################################
class UploadBlock(object):
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
2427 __all__.append('UploadBlock')
2429 ################################################################################
class MetadataKey(ORMObject):
    def __init__(self, key = None):
        self.key = key

    def properties(self):
        return ['key']

    def not_null_constraints(self):
        return ['key']
2441 __all__.append('MetadataKey')
@session_wrapper
def get_or_set_metadatakey(keyname, session=None):
    """
    Returns MetadataKey object for given keyname.

    If no matching keyname is found, a row is inserted.

    @type keyname: string
    @param keyname: The keyname to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.

    @rtype: MetadataKey
    @return: the metadatakey object for the given keyname
    """
    q = session.query(MetadataKey).filter_by(key=keyname)

    try:
        ret = q.one()
    except NoResultFound:
        ret = MetadataKey(keyname)
        session.add(ret)
        session.commit_or_flush()

    return ret
2473 __all__.append('get_or_set_metadatakey')
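# Usage sketch (illustrative only; the field name is arbitrary):
#
#   session = DBConn().session()
#   key = get_or_set_metadatakey('Description', session)
#   # the key can then be used to index obj.metadata, as in import_metadata_into_db()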
2475 ################################################################################
class BinaryMetadata(ORMObject):
    def __init__(self, key = None, value = None, binary = None):
        self.key = key
        self.value = value
        self.binary = binary

    def properties(self):
        return ['binary', 'key', 'value']

    def not_null_constraints(self):
        return ['value']
2489 __all__.append('BinaryMetadata')
2491 ################################################################################
class SourceMetadata(ORMObject):
    def __init__(self, key = None, value = None, source = None):
        self.key = key
        self.value = value
        self.source = source

    def properties(self):
        return ['source', 'key', 'value']

    def not_null_constraints(self):
        return ['value']
2505 __all__.append('SourceMetadata')
2507 ################################################################################
class VersionCheck(ORMObject):
    def __init__(self, *args, **kwargs):
        pass

    def properties(self):
        #return ['suite_id', 'check', 'reference_id']
        return ['check']

    def not_null_constraints(self):
        return ['suite', 'check', 'reference']
2520 __all__.append('VersionCheck')
@session_wrapper
def get_version_checks(suite_name, check = None, session = None):
    suite = get_suite(suite_name, session)
    if suite is None:
        # Make sure that what we return is iterable so that list comprehensions
        # involving this don't cause a traceback
        return []
    q = session.query(VersionCheck).filter_by(suite=suite)
    if check:
        q = q.filter_by(check=check)
    return q.all()

__all__.append('get_version_checks')
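# Usage sketch (illustrative only; the suite name and the 'MustBeNewerThan'
# check type are assumptions):
#
#   for vc in get_version_checks('testing', 'MustBeNewerThan'):
#       print vc.reference.suite_name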
2536 ################################################################################
class DBConn(object):
    """
    database module init.
    """
    __shared_state = {}

    def __init__(self, *args, **kwargs):
        self.__dict__ = self.__shared_state

        if not getattr(self, 'initialised', False):
            self.initialised = True
            self.debug = kwargs.has_key('debug')
            self.__createconn()
2552 def __setuptables(self):
2559 'binaries_metadata',
2568 'external_overrides',
2569 'extra_src_references',
2571 'files_archive_map',
2578 # TODO: the maintainer column in table override should be removed.
2582 'policy_queue_upload',
2583 'policy_queue_upload_binaries_map',
2584 'policy_queue_byhand_file',
2595 'suite_architectures',
2596 'suite_build_queue_copy',
2597 'suite_src_formats',
2604 'almost_obsolete_all_associations',
2605 'almost_obsolete_src_associations',
2606 'any_associations_source',
2607 'bin_associations_binaries',
2608 'binaries_suite_arch',
2611 'newest_all_associations',
2612 'newest_any_associations',
2614 'newest_src_association',
2615 'obsolete_all_associations',
2616 'obsolete_any_associations',
2617 'obsolete_any_by_all_associations',
2618 'obsolete_src_associations',
2620 'src_associations_bin',
2621 'src_associations_src',
2622 'suite_arch_by_name',
2625 for table_name in tables:
2626 table = Table(table_name, self.db_meta, \
2627 autoload=True, useexisting=True)
2628 setattr(self, 'tbl_%s' % table_name, table)
2630 for view_name in views:
2631 view = Table(view_name, self.db_meta, autoload=True)
2632 setattr(self, 'view_%s' % view_name, view)
2634 def __setupmappers(self):
2635 mapper(Architecture, self.tbl_architecture,
2636 properties = dict(arch_id = self.tbl_architecture.c.id,
2637 suites = relation(Suite, secondary=self.tbl_suite_architectures,
2638 order_by=self.tbl_suite.c.suite_name,
2639 backref=backref('architectures', order_by=self.tbl_architecture.c.arch_string))),
2640 extension = validator)
2642 mapper(Archive, self.tbl_archive,
2643 properties = dict(archive_id = self.tbl_archive.c.id,
2644 archive_name = self.tbl_archive.c.name))
2646 mapper(ArchiveFile, self.tbl_files_archive_map,
2647 properties = dict(archive = relation(Archive, backref='files'),
2648 component = relation(Component),
2649 file = relation(PoolFile, backref='archives')))
2651 mapper(BuildQueue, self.tbl_build_queue,
2652 properties = dict(queue_id = self.tbl_build_queue.c.id,
2653 suite = relation(Suite, primaryjoin=(self.tbl_build_queue.c.suite_id==self.tbl_suite.c.id))))
2655 mapper(DBBinary, self.tbl_binaries,
2656 properties = dict(binary_id = self.tbl_binaries.c.id,
2657 package = self.tbl_binaries.c.package,
2658 version = self.tbl_binaries.c.version,
2659 maintainer_id = self.tbl_binaries.c.maintainer,
2660 maintainer = relation(Maintainer),
2661 source_id = self.tbl_binaries.c.source,
2662 source = relation(DBSource, backref='binaries'),
2663 arch_id = self.tbl_binaries.c.architecture,
2664 architecture = relation(Architecture),
2665 poolfile_id = self.tbl_binaries.c.file,
2666 poolfile = relation(PoolFile),
2667 binarytype = self.tbl_binaries.c.type,
2668 fingerprint_id = self.tbl_binaries.c.sig_fpr,
2669 fingerprint = relation(Fingerprint),
2670 install_date = self.tbl_binaries.c.install_date,
2671 suites = relation(Suite, secondary=self.tbl_bin_associations,
2672 backref=backref('binaries', lazy='dynamic')),
2673 extra_sources = relation(DBSource, secondary=self.tbl_extra_src_references,
2674 backref=backref('extra_binary_references', lazy='dynamic')),
2675 key = relation(BinaryMetadata, cascade='all',
2676 collection_class=attribute_mapped_collection('key'))),
2677 extension = validator)
2679 mapper(BinaryACL, self.tbl_binary_acl,
2680 properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))
2682 mapper(BinaryACLMap, self.tbl_binary_acl_map,
2683 properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
2684 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
2685 architecture = relation(Architecture)))
2687 mapper(Component, self.tbl_component,
2688 properties = dict(component_id = self.tbl_component.c.id,
2689 component_name = self.tbl_component.c.name),
2690 extension = validator)
2692 mapper(DBConfig, self.tbl_config,
2693 properties = dict(config_id = self.tbl_config.c.id))
2695 mapper(DSCFile, self.tbl_dsc_files,
2696 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
2697 source_id = self.tbl_dsc_files.c.source,
2698 source = relation(DBSource),
2699 poolfile_id = self.tbl_dsc_files.c.file,
2700 poolfile = relation(PoolFile)))
        mapper(ExternalOverride, self.tbl_external_overrides,
            properties = dict(
                suite_id = self.tbl_external_overrides.c.suite,
                suite = relation(Suite),
                component_id = self.tbl_external_overrides.c.component,
                component = relation(Component)))
2709 mapper(PoolFile, self.tbl_files,
2710 properties = dict(file_id = self.tbl_files.c.id,
2711 filesize = self.tbl_files.c.size),
2712 extension = validator)
2714 mapper(Fingerprint, self.tbl_fingerprint,
2715 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
2716 uid_id = self.tbl_fingerprint.c.uid,
2717 uid = relation(Uid),
2718 keyring_id = self.tbl_fingerprint.c.keyring,
2719 keyring = relation(Keyring),
2720 source_acl = relation(SourceACL),
2721 binary_acl = relation(BinaryACL)),
2722 extension = validator)
2724 mapper(Keyring, self.tbl_keyrings,
2725 properties = dict(keyring_name = self.tbl_keyrings.c.name,
2726 keyring_id = self.tbl_keyrings.c.id))
2728 mapper(DBChange, self.tbl_changes,
2729 properties = dict(change_id = self.tbl_changes.c.id,
2730 seen = self.tbl_changes.c.seen,
2731 source = self.tbl_changes.c.source,
2732 binaries = self.tbl_changes.c.binaries,
2733 architecture = self.tbl_changes.c.architecture,
2734 distribution = self.tbl_changes.c.distribution,
2735 urgency = self.tbl_changes.c.urgency,
2736 maintainer = self.tbl_changes.c.maintainer,
2737 changedby = self.tbl_changes.c.changedby,
2738 date = self.tbl_changes.c.date,
2739 version = self.tbl_changes.c.version))
2741 mapper(KeyringACLMap, self.tbl_keyring_acl_map,
2742 properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
2743 keyring = relation(Keyring, backref="keyring_acl_map"),
2744 architecture = relation(Architecture)))
2746 mapper(Maintainer, self.tbl_maintainer,
2747 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
2748 maintains_sources = relation(DBSource, backref='maintainer',
2749 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
2750 changed_sources = relation(DBSource, backref='changedby',
2751 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
2752 extension = validator)
2754 mapper(NewComment, self.tbl_new_comments,
2755 properties = dict(comment_id = self.tbl_new_comments.c.id))
2757 mapper(Override, self.tbl_override,
2758 properties = dict(suite_id = self.tbl_override.c.suite,
2759 suite = relation(Suite, \
2760 backref=backref('overrides', lazy='dynamic')),
2761 package = self.tbl_override.c.package,
2762 component_id = self.tbl_override.c.component,
2763 component = relation(Component, \
2764 backref=backref('overrides', lazy='dynamic')),
2765 priority_id = self.tbl_override.c.priority,
2766 priority = relation(Priority, \
2767 backref=backref('overrides', lazy='dynamic')),
2768 section_id = self.tbl_override.c.section,
2769 section = relation(Section, \
2770 backref=backref('overrides', lazy='dynamic')),
2771 overridetype_id = self.tbl_override.c.type,
2772 overridetype = relation(OverrideType, \
2773 backref=backref('overrides', lazy='dynamic'))))
2775 mapper(OverrideType, self.tbl_override_type,
2776 properties = dict(overridetype = self.tbl_override_type.c.type,
2777 overridetype_id = self.tbl_override_type.c.id))
2779 mapper(PolicyQueue, self.tbl_policy_queue,
2780 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id,
2781 suite = relation(Suite, primaryjoin=(self.tbl_policy_queue.c.suite_id == self.tbl_suite.c.id))))
        mapper(PolicyQueueUpload, self.tbl_policy_queue_upload,
            properties = dict(
                changes = relation(DBChange),
                policy_queue = relation(PolicyQueue, backref='uploads'),
                target_suite = relation(Suite),
                source = relation(DBSource),
                binaries = relation(DBBinary, secondary=self.tbl_policy_queue_upload_binaries_map),
                ))
        mapper(PolicyQueueByhandFile, self.tbl_policy_queue_byhand_file,
            properties = dict(
                upload = relation(PolicyQueueUpload, backref='byhand'),
                ))
2798 mapper(Priority, self.tbl_priority,
2799 properties = dict(priority_id = self.tbl_priority.c.id))
2801 mapper(Section, self.tbl_section,
2802 properties = dict(section_id = self.tbl_section.c.id,
2803 section=self.tbl_section.c.section))
2805 mapper(DBSource, self.tbl_source,
2806 properties = dict(source_id = self.tbl_source.c.id,
2807 version = self.tbl_source.c.version,
2808 maintainer_id = self.tbl_source.c.maintainer,
2809 poolfile_id = self.tbl_source.c.file,
2810 poolfile = relation(PoolFile),
2811 fingerprint_id = self.tbl_source.c.sig_fpr,
2812 fingerprint = relation(Fingerprint),
2813 changedby_id = self.tbl_source.c.changedby,
2814 srcfiles = relation(DSCFile,
2815 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
2816 suites = relation(Suite, secondary=self.tbl_src_associations,
2817 backref=backref('sources', lazy='dynamic')),
2818 uploaders = relation(Maintainer,
2819 secondary=self.tbl_src_uploaders),
2820 key = relation(SourceMetadata, cascade='all',
2821 collection_class=attribute_mapped_collection('key'))),
2822 extension = validator)
2824 mapper(SourceACL, self.tbl_source_acl,
2825 properties = dict(source_acl_id = self.tbl_source_acl.c.id))
2827 mapper(SrcFormat, self.tbl_src_format,
2828 properties = dict(src_format_id = self.tbl_src_format.c.id,
2829 format_name = self.tbl_src_format.c.format_name))
2831 mapper(Suite, self.tbl_suite,
2832 properties = dict(suite_id = self.tbl_suite.c.id,
2833 policy_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.policy_queue_id == self.tbl_policy_queue.c.id)),
2834 copy_queues = relation(BuildQueue,
2835 secondary=self.tbl_suite_build_queue_copy),
2836 srcformats = relation(SrcFormat, secondary=self.tbl_suite_src_formats,
2837 backref=backref('suites', lazy='dynamic')),
2838 archive = relation(Archive, backref='suites')),
2839 extension = validator)
2841 mapper(Uid, self.tbl_uid,
2842 properties = dict(uid_id = self.tbl_uid.c.id,
2843 fingerprint = relation(Fingerprint)),
2844 extension = validator)
2846 mapper(UploadBlock, self.tbl_upload_blocks,
2847 properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
2848 fingerprint = relation(Fingerprint, backref="uploadblocks"),
2849 uid = relation(Uid, backref="uploadblocks")))
        mapper(BinContents, self.tbl_bin_contents,
            properties = dict(
                binary = relation(DBBinary,
                    backref=backref('contents', lazy='dynamic', cascade='all')),
                file = self.tbl_bin_contents.c.file))
        mapper(SrcContents, self.tbl_src_contents,
            properties = dict(
                source = relation(DBSource,
                    backref=backref('contents', lazy='dynamic', cascade='all')),
                file = self.tbl_src_contents.c.file))
        mapper(MetadataKey, self.tbl_metadata_keys,
            properties = dict(
                key_id = self.tbl_metadata_keys.c.key_id,
                key = self.tbl_metadata_keys.c.key))
        mapper(BinaryMetadata, self.tbl_binaries_metadata,
            properties = dict(
                binary_id = self.tbl_binaries_metadata.c.bin_id,
                binary = relation(DBBinary),
                key_id = self.tbl_binaries_metadata.c.key_id,
                key = relation(MetadataKey),
                value = self.tbl_binaries_metadata.c.value))
        mapper(SourceMetadata, self.tbl_source_metadata,
            properties = dict(
                source_id = self.tbl_source_metadata.c.src_id,
                source = relation(DBSource),
                key_id = self.tbl_source_metadata.c.key_id,
                key = relation(MetadataKey),
                value = self.tbl_source_metadata.c.value))
        mapper(VersionCheck, self.tbl_version_check,
            properties = dict(
                suite_id = self.tbl_version_check.c.suite,
                suite = relation(Suite, primaryjoin=self.tbl_version_check.c.suite==self.tbl_suite.c.id),
                reference_id = self.tbl_version_check.c.reference,
                reference = relation(Suite, primaryjoin=self.tbl_version_check.c.reference==self.tbl_suite.c.id, lazy='joined')))
    ## Connection functions
    def __createconn(self):
        from config import Config
        cnf = Config()
        if cnf.has_key("DB::Service"):
            connstr = "postgresql://service=%s" % cnf["DB::Service"]
        elif cnf.has_key("DB::Host"):
            # TCP/IP
            connstr = "postgresql://%s" % cnf["DB::Host"]
            if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
                connstr += ":%s" % cnf["DB::Port"]
            connstr += "/%s" % cnf["DB::Name"]
        else:
            # Unix socket
            connstr = "postgresql:///%s" % cnf["DB::Name"]
            if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
                connstr += "?port=%s" % cnf["DB::Port"]

        engine_args = { 'echo': self.debug }
        if cnf.has_key('DB::PoolSize'):
            engine_args['pool_size'] = int(cnf['DB::PoolSize'])
        if cnf.has_key('DB::MaxOverflow'):
            engine_args['max_overflow'] = int(cnf['DB::MaxOverflow'])
        if sa_major_version == '0.6' and cnf.has_key('DB::Unicode') and \
           cnf['DB::Unicode'] == 'false':
            engine_args['use_native_unicode'] = False

        # Monkey patch a new dialect in, in order to support the service= syntax
        import sqlalchemy.dialects.postgresql
        from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
        class PGDialect_psycopg2_dak(PGDialect_psycopg2):
            def create_connect_args(self, url):
                if str(url).startswith('postgresql://service='):
                    # strip the leading "postgresql://service=" prefix
                    servicename = str(url)[21:]
                    return (['service=%s' % servicename], {})
                else:
                    return PGDialect_psycopg2.create_connect_args(self, url)

        sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak

        try:
            self.db_pg   = create_engine(connstr, **engine_args)
            self.db_meta = MetaData()
            self.db_meta.bind = self.db_pg
            self.db_smaker = sessionmaker(bind=self.db_pg,
                                          autoflush=True,
                                          autocommit=False)

            self.__setuptables()
            self.__setupmappers()

        except OperationalError as e:
            import utils
            utils.fubar("Cannot connect to database (%s)" % str(e))

        self.pid = os.getpid()
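    # Illustrative examples of the connection strings built by __createconn()
    # (the configuration values are made up):
    #
    #   DB::Service = "projectb"                       ->  postgresql://service=projectb
    #   DB::Host = "db.example.org", DB::Name = "projectb"
    #                                                  ->  postgresql://db.example.org/projectb
    #   DB::Name = "projectb" (no host, no service)    ->  postgresql:///projectb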
    def session(self, work_mem = 0):
        '''
        Returns a new session object. If a work_mem parameter is provided a new
        transaction is started and the work_mem parameter is set for this
        transaction. The work_mem parameter is measured in MB. A default value
        will be used if the parameter is not set.
        '''
        # reinitialize DBConn in new processes
        if self.pid != os.getpid():
            clear_mappers()
            self.__createconn()
        session = self.db_smaker()
        if work_mem > 0:
            session.execute("SET LOCAL work_mem TO '%d MB'" % work_mem)
        return session
2965 __all__.append('DBConn')
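# Usage sketch (illustrative only):
#
#   db = DBConn()                        # Borg-style: all instances share one state
#   session = db.session(work_mem = 64)  # optional per-transaction work_mem, in MB
#   suites = session.query(Suite).all()
#   session.close()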