5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
37 import daklib.daksubprocess
39 from os.path import normpath
50 import simplejson as json
52 from datetime import datetime, timedelta
53 from errno import ENOENT
54 from tempfile import mkstemp, mkdtemp
55 from tarfile import TarFile
57 from inspect import getargspec
60 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
62 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
63 backref, MapperExtension, EXT_CONTINUE, object_mapper, clear_mappers
64 from sqlalchemy import types as sqltypes
65 from sqlalchemy.orm.collections import attribute_mapped_collection
66 from sqlalchemy.ext.associationproxy import association_proxy
68 # Don't remove this, we re-export the exceptions to scripts which import us
69 from sqlalchemy.exc import *
70 from sqlalchemy.orm.exc import NoResultFound
72 # Only import Config until Queue stuff is changed to store its config
74 from config import Config
75 from textutils import fix_maintainer
76 from dak_exceptions import DBUpdateError, NoSourceFieldError, FileExistsError
78 # suppress some deprecation warnings in squeeze related to sqlalchemy
80 warnings.filterwarnings('ignore', \
81 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
83 warnings.filterwarnings('ignore', \
84 "Predicate of partial index .* ignored during reflection", \
88 ################################################################################
90 # Patch in support for the debversion field type so that it works during
# NOTE(review): this extract omits interior lines (the try/except that picks
# between the two UserDefinedType assignments is not visible); code kept verbatim.
94 # that is for sqlalchemy 0.6
95 UserDefinedType = sqltypes.UserDefinedType
97 # this one for sqlalchemy 0.5
98 UserDefinedType = sqltypes.TypeEngine
# Custom column type mapped onto PostgreSQL's 'debversion' type so that
# Debian version columns reflect correctly.
100 class DebVersion(UserDefinedType):
101     def get_col_spec(self):
104     def bind_processor(self, dialect):
107     # ' = None' is needed for sqlalchemy 0.5:
108     def result_processor(self, dialect, coltype = None):
# Register 'debversion' with the postgres dialect for supported SQLA versions;
# anything outside 0.5-0.9 is rejected explicitly.
111 sa_major_version = sqlalchemy.__version__[0:3]
112 if sa_major_version in ["0.5", "0.6", "0.7", "0.8", "0.9"]:
113     from sqlalchemy.databases import postgres
114     postgres.ischema_names['debversion'] = DebVersion
116     raise Exception("dak only ported to SQLA versions 0.5 to 0.9. See daklib/dbconn.py")
118 ################################################################################
# Public API of this module; extended with .append() after each definition below.
120 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
122 ################################################################################
# Decorator: makes 'session=None' parameters self-managing.
# NOTE(review): this extract omits interior lines (docstring quotes, the
# try/finally and else branches); code kept verbatim, comments only.
124 def session_wrapper(fn):
126     Wrapper around common ".., session=None):" handling. If the wrapped
127     function is called without passing 'session', we create a local one
128     and destroy it when the function ends.
130     Also attaches a commit_or_flush method to the session; if we created a
131     local session, this is a synonym for session.commit(), otherwise it is a
132     synonym for session.flush().
135     def wrapped(*args, **kwargs):
136         private_transaction = False
138         # Find the session object
139         session = kwargs.get('session')
# getargspec(fn)[0] is the wrapped function's positional parameter list;
# if fewer args than that were passed, no session came in positionally.
142         if len(args) <= len(getargspec(fn)[0]) - 1:
143             # No session specified as last argument or in kwargs
144             private_transaction = True
145             session = kwargs['session'] = DBConn().session()
147             # Session is last argument in args
151                 session = args[-1] = DBConn().session()
152                 private_transaction = True
# A private (locally created) session may really commit; a caller-supplied
# one only flushes, leaving transaction control with the caller.
154         if private_transaction:
155             session.commit_or_flush = session.commit
157             session.commit_or_flush = session.flush
160             return fn(*args, **kwargs)
162             if private_transaction:
163                 # We created a session; close it.
# Preserve the wrapped function's metadata (Python 2 spelling: func_name).
166     wrapped.__doc__ = fn.__doc__
167     wrapped.func_name = fn.func_name
171 __all__.append('session_wrapper')
173 ################################################################################
# Common base class for all mapped ORM classes in this module.
# NOTE(review): this extract omits interior lines (docstring quotes, several
# def headers, else branches); code kept verbatim, comments only.
175 class ORMObject(object):
177     ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
178     derived classes must implement the properties() method.
181     def properties(self):
183         This method should be implemented by all derived classes and returns a
184         list of the important properties. The properties 'created' and
185         'modified' will be added automatically. A suffix '_count' should be
186         added to properties that are lists or query objects. The most important
187         property name should be returned as the first element in the list
188         because it is used by repr().
194         Returns a JSON representation of the object based on the properties
195         returned from the properties() method.
198         # add created and modified
199         all_properties = self.properties() + ['created', 'modified']
200         for property in all_properties:
201             # check for list or query
# '_count' suffix means: report the length/count of the underlying
# collection instead of its contents.
202             if property[-6:] == '_count':
203                 real_property = property[:-6]
204                 if not hasattr(self, real_property):
206                 value = getattr(self, real_property)
207                 if hasattr(value, '__len__'):
210                 elif hasattr(value, 'count'):
211                     # query (but not during validation)
# Counting a query would trigger SQL; skipped while validating to avoid
# a second flush mid-validation.
212                     if self.in_validation:
214                     value = value.count()
216                     raise KeyError('Do not understand property %s.' % property)
218             if not hasattr(self, property):
221             value = getattr(self, property)
225             elif isinstance(value, ORMObject):
226                 # use repr() for ORMObject types
229             # we want a string for all other types because json cannot
232             data[property] = value
233         return json.dumps(data)
237         Returns the name of the class.
239         return type(self).__name__
243         Returns a short string representation of the object using the first
244         element from the properties() method.
246         primary_property = self.properties()[0]
247         value = getattr(self, primary_property)
248         return '<%s %s>' % (self.classname(), str(value))
252         Returns a human readable form of the object using the properties()
255         return '<%s %s>' % (self.classname(), self.json())
257     def not_null_constraints(self):
259         Returns a list of properties that must be not NULL. Derived classes
260         should override this method if needed.
264     validation_message = \
265         "Validation failed because property '%s' must not be empty in object\n%s"
# Class-level flag; set while building the failure message to suppress
# query counting in json() (see above).
267     in_validation = False
271         This function validates the not NULL constraints as returned by
272         not_null_constraints(). It raises the DBUpdateError exception if
275         for property in self.not_null_constraints():
276             # TODO: It is a bit awkward that the mapper configuration allow
277             # directly setting the numeric _id columns. We should get rid of it
# A set foo_id column satisfies the constraint even if the relation
# attribute itself is not populated yet.
279             if hasattr(self, property + '_id') and \
280                 getattr(self, property + '_id') is not None:
282             if not hasattr(self, property) or getattr(self, property) is None:
283                 # str() might lead to races due to a 2nd flush
284                 self.in_validation = True
285                 message = self.validation_message % (property, str(self))
286                 self.in_validation = False
287                 raise DBUpdateError(message)
291     def get(cls, primary_key, session = None):
293         This is a support function that allows getting an object by its primary
296             Architecture.get(3[, session])
298         instead of the more verbose
300             session.query(Architecture).get(3)
302         return session.query(cls).get(primary_key)
304     def session(self, replace = False):
306         Returns the current session that is associated with the object. May
307         return None if object is in detached state.
310         return object_session(self)
312     def clone(self, session = None):
314         Clones the current object in a new session and returns the new clone. A
315         fresh session is created if the optional session parameter is not
316         provided. The function will fail if a session is provided and has
319         RATIONALE: SQLAlchemy's session is not thread safe. This method clones
320         an existing object to allow several threads to work with their own
321         instances of an ORMObject.
323         WARNING: Only persistent (committed) objects can be cloned. Changes
324         made to the original object that are not committed yet will get lost.
325         The session of the new object will always be rolled back to avoid
329         if self.session() is None:
330             raise RuntimeError( \
331                 'Method clone() failed for detached object:\n%s' % self)
332         self.session().flush()
# Re-fetch the same row, identified by primary key, in the target session.
333         mapper = object_mapper(self)
334         primary_key = mapper.primary_key_from_instance(self)
335         object_class = self.__class__
337             session = DBConn().session()
338         elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
339             raise RuntimeError( \
340                 'Method clone() failed due to unflushed changes in session.')
341         new_object = session.query(object_class).get(primary_key)
343         if new_object is None:
344             raise RuntimeError( \
345                 'Method clone() failed for non-persistent object:\n%s' % self)
348 __all__.append('ORMObject')
350 ################################################################################
# MapperExtension hook: runs ORMObject.validate() before INSERT/UPDATE.
# NOTE(review): method bodies omitted in this extract; code kept verbatim.
352 class Validator(MapperExtension):
354     This class calls the validate() method for each instance for the
355     'before_update' and 'before_insert' events. A global object validator is
356     used for configuring the individual mappers.
359     def before_update(self, mapper, connection, instance):
363     def before_insert(self, mapper, connection, instance):
# Shared singleton used when configuring the individual mappers.
367 validator = Validator()
369 ################################################################################
# Access control list row; repr shows its name.
# NOTE(review): the __repr__ def line is missing from this extract.
371 class ACL(ORMObject):
373         return "<ACL {0}>".format(self.name)
375 __all__.append('ACL')
# Per-source ACL entry linking an ACL, a fingerprint and a source package.
# NOTE(review): the __repr__ def line is missing from this extract.
377 class ACLPerSource(ORMObject):
379         return "<ACLPerSource acl={0} fingerprint={1} source={2} reason={3}>".format(self.acl.name, self.fingerprint.fingerprint, self.source, self.reason)
381 __all__.append('ACLPerSource')
383 ################################################################################
class Architecture(ORMObject):
    """An architecture (e.g. 'amd64') known to the archive."""

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Support direct comparison against a plain architecture name.
        if not isinstance(val, str):
            # Anything else: defer to the default comparison machinery.
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        if not isinstance(val, str):
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        """Important properties; the first one is used by repr()."""
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        """Properties that must not be NULL."""
        return ['arch_string']
408 __all__.append('Architecture')
# Look up an Architecture row by its name.
# NOTE(review): decorator, docstring quotes and try/return lines are missing
# from this extract; code kept verbatim.
411 def get_architecture(architecture, session=None):
413     Returns Architecture object for given C{architecture} name.
415     @type architecture: string
416     @param architecture: The name of the architecture
418     @type session: Session
419     @param session: Optional SQLA session object (a temporary one will be
420     generated if not supplied)
423     @return: Architecture object for the given arch (None if not present)
426     q = session.query(Architecture).filter_by(arch_string=architecture)
430     except NoResultFound:
433 __all__.append('get_architecture')
435 ################################################################################
# An archive (e.g. ftp-master); repr shows archive_name.
# NOTE(review): __init__ body and __repr__ def line missing from this extract.
437 class Archive(object):
438     def __init__(self, *args, **kwargs):
442         return '<Archive %s>' % self.archive_name
444 __all__.append('Archive')
# Look up an Archive row by (lower-cased) name.
# NOTE(review): decorator, docstring quotes and try/return lines are missing
# from this extract; code kept verbatim.
447 def get_archive(archive, session=None):
449     returns Archive object for given C{archive} name.
451     @type archive: string
452     @param archive: the name of the archive
454     @type session: Session
455     @param session: Optional SQLA session object (a temporary one will be
456     generated if not supplied)
459     @return: Archive object for the given name (None if not present)
# Archive names are matched case-insensitively by lower-casing the input.
462     archive = archive.lower()
464     q = session.query(Archive).filter_by(archive_name=archive)
468     except NoResultFound:
471 __all__.append('get_archive')
473 ################################################################################
# Association of a PoolFile with an (archive, component) location.
# NOTE(review): 'self.file = file' and the fullpath property def line are
# missing from this extract; code kept verbatim.
475 class ArchiveFile(object):
476     def __init__(self, archive=None, component=None, file=None):
477         self.archive = archive
478         self.component = component
# Full on-disk path: <archive.path>/pool/<component>/<filename>.
482         return os.path.join(self.archive.path, 'pool', self.component.component_name, self.file.filename)
484 __all__.append('ArchiveFile')
486 ################################################################################
# One (file, binary) row of the bin_contents table.
# NOTE(review): __init__ body lines missing from this extract.
488 class BinContents(ORMObject):
489     def __init__(self, file = None, binary = None):
493     def properties(self):
494         return ['file', 'binary']
496 __all__.append('BinContents')
498 ################################################################################
# A binary package row (binaries table).
# NOTE(review): this extract omits interior lines (continuation lines, try
# blocks, def headers); code kept verbatim, comments only.
500 class DBBinary(ORMObject):
501     def __init__(self, package = None, source = None, version = None, \
502         maintainer = None, architecture = None, poolfile = None, \
503         binarytype = 'deb', fingerprint=None):
504         self.package = package
506         self.version = version
507         self.maintainer = maintainer
508         self.architecture = architecture
509         self.poolfile = poolfile
510         self.binarytype = binarytype
511         self.fingerprint = fingerprint
515         return self.binary_id
517     def properties(self):
518         return ['package', 'version', 'maintainer', 'source', 'architecture', \
519             'poolfile', 'binarytype', 'fingerprint', 'install_date', \
520             'suites_count', 'binary_id', 'contents_count', 'extra_sources']
522     def not_null_constraints(self):
523         return ['package', 'version', 'maintainer', 'source', 'poolfile', \
# Proxy to the binary's key/value metadata association.
526     metadata = association_proxy('key', 'value')
528     def scan_contents(self):
530         Yields the contents of the package. Only regular files are yielded and
531         the path names are normalized after converting them from either utf-8
532         or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the
533         package does not contain any regular file.
# Stream the .deb's filesystem tarball out of dpkg-deb and walk it.
535         fullpath = self.poolfile.fullpath
536         dpkg_cmd = ('dpkg-deb', '--fsys-tarfile', fullpath)
537         dpkg = daklib.daksubprocess.Popen(dpkg_cmd, stdout=subprocess.PIPE)
538         tar = TarFile.open(fileobj = dpkg.stdout, mode = 'r|')
539         for member in tar.getmembers():
540             if not member.isdir():
541                 name = normpath(member.name)
542                 # enforce proper utf-8 encoding
545                 except UnicodeDecodeError:
# Fall back to latin-1 for legacy member names that are not valid utf-8.
546                     name = name.decode('iso8859-1').encode('utf-8')
552     def read_control(self):
554         Reads the control information from a binary.
557         @return: stanza text of the control section.
560         fullpath = self.poolfile.fullpath
561         with open(fullpath, 'r') as deb_file:
562             return utils.deb_extract_control(deb_file)
564     def read_control_fields(self):
566         Reads the control information from a binary and return
570         @return: fields of the control section as a dictionary.
572         stanza = self.read_control()
573         return apt_pkg.TagSection(stanza)
575 __all__.append('DBBinary')
# All suites that contain a binary with the given package name.
# NOTE(review): decorator and docstring quote lines missing from this extract.
578 def get_suites_binary_in(package, session=None):
580     Returns list of Suite objects which given C{package} name is in
583     @param package: DBBinary package name to search for
586     @return: list of Suite objects for the given package
589     return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()
591 __all__.append('get_suites_binary_in')
# Component of the newest matching binary in the given suites.
# NOTE(review): mutable default 'arch_list=[]' — harmless in the visible code
# (only read), but worth converting to None at the next real edit.
594 def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
596     Returns the component name of the newest binary package in suite_list or
597     None if no package is found. The result can be optionally filtered by a list
598     of architecture names.
601     @param package: DBBinary package name to search for
603     @type suite_list: list of str
604     @param suite_list: list of suite_name items
606     @type arch_list: list of str
607     @param arch_list: optional list of arch_string items that defaults to []
609     @rtype: str or NoneType
610     @return: name of component or None
613     q = session.query(DBBinary).filter_by(package = package). \
614         join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
615     if len(arch_list) > 0:
616         q = q.join(DBBinary.architecture). \
617             filter(Architecture.arch_string.in_(arch_list))
# Highest version wins; its poolfile's component is the answer.
618     binary = q.order_by(desc(DBBinary.version)).first()
622         return binary.poolfile.component.component_name
624 __all__.append('get_component_by_package_suite')
626 ################################################################################
# A build queue row; repr shows queue_name.
# NOTE(review): __init__ body and __repr__ def line missing from this extract.
628 class BuildQueue(object):
629     def __init__(self, *args, **kwargs):
633         return '<BuildQueue %s>' % self.queue_name
635 __all__.append('BuildQueue')
637 ################################################################################
class Component(ORMObject):
    """An archive component (e.g. 'main', 'contrib')."""

    def __init__(self, component_name = None):
        self.component_name = component_name

    def __eq__(self, val):
        # Support direct comparison against a plain component name.
        if not isinstance(val, str):
            # Anything else: defer to the default comparison machinery.
            return NotImplemented
        return self.component_name == val

    def __ne__(self, val):
        if not isinstance(val, str):
            return NotImplemented
        return self.component_name != val

    def properties(self):
        """Important properties; the first one is used by repr()."""
        return ['component_name', 'component_id', 'description',
            'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        """Properties that must not be NULL."""
        return ['component_name']
663 __all__.append('Component')
# Look up a Component row by (lower-cased) name.
# NOTE(review): decorator, docstring quotes and try/return lines missing from
# this extract; code kept verbatim.
666 def get_component(component, session=None):
668     Returns database id for given C{component}.
670     @type component: string
671     @param component: The name of the component
674     @return: the database id for the given component
677     component = component.lower()
679     q = session.query(Component).filter_by(component_name=component)
683     except NoResultFound:
686 __all__.append('get_component')
# Apply dak.conf ComponentMappings to a component name, then look it up.
689 def get_mapped_component(component_name, session=None):
690     """get component after mappings
692     Evaluate component mappings from ComponentMappings in dak.conf for the
693     given component name.
695     @todo: ansgar wants to get rid of this. It's currently only used for
698     @type component_name: str
699     @param component_name: component name
701     @param session: database session
703     @rtype: L{daklib.dbconn.Component} or C{None}
704     @return: component after applying maps or C{None}
# Each mapping is a "src dst" pair; the first matching source name wins.
707     for m in cnf.value_list("ComponentMappings"):
708         (src, dst) = m.split()
709         if component_name == src:
711     component = session.query(Component).filter_by(component_name=component_name).first()
714 __all__.append('get_mapped_component')
# All component names as plain strings.
717 def get_component_names(session=None):
719     Returns list of strings of component names.
722     @return: list of strings of component names
725     return [ x.component_name for x in session.query(Component).all() ]
727 __all__.append('get_component_names')
729 ################################################################################
# A config table row; repr shows its name.
# NOTE(review): __init__ body and __repr__ def line missing from this extract.
731 class DBConfig(object):
732     def __init__(self, *args, **kwargs):
736         return '<DBConfig %s>' % self.name
738 __all__.append('DBConfig')
740 ################################################################################
# Get-or-create: id of a content_file_names row for the given filename.
# NOTE(review): decorator, docstring quotes, try and session.add lines are
# missing from this extract; code kept verbatim.
743 def get_or_set_contents_file_id(filename, session=None):
745     Returns database id for given filename.
747     If no matching file is found, a row is inserted.
749     @type filename: string
750     @param filename: The filename
751     @type session: SQLAlchemy
752     @param session: Optional SQL session object (a temporary one will be
753     generated if not supplied). If not passed, a commit will be performed at
754     the end of the function, otherwise the caller is responsible for committing.
757     @return: the database id for the given filename
760     q = session.query(ContentFilename).filter_by(filename=filename)
763         ret = q.one().cafilename_id
764     except NoResultFound:
# Not present yet: insert it and read back the generated id.
765         cf = ContentFilename()
766         cf.filename = filename
768         session.commit_or_flush()
769         ret = cf.cafilename_id
773 __all__.append('get_or_set_contents_file_id')
# Raw-SQL query for the Contents listing of a suite/overridetype.
# NOTE(review): part of the SELECT column list and docstring quotes are
# missing from this extract; SQL text kept byte-identical.
776 def get_contents(suite, overridetype, section=None, session=None):
778     Returns contents for a suite / overridetype combination, limiting
779     to a section if not None.
782     @param suite: Suite object
784     @type overridetype: OverrideType
785     @param overridetype: OverrideType object
787     @type section: Section
788     @param section: Optional section object to limit results to
790     @type session: SQLAlchemy
791     @param session: Optional SQL session object (a temporary one will be
792     generated if not supplied)
795     @return: ResultsProxy object set up to return tuples of (filename, section,
799     # find me all of the contents for a given suite
800     contents_q = """SELECT (p.path||'/'||n.file) AS fn,
804                     FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
805                     JOIN content_file_names n ON (c.filename=n.id)
806                     JOIN binaries b ON (b.id=c.binary_pkg)
807                     JOIN override o ON (o.package=b.package)
808                     JOIN section s ON (s.id=o.section)
809                     WHERE o.suite = :suiteid AND o.type = :overridetypeid
810                     AND b.type=:overridetypename"""
# Bind parameters for the base query.
812     vals = {'suiteid': suite.suite_id,
813             'overridetypeid': overridetype.overridetype_id,
814             'overridetypename': overridetype.overridetype}
# Optional extra restriction to one section.
816     if section is not None:
817         contents_q += " AND s.id = :sectionid"
818         vals['sectionid'] = section.section_id
820     contents_q += " ORDER BY fn"
822     return session.execute(contents_q, vals)
824 __all__.append('get_contents')
826 ################################################################################
# A content_file_paths row; repr shows the filepath.
# NOTE(review): __init__ body and __repr__ def line missing from this extract.
828 class ContentFilepath(object):
829     def __init__(self, *args, **kwargs):
833         return '<ContentFilepath %s>' % self.filepath
835 __all__.append('ContentFilepath')
# Get-or-create: id of a content_file_paths row for the given path.
# NOTE(review): decorator, docstring quotes, try and session.add lines are
# missing from this extract; code kept verbatim.
838 def get_or_set_contents_path_id(filepath, session=None):
840     Returns database id for given path.
842     If no matching file is found, a row is inserted.
844     @type filepath: string
845     @param filepath: The filepath
847     @type session: SQLAlchemy
848     @param session: Optional SQL session object (a temporary one will be
849     generated if not supplied). If not passed, a commit will be performed at
850     the end of the function, otherwise the caller is responsible for committing.
853     @return: the database id for the given path
856     q = session.query(ContentFilepath).filter_by(filepath=filepath)
859         ret = q.one().cafilepath_id
860     except NoResultFound:
# Not present yet: insert it and read back the generated id.
861         cf = ContentFilepath()
862         cf.filepath = filepath
864         session.commit_or_flush()
865         ret = cf.cafilepath_id
869 __all__.append('get_or_set_contents_path_id')
871 ################################################################################
# A content_associations row; repr shows its id.
# NOTE(review): __init__ body and __repr__ def line missing from this extract.
873 class ContentAssociation(object):
874     def __init__(self, *args, **kwargs):
878         return '<ContentAssociation %s>' % self.ca_id
880 __all__.append('ContentAssociation')
# Bulk-insert bin_contents rows for one binary.
# NOTE(review): try/except/commit scaffolding is missing from this extract;
# code kept verbatim, comments only.
882 def insert_content_paths(binary_id, fullpaths, session=None):
884     Make sure given path is associated with given binary id
887     @param binary_id: the id of the binary
888     @type fullpaths: list
889     @param fullpaths: the list of paths of the file being associated with the binary
890     @type session: SQLAlchemy session
891     @param session: Optional SQLAlchemy session. If this is passed, the caller
892     is responsible for ensuring a transaction has begun and committing the
893     results or rolling back based on the result code. If not passed, a commit
894     will be performed at the end of the function, otherwise the caller is
895     responsible for committing.
897     @return: True upon success
902         session = DBConn().session()
# Lazily build one parameter dict per path, stripping a leading './'.
907     def generate_path_dicts():
908         for fullpath in fullpaths:
909             if fullpath.startswith( './' ):
910                 fullpath = fullpath[2:]
912             yield {'filename':fullpath, 'id': binary_id }
914     for d in generate_path_dicts():
915         session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
924         traceback.print_exc()
926         # Only rollback if we set up the session ourself
933 __all__.append('insert_content_paths')
935 ################################################################################
# A dsc_files row; repr shows its id.
# NOTE(review): __init__ body and __repr__ def line missing from this extract.
937 class DSCFile(object):
938     def __init__(self, *args, **kwargs):
942         return '<DSCFile %s>' % self.dscfile_id
944 __all__.append('DSCFile')
# Query DSCFile rows, optionally filtered by any combination of ids.
# NOTE(review): decorator, docstring quotes and the final q.all() return are
# missing from this extract; code kept verbatim.
947 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
949     Returns a list of DSCFiles which may be empty
951     @type dscfile_id: int (optional)
952     @param dscfile_id: the dscfile_id of the DSCFiles to find
954     @type source_id: int (optional)
955     @param source_id: the source id related to the DSCFiles to find
957     @type poolfile_id: int (optional)
958     @param poolfile_id: the poolfile id related to the DSCFiles to find
961     @return: Possibly empty list of DSCFiles
964     q = session.query(DSCFile)
# Each filter is applied only when its id was supplied.
966     if dscfile_id is not None:
967         q = q.filter_by(dscfile_id=dscfile_id)
969     if source_id is not None:
970         q = q.filter_by(source_id=source_id)
972     if poolfile_id is not None:
973         q = q.filter_by(poolfile_id=poolfile_id)
977 __all__.append('get_dscfiles')
979 ################################################################################
# An external_overrides row (package/key/value); repr shows all three.
# NOTE(review): __init__ body and __repr__ def line missing from this extract.
981 class ExternalOverride(ORMObject):
982     def __init__(self, *args, **kwargs):
986         return '<ExternalOverride %s = %s: %s>' % (self.package, self.key, self.value)
988 __all__.append('ExternalOverride')
990 ################################################################################
# A file in the pool (files table).
# NOTE(review): this extract omits interior lines (property decorators, def
# headers, return statements); code kept verbatim, comments only.
992 class PoolFile(ORMObject):
993     def __init__(self, filename = None, filesize = -1, \
995         self.filename = filename
996         self.filesize = filesize
# Prefer the file's location in a tainted archive, if any (desc() puts
# tainted=True first).
1001         session = DBConn().session().object_session(self)
1002         af = session.query(ArchiveFile).join(Archive) \
1003                     .filter(ArchiveFile.file == self) \
1004                     .order_by(Archive.tainted.desc()).first()
1008     def component(self):
# .one() enforces that the file lives in exactly one component.
1009         session = DBConn().session().object_session(self)
1010         component_id = session.query(ArchiveFile.component_id).filter(ArchiveFile.file == self) \
1011                               .group_by(ArchiveFile.component_id).one()
1012         return session.query(Component).get(component_id)
1016         return os.path.basename(self.filename)
1018     def is_valid(self, filesize = -1, md5sum = None):
1019         return self.filesize == long(filesize) and self.md5sum == md5sum
1021     def properties(self):
1022         return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
1023             'sha256sum', 'source', 'binary', 'last_used']
1025     def not_null_constraints(self):
1026         return ['filename', 'md5sum']
1028     def identical_to(self, filename):
1030         compare size and hash with the given file
1033         @return: true if the given file has the same size and hash as this object; false otherwise
# Cheap size check first; only hash when sizes match.
1035         st = os.stat(filename)
1036         if self.filesize != st.st_size:
# NOTE(review): no f.close()/with visible in this extract — confirm the
# handle is closed in the full source.
1039         f = open(filename, "r")
1040         sha256sum = apt_pkg.sha256sum(f)
1041         if sha256sum != self.sha256sum:
1046 __all__.append('PoolFile')
# Find PoolFiles whose filename ends with '/<filename>'.
# NOTE(review): decorator, docstring quotes and the q.all() return are
# missing from this extract; code kept verbatim.
1049 def get_poolfile_like_name(filename, session=None):
1051     Returns an array of PoolFile objects which are like the given name
1053     @type filename: string
1054     @param filename: the filename of the file to check against the DB
1057     @return: array of PoolFile objects
1060     # TODO: There must be a way of properly using bind parameters with %FOO%
1061     q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1065 __all__.append('get_poolfile_like_name')
1067 ################################################################################
# An OpenPGP fingerprint row.
# NOTE(review): the properties() continuation line is missing from this extract.
1069 class Fingerprint(ORMObject):
1070     def __init__(self, fingerprint = None):
1071         self.fingerprint = fingerprint
1073     def properties(self):
1074         return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
1077     def not_null_constraints(self):
1078         return ['fingerprint']
1080 __all__.append('Fingerprint')
# Look up a Fingerprint row by fingerprint string; None if absent.
# NOTE(review): decorator, docstring quotes and try/return lines missing from
# this extract; code kept verbatim.
1083 def get_fingerprint(fpr, session=None):
1085     Returns Fingerprint object for given fpr.
1088     @param fpr: The fpr to find / add
1090     @type session: SQLAlchemy
1091     @param session: Optional SQL session object (a temporary one will be
1092     generated if not supplied).
1095     @return: the Fingerprint object for the given fpr or None
1098     q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1102     except NoResultFound:
1107 __all__.append('get_fingerprint')
# Get-or-create: Fingerprint row for the given fingerprint string.
# NOTE(review): decorator, docstring quotes and try/return lines missing from
# this extract; code kept verbatim.
1110 def get_or_set_fingerprint(fpr, session=None):
1112     Returns Fingerprint object for given fpr.
1114     If no matching fpr is found, a row is inserted.
1117     @param fpr: The fpr to find / add
1119     @type session: SQLAlchemy
1120     @param session: Optional SQL session object (a temporary one will be
1121     generated if not supplied). If not passed, a commit will be performed at
1122     the end of the function, otherwise the caller is responsible for committing.
1123     A flush will be performed either way.
1126     @return: the Fingerprint object for the given fpr
1129     q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1133     except NoResultFound:
# Not present yet: insert and flush/commit so the row gets its id.
1134         fingerprint = Fingerprint()
1135         fingerprint.fingerprint = fpr
1136         session.add(fingerprint)
1137         session.commit_or_flush()
1142 __all__.append('get_or_set_fingerprint')
1144 ################################################################################
1146 # Helper routine for Keyring class
# Builds a display name from the LDAP entry's cn/mn/sn attributes, skipping
# empty and '-' placeholders.
# NOTE(review): docstring and the lines building 'ret'/'name' are missing
# from this extract; code kept verbatim.
1147 def get_ldap_name(entry):
1149     for k in ["cn", "mn", "sn"]:
1151         if ret and ret[0] != "" and ret[0] != "-":
1153     return " ".join(name)
1155 ################################################################################
1157 class Keyring(object):
1161 def __init__(self, *args, **kwargs):
1165 return '<Keyring %s>' % self.keyring_name
1167 def de_escape_gpg_str(self, txt):
1168 esclist = re.split(r'(\\x..)', txt)
1169 for x in range(1,len(esclist),2):
1170 esclist[x] = "%c" % (int(esclist[x][2:],16))
1171 return "".join(esclist)
1173 def parse_address(self, uid):
1174 """parses uid and returns a tuple of real name and email address"""
1176 (name, address) = email.Utils.parseaddr(uid)
1177 name = re.sub(r"\s*[(].*[)]", "", name)
1178 name = self.de_escape_gpg_str(name)
1181 return (name, address)
1183 def load_keys(self, keyring):
1184 if not self.keyring_id:
1185 raise Exception('Must be initialized with database information')
1187 cmd = ["gpg", "--no-default-keyring", "--keyring", keyring,
1188 "--with-colons", "--fingerprint", "--fingerprint"]
1189 p = daklib.daksubprocess.Popen(cmd, stdout=subprocess.PIPE)
1192 need_fingerprint = False
1194 for line in p.stdout:
1195 field = line.split(":")
1196 if field[0] == "pub":
1199 (name, addr) = self.parse_address(field[9])
1201 self.keys[key]["email"] = addr
1202 self.keys[key]["name"] = name
1203 need_fingerprint = True
1204 elif key and field[0] == "uid":
1205 (name, addr) = self.parse_address(field[9])
1206 if "email" not in self.keys[key] and "@" in addr:
1207 self.keys[key]["email"] = addr
1208 self.keys[key]["name"] = name
1209 elif need_fingerprint and field[0] == "fpr":
1210 self.keys[key]["fingerprints"] = [field[9]]
1211 self.fpr_lookup[field[9]] = key
1212 need_fingerprint = False
1216 raise subprocess.CalledProcessError(r, cmd)
def import_users_from_ldap(self, session):
    """
    Look up developer entries in LDAP and associate their login uids with
    the keys in this keyring, matching on key fingerprint. Returns a
    (byname, byuid) pair of mappings.
    """
    LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
    LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
    ca_cert_file = cnf.get('Import-LDAP-Fingerprints::CACertFile')

    l = ldap.open(LDAPServer)

    # TODO: This should request a new context and use
    # connection-specific options (i.e. "l.set_option(...)")

    # Request a new TLS context. If there was already one, libldap
    # would not change the TLS options (like which CAs to trust).
    #l.set_option(ldap.OPT_X_TLS_NEWCTX, True)
    ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_HARD)
    #ldap.set_option(ldap.OPT_X_TLS_CACERTDIR, None)
    ldap.set_option(ldap.OPT_X_TLS_CACERTFILE, ca_cert_file)

    # Anonymous bind is sufficient for the attributes read below.
    l.simple_bind_s("","")
    Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
           "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
           ["uid", "keyfingerprint", "cn", "mn", "sn"])

    ldap_fin_uid_id = {}

    # NOTE(review): the loop over the LDAP search results ('entry' in
    # Attrs) is not visible in this excerpt.
    uid = entry["uid"][0]
    name = get_ldap_name(entry)
    fingerprints = entry["keyFingerPrint"]
    for f in fingerprints:
        # Map each LDAP fingerprint back to a key we loaded from the keyring.
        key = self.fpr_lookup.get(f, None)
        if key not in self.keys:
            self.keys[key]["uid"] = uid

        keyid = get_or_set_uid(uid, session).uid_id
        byuid[keyid] = (uid, name)
        byname[uid] = (keyid, name)

    return (byname, byuid)
def generate_users_from_keyring(self, format, session):
    """
    Create (or fetch) Uid rows for every key in the keyring, deriving the
    uid from the key's email address via the C{format} string. Keys without
    a usable email get an "invalid-uid" placeholder. Returns a
    (byname, byuid) pair of mappings.
    """
    for x in self.keys.keys():
        if "email" not in self.keys[x]:
            # Key carries no usable email address: placeholder uid.
            self.keys[x]["uid"] = format % "invalid-uid"
        uid = format % self.keys[x]["email"]
        keyid = get_or_set_uid(uid, session).uid_id
        byuid[keyid] = (uid, self.keys[x]["name"])
        byname[uid] = (keyid, self.keys[x]["name"])
        self.keys[x]["uid"] = uid

        # Placeholder entry for keys whose uid could not be generated.
        uid = format % "invalid-uid"
        keyid = get_or_set_uid(uid, session).uid_id
        byuid[keyid] = (uid, "ungeneratable user id")
        byname[uid] = (keyid, "ungeneratable user id")

    return (byname, byuid)

__all__.append('Keyring')
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @return: the Keyring object for this keyring
    """
    q = session.query(Keyring).filter_by(keyring_name=keyring)
    # No row for this keyring name -> None (per the contract above).
    except NoResultFound:

__all__.append('get_keyring')
def get_active_keyring_paths(session=None):
    """
    @return: list of active keyring paths
    """
    # Active keyrings only, ordered highest priority first.
    return [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).order_by(desc(Keyring.priority)).all() ]

__all__.append('get_active_keyring_paths')
def get_primary_keyring_path(session=None):
    """
    Get the full path to the highest priority active keyring

    @return: path to the active keyring with the highest priority or None if no
    keyring is configured
    """
    keyrings = get_active_keyring_paths()

    # First entry is the highest-priority keyring (see ordering above).
    if len(keyrings) > 0:

__all__.append('get_primary_keyring_path')
1345 ################################################################################
class DBChange(object):
    """ORM object for an uploaded .changes file."""
    def __init__(self, *args, **kwargs):

        # __repr__ body: identify the change by its .changes filename.
        return '<DBChange %s>' % self.changesname

__all__.append('DBChange')
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: DBChange object for the given filename (C{None} if not present)
    """
    q = session.query(DBChange).filter_by(changesname=filename)
    except NoResultFound:

__all__.append('get_dbchange')
1381 ################################################################################
class Maintainer(ORMObject):
    """ORM object for a package maintainer ("Name <email>" string)."""

    def __init__(self, name = None):

    def properties(self):
        return ['name', 'maintainer_id']

    def not_null_constraints(self):

    def get_split_maintainer(self):
        # Split the maintainer string into components via fix_maintainer();
        # empty strings when no name is set at all.
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())

__all__.append('Maintainer')
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @return: the Maintainer object for the given maintainer
    """
    q = session.query(Maintainer).filter_by(name=name)
    # Not found: insert a new row and make it visible per the rules above.
    except NoResultFound:
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()

__all__.append('get_or_set_maintainer')
def get_maintainer(maintainer_id, session=None):
    """
    Look up the Maintainer behind C{maintainer_id}; None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @return: the Maintainer with this C{maintainer_id}
    """
    maintainer_query = session.query(Maintainer)
    return maintainer_query.get(maintainer_id)
1450 __all__.append('get_maintainer')
1452 ################################################################################
class NewComment(object):
    """ORM object for a review comment on a package in a policy queue."""
    def __init__(self, *args, **kwargs):

        # __repr__ body: identify the comment by package, version and id.
        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)

__all__.append('NewComment')
def has_new_comment(policy_queue, package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type policy_queue: PolicyQueue
    @param policy_queue: the policy queue the comment must belong to

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: whether at least one matching comment exists
    """
    q = session.query(NewComment).filter_by(policy_queue=policy_queue)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    # count() > 0 already evaluates to a bool; the previous bool() wrapper
    # was redundant.
    return q.count() > 0
1488 __all__.append('has_new_comment')
def get_new_comments(policy_queue, package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: A (possibly empty) list of NewComment objects will be returned
    """
    # Each optional argument narrows the query further.
    q = session.query(NewComment).filter_by(policy_queue=policy_queue)
    if package is not None: q = q.filter_by(package=package)
    if version is not None: q = q.filter_by(version=version)
    if comment_id is not None: q = q.filter_by(comment_id=comment_id)

__all__.append('get_new_comments')
1522 ################################################################################
class Override(ORMObject):
    """ORM object for an override entry (per-suite section/priority of a package)."""

    def __init__(self, package = None, suite = None, component = None, overridetype = None, \
        section = None, priority = None):
        self.package = package
        self.component = component
        self.overridetype = overridetype
        self.section = section
        self.priority = priority

    def properties(self):
        return ['package', 'suite', 'component', 'overridetype', 'section', \

    def not_null_constraints(self):
        return ['package', 'suite', 'component', 'overridetype', 'section']

__all__.append('Override')
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to. If
    None, don't limit. Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
    limit to. If None, don't limit. Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
    limit to. If None, don't limit. Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: A (possibly empty) list of Override objects will be returned
    """
    q = session.query(Override)
    q = q.filter_by(package=package)

    # Scalar arguments are normalised to single-element lists so a uniform
    # IN(...) filter can be used for each dimension.
    if suite is not None:
        if not isinstance(suite, list): suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list): component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list): overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

__all__.append('get_override')
1591 ################################################################################
class OverrideType(ORMObject):
    """ORM object describing the type of an override entry."""

    def __init__(self, overridetype = None):
        self.overridetype = overridetype

    def properties(self):
        return ['overridetype', 'overridetype_id', 'overrides_count']

    def not_null_constraints(self):
        return ['overridetype']

__all__.append('OverrideType')
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: the database id for the given override type
    """
    q = session.query(OverrideType).filter_by(overridetype=override_type)
    except NoResultFound:

__all__.append('get_override_type')
1630 ################################################################################
class PolicyQueue(object):
    """ORM object for a policy queue that uploads wait in (e.g. NEW)."""
    def __init__(self, *args, **kwargs):

        # __repr__ body: identify the queue by name.
        return '<PolicyQueue %s>' % self.queue_name

__all__.append('PolicyQueue')
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(queue_name=queuename)
    except NoResultFound:

__all__.append('get_policy_queue')
1666 ################################################################################
class PolicyQueueUpload(object):
    """An upload sitting in a policy queue."""

    def __cmp__(self, other):
        # Ordering: by source name, then by version, then source uploads
        # before binary-only ones, and finally by .changes filename.
        ret = cmp(self.changes.source, other.changes.source)
        ret = apt_pkg.version_compare(self.changes.version, other.changes.version)
        if self.source is not None and other.source is None:
        elif self.source is None and other.source is not None:
        ret = cmp(self.changes.changesname, other.changes.changesname)

__all__.append('PolicyQueueUpload')
1684 ################################################################################
class PolicyQueueByhandFile(object):
    """ORM object mapped to the policy_queue_byhand_file table."""

__all__.append('PolicyQueueByhandFile')
1691 ################################################################################
class Priority(ORMObject):
    """ORM object for a package priority (e.g. required, optional)."""

    def __init__(self, priority = None, level = None):
        self.priority = priority

    def properties(self):
        return ['priority', 'priority_id', 'level', 'overrides_count']

    def not_null_constraints(self):
        return ['priority', 'level']

    def __eq__(self, val):
        # Allow comparing a Priority directly against its name string.
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented

__all__.append('Priority')
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Priority object for the given priority
    """
    q = session.query(Priority).filter_by(priority=priority)
    except NoResultFound:

__all__.append('get_priority')
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: dictionary of priority names -> id mappings
    """
    q = session.query(Priority)
    # Build the name -> id mapping from every Priority row.
    ret[x.priority] = x.priority_id

__all__.append('get_priorities')
1765 ################################################################################
class Section(ORMObject):
    """ORM object for an archive section (e.g. admin, libs)."""

    def __init__(self, section = None):
        self.section = section

    def properties(self):
        return ['section', 'section_id', 'overrides_count']

    def not_null_constraints(self):

    def __eq__(self, val):
        # Allow comparing a Section directly against its name string.
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented

__all__.append('Section')
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Section object for the given section name
    """
    q = session.query(Section).filter_by(section=section)
    except NoResultFound:

__all__.append('get_section')
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: dictionary of section names -> id mappings
    """
    q = session.query(Section)
    # Build the name -> id mapping from every Section row.
    ret[x.section] = x.section_id

__all__.append('get_sections')
1838 ################################################################################
class SignatureHistory(ORMObject):
    def from_signed_file(cls, signed_file):
        """signature history entry from signed file

        @type signed_file: L{daklib.gpg.SignedFile}
        @param signed_file: signed file

        @rtype: L{SignatureHistory}
        """
        # NOTE(review): takes 'cls' and populates an instance named 'self' —
        # presumably created via cls() in the full source; confirm.
        self.fingerprint = signed_file.primary_fingerprint
        self.signature_timestamp = signed_file.signature_timestamp
        self.contents_sha1 = signed_file.contents_sha1()

__all__.append('SignatureHistory')
1858 ################################################################################
class SrcContents(ORMObject):
    """ORM object for one file name contained in a source package."""

    def __init__(self, file = None, source = None):
        self.source = source

    def properties(self):
        return ['file', 'source']

__all__.append('SrcContents')
1870 ################################################################################
1872 from debian.debfile import Deb822
# Temporary Deb822 subclass to fix bugs with : handling; see #597249
class Dak822(Deb822):
    def _internal_parser(self, sequence, fields=None):
        """
        Parse an RFC822-style paragraph into self, honouring an optional
        whitelist of wanted field names.
        """
        # The key is non-whitespace, non-colon characters before any colon.
        key_part = r"^(?P<key>[^: \t\n\r\f\v]+)\s*:\s*"
        single = re.compile(key_part + r"(?P<data>\S.*?)\s*$")
        multi = re.compile(key_part + r"$")
        multidata = re.compile(r"^\s(?P<data>.+?)\s*$")

        wanted_field = lambda f: fields is None or f in fields

        # Accept either a string or an iterable of lines (Python 2 API).
        if isinstance(sequence, basestring):
            sequence = sequence.splitlines()

        for line in self.gpg_stripped_paragraph(sequence):
            # Case 1: "Key: value" on a single line.
            m = single.match(line)
                self[curkey] = content
            if not wanted_field(m.group('key')):
            curkey = m.group('key')
            content = m.group('data')
            # Case 2: "Key:" opening a multi-line value.
            m = multi.match(line)
                self[curkey] = content
            if not wanted_field(m.group('key')):
            curkey = m.group('key')
            # Case 3: continuation line of a multi-line value.
            m = multidata.match(line)
            content += '\n' + line # XXX not m.group('data')?
        # Flush the final field collected by the loop.
        self[curkey] = content
class DBSource(ORMObject):
    """ORM object for a source package in the pool."""

    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None, fingerprint = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date
        self.fingerprint = fingerprint

        # pkid property body: alias for the primary key.
        return self.source_id

    def properties(self):
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \
            'install_date', 'binaries_count', 'uploaders_count']

    def not_null_constraints(self):
        return ['source', 'version', 'install_date', 'maintainer', \
            'changedby', 'poolfile']

    def read_control_fields(self):
        """
        Reads the control information from a dsc

        @return: fields is the dsc information in a dictionary form
        """
        fullpath = self.poolfile.fullpath
        # NOTE(review): the file handle opened here is never explicitly
        # closed, and the 'fullpath' local above is unused on this line.
        fields = Dak822(open(self.poolfile.fullpath, 'r'))

    metadata = association_proxy('key', 'value')

    def scan_contents(self):
        """
        Returns a set of names for non directories. The path names are
        normalized after converting them from either utf-8 or iso8859-1
        """
        fullpath = self.poolfile.fullpath
        from daklib.contents import UnpackedSource
        unpacked = UnpackedSource(fullpath)
        for name in unpacked.get_all_filenames():
            # enforce proper utf-8 encoding
            name.decode('utf-8')
            except UnicodeDecodeError:
                # Not valid UTF-8: re-encode from latin-1.
                name = name.decode('iso8859-1').encode('utf-8')

__all__.append('DBSource')
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
    1. exact match => 1.0-3
    2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: returns 1 if a source with expected version is found, otherwise 0
    """
    # NOTE(review): mutable default argument suites=["any"] — benign as long
    # as the list is only iterated, never mutated.

    # Strip any +bN / .N+bN binNMU suffix to get the plain source version.
    from daklib.regexes import re_bin_only_nmu
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))

        # source must exist in 'suite' or a suite that is enhanced by 'suite'
        s = get_suite(suite, session)
        enhances_vcs = session.query(VersionCheck).filter(VersionCheck.suite==s).filter_by(check='Enhances')
        considered_suites = [ vc.reference for vc in enhances_vcs ]
        considered_suites.append(s)

        q = q.filter(DBSource.suites.any(Suite.suite_id.in_([s.suite_id for s in considered_suites])))

    # No source found so return not ok

__all__.append('source_exists')
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @param source: DBSource package name to search for

    @return: list of Suite objects for the given source
    """
    suites_with_source = session.query(Suite).filter(Suite.sources.any(source=source))
    return suites_with_source.all()
2053 __all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect. If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: list of DBSource objects for the given name (may be empty)
    """
    q = session.query(DBSource).filter_by(source=source)

    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)

__all__.append('get_sources_from_name')
# FIXME: This function fails badly if it finds more than 1 source package and
# its implementation is trivial enough to be inlined.
def get_source_in_suite(source, suite_name, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite_name}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite_name} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite_name: string
    @param suite_name: the suite name

    @return: the version for I{source} in I{suite}
    """
    suite = get_suite(suite_name, session)
    return suite.get_sources(source).one()
    except NoResultFound:

__all__.append('get_source_in_suite')
def import_metadata_into_db(obj, session=None):
    """
    This routine works on either DBBinary or DBSource objects and imports
    their metadata into the database
    """
    fields = obj.read_control_fields()
    for k in fields.keys():
        # Try the value as plain ASCII first, then progressively more
        # permissive encodings.
        val = str(fields[k])
        except UnicodeEncodeError:
            # Fall back to UTF-8
            val = fields[k].encode('utf-8')
            except UnicodeEncodeError:
                # Finally try iso8859-1
                val = fields[k].encode('iso8859-1')
        # Otherwise we allow the exception to percolate up and we cause
        # a reject as someone is playing silly buggers

        obj.metadata[get_or_set_metadatakey(k, session)] = val

    session.commit_or_flush()

__all__.append('import_metadata_into_db')
2147 ################################################################################
class SrcFormat(object):
    """ORM object for a source package format name."""
    def __init__(self, *args, **kwargs):

        # __repr__ body: identify the format by name.
        return '<SrcFormat %s>' % (self.format_name)

__all__.append('SrcFormat')
2158 ################################################################################
# (display name, Suite attribute) pairs used when rendering a suite's details.
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
# Why the heck don't we have any UNIQUE constraints in table suite?
# TODO: Add UNIQUE constraints for appropriate columns.
class Suite(ORMObject):
    """ORM object for a suite (e.g. unstable, testing)."""

    def __init__(self, suite_name = None, version = None):
        self.suite_name = suite_name
        self.version = version

    def properties(self):
        return ['suite_name', 'version', 'sources_count', 'binaries_count', \

    def not_null_constraints(self):
        return ['suite_name']

    def __eq__(self, val):
        # Allow comparing a Suite directly against a suite name string.
        if isinstance(val, str):
            return (self.suite_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.suite_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    # details() body: render one "Field: value" line per SUITE_FIELDS entry.
    for disp, field in SUITE_FIELDS:
        val = getattr(self, field, None)
        ret.append("%s: %s" % (disp, val))

    return "\n".join(ret)

    def get_architectures(self, skipsrc=False, skipall=False):
        """
        Returns list of Architecture objects

        @type skipsrc: boolean
        @param skipsrc: Whether to skip returning the 'source' architecture entry

        @type skipall: boolean
        @param skipall: Whether to skip returning the 'all' architecture entry

        @return: list of Architecture objects for the given name (may be empty)
        """
        q = object_session(self).query(Architecture).with_parent(self)
        # NOTE(review): these two filters are documented as conditional on
        # skipsrc/skipall — confirm the guards in the full source.
        q = q.filter(Architecture.arch_string != 'source')
        q = q.filter(Architecture.arch_string != 'all')
        return q.order_by(Architecture.arch_string).all()

    def get_sources(self, source):
        """
        Returns a query object representing DBSource that is part of C{suite}.

          - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}

        @type source: string
        @param source: source package name

        @rtype: sqlalchemy.orm.query.Query
        @return: a query of DBSource
        """
        session = object_session(self)
        return session.query(DBSource).filter_by(source = source). \

    def get_overridesuite(self):
        # Suite whose overrides apply here; falls back to self when no
        # overridesuite is configured.
        if self.overridesuite is None:
        return object_session(self).query(Suite).filter_by(suite_name=self.overridesuite).one()

    # path property body: on-disk location of this suite under dists/.
    return os.path.join(self.archive.path, 'dists', self.suite_name)

__all__.append('Suite')
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Suite object for the requested suite name (None if not present)
    """
    q = session.query(Suite).filter_by(suite_name=suite)
    except NoResultFound:

__all__.append('get_suite')
2289 ################################################################################
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name. The list is
    empty if suite does not exist.

    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: list of Architecture objects for the given name (may be empty)
    """
    # get_suite() returns None for an unknown suite; the AttributeError from
    # None.get_architectures is what makes the result empty in that case.
    return get_suite(suite, session).get_architectures(skipsrc, skipall)
    except AttributeError:

__all__.append('get_suite_architectures')
2323 ################################################################################
class Uid(ORMObject):
    """ORM object for a user id (login) attached to key fingerprints."""

    def __init__(self, uid = None, name = None):

    def __eq__(self, val):
        # Allow comparing a Uid directly against a uid string.
        if isinstance(val, str):
            return (self.uid == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.uid != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        return ['uid', 'name', 'fingerprint']

    def not_null_constraints(self):

__all__.append('Uid')
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @return: the uid object for the given uidname
    """
    q = session.query(Uid).filter_by(uid=uidname)
    # Not found: insert a new row and make it visible per the rules above.
    except NoResultFound:
        session.commit_or_flush()

__all__.append('get_or_set_uid')
def get_uid_from_fingerprint(fpr, session=None):
    # Resolve the Uid owning the key with the given fingerprint
    # (None when no such fingerprint is known).
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)
    except NoResultFound:

__all__.append('get_uid_from_fingerprint')
2396 ################################################################################
class MetadataKey(ORMObject):
    """ORM object for a metadata key name (used by binary/source metadata)."""

    def __init__(self, key = None):

    def properties(self):

    def not_null_constraints(self):

__all__.append('MetadataKey')
def get_or_set_metadatakey(keyname, session=None):
    """
    Returns MetadataKey object for given keyname.

    If no matching keyname is found, a row is inserted.

    @type keyname: string
    @param keyname: The keyname to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @return: the metadatakey object for the given keyname
    """
    q = session.query(MetadataKey).filter_by(key=keyname)
    # Not found: insert a new row and make it visible per the rules above.
    except NoResultFound:
        ret = MetadataKey(keyname)
        session.commit_or_flush()

__all__.append('get_or_set_metadatakey')
2442 ################################################################################
class BinaryMetadata(ORMObject):
    """ORM object linking a binary package to one metadata key/value pair."""

    def __init__(self, key = None, value = None, binary = None):
        self.binary = binary

    def properties(self):
        return ['binary', 'key', 'value']

    def not_null_constraints(self):

__all__.append('BinaryMetadata')
2458 ################################################################################
class SourceMetadata(ORMObject):
    """ORM object linking a source package to one metadata key/value pair."""

    def __init__(self, key = None, value = None, source = None):
        self.source = source

    def properties(self):
        return ['source', 'key', 'value']

    def not_null_constraints(self):

__all__.append('SourceMetadata')
2474 ################################################################################
class VersionCheck(ORMObject):
    """ORM object for a version-check rule between two suites."""

    def __init__(self, *args, **kwargs):

    def properties(self):
        #return ['suite_id', 'check', 'reference_id']

    def not_null_constraints(self):
        return ['suite', 'check', 'reference']

__all__.append('VersionCheck')
def get_version_checks(suite_name, check = None, session = None):
    # Return VersionCheck rows for a suite, optionally restricted to one
    # check type (e.g. 'Enhances').
    suite = get_suite(suite_name, session)

    # Make sure that what we return is iterable so that list comprehensions
    # involving this don't cause a traceback
    q = session.query(VersionCheck).filter_by(suite=suite)
    q = q.filter_by(check=check)

__all__.append('get_version_checks')
2503 ################################################################################
# Singleton-ish database connection holder for dak.  The assignment
# "self.__dict__ = self.__shared_state" in __init__ is the Borg idiom:
# every instance shares one state dict, so the engine/metadata/sessionmaker
# are created once per process.  NOTE(review): the __shared_state class
# attribute and several other lines are elided from this excerpt.
2505 class DBConn(object):
2507     database module init.
2511     def __init__(self, *args, **kwargs):
# Borg pattern: all instances share the same attribute dictionary.
2512         self.__dict__ = self.__shared_state
# One-time initialisation guard; later constructions are no-ops apart
# from refreshing nothing (the elided lines presumably call __createconn).
2514         if not getattr(self, 'initialised', False):
2515             self.initialised = True
# has_key() is Python-2-only dict API; passing debug=... enables SQL echo
# on the engine (see engine_args in __createconn).
2516             self.debug = kwargs.has_key('debug')
# Reflect every archive table and database view from the live schema and
# attach them as self.tbl_<name> / self.view_<name> attributes.
# NOTE(review): the "tables = [" and "views = [" list openers and many
# entries are elided from this excerpt; the strings below are the
# surviving members of those two name lists.
2519     def __setuptables(self):
2522             'acl_architecture_map',
2523             'acl_fingerprint_map',
2530             'binaries_metadata',
2538             'external_overrides',
2539             'extra_src_references',
2541             'files_archive_map',
2547             # TODO: the maintainer column in table override should be removed.
2551             'policy_queue_upload',
2552             'policy_queue_upload_binaries_map',
2553             'policy_queue_byhand_file',
2556             'signature_history',
2565             'suite_architectures',
2566             'suite_build_queue_copy',
2567             'suite_src_formats',
2573             'almost_obsolete_all_associations',
2574             'almost_obsolete_src_associations',
2575             'any_associations_source',
2576             'bin_associations_binaries',
2577             'binaries_suite_arch',
2580             'newest_all_associations',
2581             'newest_any_associations',
2583             'newest_src_association',
2584             'obsolete_all_associations',
2585             'obsolete_any_associations',
2586             'obsolete_any_by_all_associations',
2587             'obsolete_src_associations',
2589             'src_associations_bin',
2590             'src_associations_src',
2591             'suite_arch_by_name',
# autoload=True reflects column definitions from the database;
# useexisting=True tolerates re-initialisation in the same MetaData.
2594         for table_name in tables:
2595             table = Table(table_name, self.db_meta, \
2596                 autoload=True, useexisting=True)
2597             setattr(self, 'tbl_%s' % table_name, table)
2599         for view_name in views:
2600             view = Table(view_name, self.db_meta, autoload=True)
2601             setattr(self, 'view_%s' % view_name, view)
# Wire every ORM class to its reflected table using SQLAlchemy classical
# mapping (mapper/relation/backref).  "extension = validator" attaches the
# MapperExtension (defined elsewhere in this file) that enforces
# not_null_constraints on the decorated classes.  NOTE(review): several
# "properties = dict(" opening lines are elided from this excerpt.
2603     def __setupmappers(self):
2604         mapper(Architecture, self.tbl_architecture,
2605             properties = dict(arch_id = self.tbl_architecture.c.id,
2606                suites = relation(Suite, secondary=self.tbl_suite_architectures,
2607                    order_by=self.tbl_suite.c.suite_name,
2608                    backref=backref('architectures', order_by=self.tbl_architecture.c.arch_string))),
2609             extension = validator)
# ACL: many-to-many to Architecture and Fingerprint via map tables;
# collection_class=set gives set semantics instead of list semantics.
2611         mapper(ACL, self.tbl_acl,
2613                architectures = relation(Architecture, secondary=self.tbl_acl_architecture_map, collection_class=set),
2614                fingerprints = relation(Fingerprint, secondary=self.tbl_acl_fingerprint_map, collection_class=set),
2615                match_keyring = relation(Keyring, primaryjoin=(self.tbl_acl.c.match_keyring_id == self.tbl_keyrings.c.id)),
2616                per_source = relation(ACLPerSource, collection_class=set),
# ACLPerSource references Fingerprint twice, so both relations need an
# explicit primaryjoin to disambiguate the foreign keys.
2619         mapper(ACLPerSource, self.tbl_acl_per_source,
2621                acl = relation(ACL),
2622                fingerprint = relation(Fingerprint, primaryjoin=(self.tbl_acl_per_source.c.fingerprint_id == self.tbl_fingerprint.c.id)),
2623                created_by = relation(Fingerprint, primaryjoin=(self.tbl_acl_per_source.c.created_by_id == self.tbl_fingerprint.c.id)),
2626         mapper(Archive, self.tbl_archive,
2627                properties = dict(archive_id = self.tbl_archive.c.id,
2628                                  archive_name = self.tbl_archive.c.name))
2630         mapper(ArchiveFile, self.tbl_files_archive_map,
2631                properties = dict(archive = relation(Archive, backref='files'),
2632                                  component = relation(Component),
2633                                  file = relation(PoolFile, backref='archives')))
2635         mapper(BuildQueue, self.tbl_build_queue,
2636                properties = dict(queue_id = self.tbl_build_queue.c.id,
2637                                  suite = relation(Suite, primaryjoin=(self.tbl_build_queue.c.suite_id==self.tbl_suite.c.id))))
# DBBinary: the *_id properties expose raw FK columns alongside the
# mapped relations; lazy='dynamic' backrefs return query objects rather
# than loading whole collections.
2639         mapper(DBBinary, self.tbl_binaries,
2640                properties = dict(binary_id = self.tbl_binaries.c.id,
2641                                  package = self.tbl_binaries.c.package,
2642                                  version = self.tbl_binaries.c.version,
2643                                  maintainer_id = self.tbl_binaries.c.maintainer,
2644                                  maintainer = relation(Maintainer),
2645                                  source_id = self.tbl_binaries.c.source,
2646                                  source = relation(DBSource, backref='binaries'),
2647                                  arch_id = self.tbl_binaries.c.architecture,
2648                                  architecture = relation(Architecture),
2649                                  poolfile_id = self.tbl_binaries.c.file,
2650                                  poolfile = relation(PoolFile),
2651                                  binarytype = self.tbl_binaries.c.type,
2652                                  fingerprint_id = self.tbl_binaries.c.sig_fpr,
2653                                  fingerprint = relation(Fingerprint),
2654                                  install_date = self.tbl_binaries.c.install_date,
2655                                  suites = relation(Suite, secondary=self.tbl_bin_associations,
2656                                      backref=backref('binaries', lazy='dynamic')),
2657                                  extra_sources = relation(DBSource, secondary=self.tbl_extra_src_references,
2658                                      backref=backref('extra_binary_references', lazy='dynamic')),
# 'key' maps metadata rows as a dict keyed by metadata key name;
# cascade='all' deletes metadata together with the binary.
2659                                  key = relation(BinaryMetadata, cascade='all',
2660                                      collection_class=attribute_mapped_collection('key'))),
2661                 extension = validator)
2663         mapper(Component, self.tbl_component,
2664                properties = dict(component_id = self.tbl_component.c.id,
2665                                  component_name = self.tbl_component.c.name),
2666                extension = validator)
2668         mapper(DBConfig, self.tbl_config,
2669                properties = dict(config_id = self.tbl_config.c.id))
2671         mapper(DSCFile, self.tbl_dsc_files,
2672                properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
2673                                  source_id = self.tbl_dsc_files.c.source,
2674                                  source = relation(DBSource),
2675                                  poolfile_id = self.tbl_dsc_files.c.file,
2676                                  poolfile = relation(PoolFile)))
2678         mapper(ExternalOverride, self.tbl_external_overrides,
2680                suite_id = self.tbl_external_overrides.c.suite,
2681                suite = relation(Suite),
2682                component_id = self.tbl_external_overrides.c.component,
2683                component = relation(Component)))
2685         mapper(PoolFile, self.tbl_files,
2686                properties = dict(file_id = self.tbl_files.c.id,
2687                                  filesize = self.tbl_files.c.size),
2688                extension = validator)
2690         mapper(Fingerprint, self.tbl_fingerprint,
2691                properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
2692                                  uid_id = self.tbl_fingerprint.c.uid,
2693                                  uid = relation(Uid),
2694                                  keyring_id = self.tbl_fingerprint.c.keyring,
2695                                  keyring = relation(Keyring),
2696                                  acl = relation(ACL)),
2697                extension = validator)
2699         mapper(Keyring, self.tbl_keyrings,
2700                properties = dict(keyring_name = self.tbl_keyrings.c.name,
2701                                  keyring_id = self.tbl_keyrings.c.id,
2702                                  acl = relation(ACL, primaryjoin=(self.tbl_keyrings.c.acl_id == self.tbl_acl.c.id)))),
# DBChange: all plain column properties -- a .changes upload record.
2704         mapper(DBChange, self.tbl_changes,
2705                properties = dict(change_id = self.tbl_changes.c.id,
2706                                  seen = self.tbl_changes.c.seen,
2707                                  source = self.tbl_changes.c.source,
2708                                  binaries = self.tbl_changes.c.binaries,
2709                                  architecture = self.tbl_changes.c.architecture,
2710                                  distribution = self.tbl_changes.c.distribution,
2711                                  urgency = self.tbl_changes.c.urgency,
2712                                  maintainer = self.tbl_changes.c.maintainer,
2713                                  changedby = self.tbl_changes.c.changedby,
2714                                  date = self.tbl_changes.c.date,
2715                                  version = self.tbl_changes.c.version))
# Maintainer relates to DBSource twice (maintainer vs. changed-by), hence
# the explicit primaryjoins.
2717         mapper(Maintainer, self.tbl_maintainer,
2718                properties = dict(maintainer_id = self.tbl_maintainer.c.id,
2719                                  maintains_sources = relation(DBSource, backref='maintainer',
2720                                      primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
2721                                  changed_sources = relation(DBSource, backref='changedby',
2722                                      primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
2723                 extension = validator)
2725         mapper(NewComment, self.tbl_new_comments,
2726                properties = dict(comment_id = self.tbl_new_comments.c.id,
2727                                  policy_queue = relation(PolicyQueue)))
# Override: every dimension (suite/component/priority/section/type) gets
# a lazy='dynamic' 'overrides' backref on the referenced class.
2729         mapper(Override, self.tbl_override,
2730                properties = dict(suite_id = self.tbl_override.c.suite,
2731                                  suite = relation(Suite, \
2732                                     backref=backref('overrides', lazy='dynamic')),
2733                                  package = self.tbl_override.c.package,
2734                                  component_id = self.tbl_override.c.component,
2735                                  component = relation(Component, \
2736                                     backref=backref('overrides', lazy='dynamic')),
2737                                  priority_id = self.tbl_override.c.priority,
2738                                  priority = relation(Priority, \
2739                                     backref=backref('overrides', lazy='dynamic')),
2740                                  section_id = self.tbl_override.c.section,
2741                                  section = relation(Section, \
2742                                     backref=backref('overrides', lazy='dynamic')),
2743                                  overridetype_id = self.tbl_override.c.type,
2744                                  overridetype = relation(OverrideType, \
2745                                     backref=backref('overrides', lazy='dynamic'))))
2747         mapper(OverrideType, self.tbl_override_type,
2748                properties = dict(overridetype = self.tbl_override_type.c.type,
2749                                  overridetype_id = self.tbl_override_type.c.id))
2751         mapper(PolicyQueue, self.tbl_policy_queue,
2752                properties = dict(policy_queue_id = self.tbl_policy_queue.c.id,
2753                                  suite = relation(Suite, primaryjoin=(self.tbl_policy_queue.c.suite_id == self.tbl_suite.c.id))))
2755         mapper(PolicyQueueUpload, self.tbl_policy_queue_upload,
2757                changes = relation(DBChange),
2758                policy_queue = relation(PolicyQueue, backref='uploads'),
2759                target_suite = relation(Suite),
2760                source = relation(DBSource),
2761                binaries = relation(DBBinary, secondary=self.tbl_policy_queue_upload_binaries_map),
2764         mapper(PolicyQueueByhandFile, self.tbl_policy_queue_byhand_file,
2766                upload = relation(PolicyQueueUpload, backref='byhand'),
2770         mapper(Priority, self.tbl_priority,
2771                properties = dict(priority_id = self.tbl_priority.c.id))
2773         mapper(Section, self.tbl_section,
2774                properties = dict(section_id = self.tbl_section.c.id,
2775                                  section=self.tbl_section.c.section))
# SignatureHistory needs no extra properties: plain one-to-one mapping.
2777         mapper(SignatureHistory, self.tbl_signature_history)
2779         mapper(DBSource, self.tbl_source,
2780                properties = dict(source_id = self.tbl_source.c.id,
2781                                  version = self.tbl_source.c.version,
2782                                  maintainer_id = self.tbl_source.c.maintainer,
2783                                  poolfile_id = self.tbl_source.c.file,
2784                                  poolfile = relation(PoolFile),
2785                                  fingerprint_id = self.tbl_source.c.sig_fpr,
2786                                  fingerprint = relation(Fingerprint),
2787                                  changedby_id = self.tbl_source.c.changedby,
2788                                  srcfiles = relation(DSCFile,
2789                                                      primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
2790                                  suites = relation(Suite, secondary=self.tbl_src_associations,
2791                                      backref=backref('sources', lazy='dynamic')),
2792                                  uploaders = relation(Maintainer,
2793                                      secondary=self.tbl_src_uploaders),
# Same pattern as DBBinary: source metadata as a key-indexed dict.
2794                                  key = relation(SourceMetadata, cascade='all',
2795                                      collection_class=attribute_mapped_collection('key'))),
2796                extension = validator)
2798         mapper(SrcFormat, self.tbl_src_format,
2799                properties = dict(src_format_id = self.tbl_src_format.c.id,
2800                                  format_name = self.tbl_src_format.c.format_name))
# Suite references PolicyQueue twice (general policy queue vs. NEW
# queue), so both relations carry explicit primaryjoins.
2802         mapper(Suite, self.tbl_suite,
2803                properties = dict(suite_id = self.tbl_suite.c.id,
2804                                  policy_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.policy_queue_id == self.tbl_policy_queue.c.id)),
2805                                  new_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.new_queue_id == self.tbl_policy_queue.c.id)),
2806                                  copy_queues = relation(BuildQueue,
2807                                      secondary=self.tbl_suite_build_queue_copy),
2808                                  srcformats = relation(SrcFormat, secondary=self.tbl_suite_src_formats,
2809                                      backref=backref('suites', lazy='dynamic')),
2810                                  archive = relation(Archive, backref='suites'),
2811                                  acls = relation(ACL, secondary=self.tbl_suite_acl_map, collection_class=set),
2812                                  components = relation(Component, secondary=self.tbl_component_suite,
2813                                      order_by=self.tbl_component.c.ordering,
2814                                      backref=backref('suites'))),
2815                 extension = validator)
2817         mapper(Uid, self.tbl_uid,
2818                properties = dict(uid_id = self.tbl_uid.c.id,
2819                                  fingerprint = relation(Fingerprint)),
2820                extension = validator)
# Contents mappings: cascade='all' so contents rows die with their owner.
2822         mapper(BinContents, self.tbl_bin_contents,
2824                binary = relation(DBBinary,
2825                    backref=backref('contents', lazy='dynamic', cascade='all')),
2826                file = self.tbl_bin_contents.c.file))
2828         mapper(SrcContents, self.tbl_src_contents,
2830                source = relation(DBSource,
2831                    backref=backref('contents', lazy='dynamic', cascade='all')),
2832                file = self.tbl_src_contents.c.file))
2834         mapper(MetadataKey, self.tbl_metadata_keys,
2836                key_id = self.tbl_metadata_keys.c.key_id,
2837                key = self.tbl_metadata_keys.c.key))
2839         mapper(BinaryMetadata, self.tbl_binaries_metadata,
2841                binary_id = self.tbl_binaries_metadata.c.bin_id,
2842                binary = relation(DBBinary),
2843                key_id = self.tbl_binaries_metadata.c.key_id,
2844                key = relation(MetadataKey),
2845                value = self.tbl_binaries_metadata.c.value))
2847         mapper(SourceMetadata, self.tbl_source_metadata,
2849                source_id = self.tbl_source_metadata.c.src_id,
2850                source = relation(DBSource),
2851                key_id = self.tbl_source_metadata.c.key_id,
2852                key = relation(MetadataKey),
# VersionCheck references Suite twice (checked suite vs. reference
# suite); lazy='joined' eager-loads the reference suite in one query.
2853                value = self.tbl_source_metadata.c.value))
2855         mapper(VersionCheck, self.tbl_version_check,
2857             suite_id = self.tbl_version_check.c.suite,
2858             suite = relation(Suite, primaryjoin=self.tbl_version_check.c.suite==self.tbl_suite.c.id),
2859             reference_id = self.tbl_version_check.c.reference,
2860             reference = relation(Suite, primaryjoin=self.tbl_version_check.c.reference==self.tbl_suite.c.id, lazy='joined')))
2862     ## Connection functions
# Build the postgresql:// connection string from dak's Config
# (DB::Service / DB::Host / DB::Port / DB::Name), create the engine and
# sessionmaker, then reflect tables and install mappers.  NOTE(review):
# the "cnf = Config()" line, the else-branch opener for the socket case,
# and the try: surrounding engine creation are elided from this excerpt.
2863     def __createconn(self):
2864         from config import Config
# Precedence: an explicit pg service definition wins over host/port,
# which wins over the default local-socket connection.
2866         if cnf.has_key("DB::Service"):
2867             connstr = "postgresql://service=%s" % cnf["DB::Service"]
2868         elif cnf.has_key("DB::Host"):
2870             connstr = "postgresql://%s" % cnf["DB::Host"]
# "-1" is dak's sentinel for "use the default port".
2871             if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
2872                 connstr += ":%s" % cnf["DB::Port"]
2873             connstr += "/%s" % cnf["DB::Name"]
# Local (unix-socket) connection; port goes in the query string here.
2876             connstr = "postgresql:///%s" % cnf["DB::Name"]
2877             if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
2878                 connstr += "?port=%s" % cnf["DB::Port"]
# Engine tuning knobs; echo=self.debug logs all SQL when debug was set.
2880         engine_args = { 'echo': self.debug }
2881         if cnf.has_key('DB::PoolSize'):
2882             engine_args['pool_size'] = int(cnf['DB::PoolSize'])
2883         if cnf.has_key('DB::MaxOverflow'):
2884             engine_args['max_overflow'] = int(cnf['DB::MaxOverflow'])
# use_native_unicode only exists on SQLAlchemy > 0.5, hence the version
# guard (sa_major_version is set elsewhere in this file).
2885         if sa_major_version != '0.5' and cnf.has_key('DB::Unicode') and \
2886             cnf['DB::Unicode'] == 'false':
2887             engine_args['use_native_unicode'] = False
2889         # Monkey patch a new dialect in in order to support service= syntax
2890         import sqlalchemy.dialects.postgresql
2891         from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
2892         class PGDialect_psycopg2_dak(PGDialect_psycopg2):
2893             def create_connect_args(self, url):
# len('postgresql://service=') == 21: strip the scheme prefix and hand
# psycopg2 a raw "service=<name>" DSN (resolved via pg_service.conf).
2894                 if str(url).startswith('postgresql://service='):
2896                     servicename = str(url)[21:]
2897                     return (['service=%s' % servicename], {})
# Any other URL falls through to the stock psycopg2 dialect handling.
2899                 return PGDialect_psycopg2.create_connect_args(self, url)
2901         sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak
2904             self.db_pg   = create_engine(connstr, **engine_args)
2905             self.db_meta = MetaData()
2906             self.db_meta.bind = self.db_pg
2907             self.db_smaker = sessionmaker(bind=self.db_pg,
2911             self.__setuptables()
2912             self.__setupmappers()
# Connection failures are fatal: utils.fubar prints and exits.
2914         except OperationalError as e:
2916             utils.fubar("Cannot connect to database (%s)" % str(e))
# Remembered so session() can detect a fork and reconnect in the child.
2918         self.pid = os.getpid()
2920     def session(self, work_mem = 0):
2922         Returns a new session object. If a work_mem parameter is provided a new
2923         transaction is started and the work_mem parameter is set for this
2924         transaction. The work_mem parameter is measured in MB. A default value
2925         will be used if the parameter is not set.
# A forked child must not share the parent's pooled connections; the
# elided lines presumably clear mappers and call __createconn again.
2927         # reinitialize DBConn in new processes
2928         if self.pid != os.getpid():
2931         session = self.db_smaker()
# SET LOCAL scopes the work_mem override to the current transaction only.
2933             session.execute("SET LOCAL work_mem TO '%d MB'" % work_mem)
# Export the class as part of this module's public API.
2936 __all__.append('DBConn')