5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
38 from os.path import normpath
50 import simplejson as json
52 from datetime import datetime, timedelta
53 from errno import ENOENT
54 from tempfile import mkstemp, mkdtemp
55 from subprocess import Popen, PIPE
56 from tarfile import TarFile
58 from inspect import getargspec
61 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
63 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
64 backref, MapperExtension, EXT_CONTINUE, object_mapper, clear_mappers
65 from sqlalchemy import types as sqltypes
66 from sqlalchemy.orm.collections import attribute_mapped_collection
67 from sqlalchemy.ext.associationproxy import association_proxy
69 # Don't remove this, we re-export the exceptions to scripts which import us
70 from sqlalchemy.exc import *
71 from sqlalchemy.orm.exc import NoResultFound
73 # Only import Config until Queue stuff is changed to store its config
75 from config import Config
76 from textutils import fix_maintainer
77 from dak_exceptions import DBUpdateError, NoSourceFieldError, FileExistsError
79 # suppress some deprecation warnings in squeeze related to sqlalchemy
81 warnings.filterwarnings('ignore', \
82 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
84 warnings.filterwarnings('ignore', \
85 "Predicate of partial index .* ignored during reflection", \
89 ################################################################################
# Patch in support for the debversion field type so that it works during
# reflection.
# that is for sqlalchemy 0.6
UserDefinedType = sqltypes.UserDefinedType
# this one for sqlalchemy 0.5
UserDefinedType = sqltypes.TypeEngine

# NOTE(review): this excerpt elides several original lines (the try/except
# choosing the UserDefinedType base, the method bodies below, and the
# else: branch before the raise); code tokens are reproduced unchanged.

# Column type mapping PostgreSQL's 'debversion' for SQLAlchemy reflection.
class DebVersion(UserDefinedType):
    def get_col_spec(self):
    def bind_processor(self, dialect):
    # ' = None' is needed for sqlalchemy 0.5:
    def result_processor(self, dialect, coltype = None):

# First three characters of the version string, e.g. "0.7".
sa_major_version = sqlalchemy.__version__[0:3]
if sa_major_version in ["0.5", "0.6", "0.7", "0.8"]:
    from sqlalchemy.databases import postgres
    postgres.ischema_names['debversion'] = DebVersion
    # presumably sits in an 'else:' branch in the full file — confirm
    raise Exception("dak only ported to SQLA versions 0.5 to 0.8. See daklib/dbconn.py")

################################################################################

# Names re-exported to modules doing 'from daklib.dbconn import *'.
__all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
123 ################################################################################
def session_wrapper(fn):
    """
    Wrapper around common ".., session=None):" handling. If the wrapped
    function is called without passing 'session', we create a local one
    and destroy it when the function ends.

    Also attaches a commit_or_flush method to the session; if we created a
    local session, this is a synonym for session.commit(), otherwise it is a
    synonym for session.flush().
    """
    # NOTE(review): this excerpt elides several original lines (the elif
    # branch header, the else: before the flush assignment, try/finally with
    # session.close(), and 'return wrapped'); code tokens are unchanged.
    def wrapped(*args, **kwargs):
        private_transaction = False

        # Find the session object
        session = kwargs.get('session')

        if len(args) <= len(getargspec(fn)[0]) - 1:
            # No session specified as last argument or in kwargs
            private_transaction = True
            session = kwargs['session'] = DBConn().session()
        # Session is last argument in args
            session = args[-1] = DBConn().session()
            private_transaction = True

        if private_transaction:
            # We own the session, so commit_or_flush really commits.
            session.commit_or_flush = session.commit
            session.commit_or_flush = session.flush

            return fn(*args, **kwargs)

        if private_transaction:
            # We created a session; close it.

    # Preserve the wrapped function's metadata for introspection.
    wrapped.__doc__ = fn.__doc__
    wrapped.func_name = fn.func_name

__all__.append('session_wrapper')
174 ################################################################################
class ORMObject(object):
    """
    ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
    derived classes must implement the properties() method.
    """
    # NOTE(review): this excerpt elides many original lines (method 'def'
    # headers such as json()/classname()/__repr__()/__str__()/validate(),
    # else/continue branches and some returns); tokens are unchanged.

    def properties(self):
        """
        This method should be implemented by all derived classes and returns a
        list of the important properties. The properties 'created' and
        'modified' will be added automatically. A suffix '_count' should be
        added to properties that are lists or query objects. The most important
        property name should be returned as the first element in the list
        because it is used by repr().
        """

        """
        Returns a JSON representation of the object based on the properties
        returned from the properties() method.
        """
        # add created and modified
        all_properties = self.properties() + ['created', 'modified']
        for property in all_properties:
            # check for list or query
            if property[-6:] == '_count':
                real_property = property[:-6]
                if not hasattr(self, real_property):
                value = getattr(self, real_property)
                if hasattr(value, '__len__'):
                elif hasattr(value, 'count'):
                    # query (but not during validation)
                    if self.in_validation:
                    value = value.count()
                    raise KeyError('Do not understand property %s.' % property)
            if not hasattr(self, property):
            value = getattr(self, property)
            elif isinstance(value, ORMObject):
                # use repr() for ORMObject types
            # we want a string for all other types because json cannot
            data[property] = value
        return json.dumps(data)

        """
        Returns the name of the class.
        """
        return type(self).__name__

        """
        Returns a short string representation of the object using the first
        element from the properties() method.
        """
        primary_property = self.properties()[0]
        value = getattr(self, primary_property)
        return '<%s %s>' % (self.classname(), str(value))

        """
        Returns a human readable form of the object using the properties()
        method.
        """
        return '<%s %s>' % (self.classname(), self.json())

    def not_null_constraints(self):
        """
        Returns a list of properties that must be not NULL. Derived classes
        should override this method if needed.
        """

    # Format template for the DBUpdateError raised by validate().
    validation_message = \
        "Validation failed because property '%s' must not be empty in object\n%s"

    # Guards against json()/count() queries running mid-validation.
    in_validation = False

        """
        This function validates the not NULL constraints as returned by
        not_null_constraints(). It raises the DBUpdateError exception if
        validation fails.
        """
        for property in self.not_null_constraints():
            # TODO: It is a bit awkward that the mapper configuration allow
            # directly setting the numeric _id columns. We should get rid of it
            if hasattr(self, property + '_id') and \
                getattr(self, property + '_id') is not None:
            if not hasattr(self, property) or getattr(self, property) is None:
                # str() might lead to races due to a 2nd flush
                self.in_validation = True
                message = self.validation_message % (property, str(self))
                self.in_validation = False
                raise DBUpdateError(message)

    def get(cls, primary_key, session = None):
        """
        This is a support function that allows getting an object by its primary
        key:

        Architecture.get(3[, session])

        instead of the more verbose

        session.query(Architecture).get(3)
        """
        return session.query(cls).get(primary_key)

    def session(self, replace = False):
        """
        Returns the current session that is associated with the object. May
        return None is object is in detached state.
        """
        return object_session(self)

    def clone(self, session = None):
        """
        Clones the current object in a new session and returns the new clone. A
        fresh session is created if the optional session parameter is not
        provided. The function will fail if a session is provided and has
        unflushed changes.

        RATIONALE: SQLAlchemy's session is not thread safe. This method clones
        an existing object to allow several threads to work with their own
        instances of an ORMObject.

        WARNING: Only persistent (committed) objects can be cloned. Changes
        made to the original object that are not committed yet will get lost.
        The session of the new object will always be rolled back to avoid
        resource leaks.
        """
        if self.session() is None:
            raise RuntimeError( \
                'Method clone() failed for detached object:\n%s' % self)
        self.session().flush()
        mapper = object_mapper(self)
        primary_key = mapper.primary_key_from_instance(self)
        object_class = self.__class__
            session = DBConn().session()
        elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
            raise RuntimeError( \
                'Method clone() failed due to unflushed changes in session.')
        new_object = session.query(object_class).get(primary_key)
        if new_object is None:
            raise RuntimeError( \
                'Method clone() failed for non-persistent object:\n%s' % self)

__all__.append('ORMObject')
351 ################################################################################
class Validator(MapperExtension):
    """
    This class calls the validate() method for each instance for the
    'before_update' and 'before_insert' events. A global object validator is
    used for configuring the individual mappers.
    """
    # NOTE(review): the method bodies (presumably instance.validate() then
    # return EXT_CONTINUE) are elided in this excerpt — confirm in full file.

    def before_update(self, mapper, connection, instance):

    def before_insert(self, mapper, connection, instance):

# Shared MapperExtension instance hooked into the mappers configured below.
validator = Validator()
370 ################################################################################
class ACL(ORMObject):
    # NOTE(review): the 'def __repr__(self):' header is elided in this excerpt.
        return "<ACL {0}>".format(self.name)

__all__.append('ACL')

class ACLPerSource(ORMObject):
    # NOTE(review): the 'def __repr__(self):' header is elided here as well.
        return "<ACLPerSource acl={0} fingerprint={1} source={2} reason={3}>".format(self.acl.name, self.fingerprint.fingerprint, self.source, self.reason)

__all__.append('ACLPerSource')
384 ################################################################################
class Architecture(ORMObject):
    """A hardware architecture (e.g. 'amd64') known to the archive."""

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # A plain string compares against the architecture name; anything
        # else defers to the normal comparison machinery.
        if not isinstance(val, str):
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        if not isinstance(val, str):
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # 'arch_string' first: ORMObject.__repr__ uses the leading element.
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']
__all__.append('Architecture')

def get_architecture(architecture, session=None):
    """
    Returns database id for given C{architecture}.

    @type architecture: string
    @param architecture: The name of the architecture

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Architecture object for the given arch (None if not present)
    """
    # NOTE(review): the @session_wrapper decorator, 'try:' and the return
    # statements are elided in this excerpt; tokens below are unchanged.
    q = session.query(Architecture).filter_by(arch_string=architecture)
        except NoResultFound:

__all__.append('get_architecture')
436 ################################################################################
class Archive(object):
    """An archive known to dak. (Excerpt: the __repr__ header is elided.)"""
    def __init__(self, *args, **kwargs):
        return '<Archive %s>' % self.archive_name

__all__.append('Archive')

def get_archive(archive, session=None):
    """
    returns database id for given C{archive}.

    @type archive: string
    @param archive: the name of the archive

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Archive object for the given name (None if not present)
    """
    # Archive names are matched case-insensitively by lowering first.
    archive = archive.lower()

    # NOTE(review): decorator, 'try:' and returns elided in this excerpt.
    q = session.query(Archive).filter_by(archive_name=archive)
        except NoResultFound:

__all__.append('get_archive')
474 ################################################################################
class ArchiveFile(object):
    """Association of a pool file with an archive and component."""
    def __init__(self, archive=None, component=None, file=None):
        self.archive = archive
        self.component = component
        # NOTE(review): 'self.file = file' and the fullpath property header
        # are elided in this excerpt; the join below belongs to fullpath.
        return os.path.join(self.archive.path, 'pool', self.component.component_name, self.file.filename)

__all__.append('ArchiveFile')

################################################################################

class BinContents(ORMObject):
    # NOTE(review): the attribute assignments of __init__ are elided.
    def __init__(self, file = None, binary = None):

    def properties(self):
        return ['file', 'binary']

__all__.append('BinContents')
499 ################################################################################
def subprocess_setup():
    """Restore the default SIGPIPE action before exec'ing a child.

    Python installs a SIGPIPE ignore handler by default, which is usually
    not what non-Python subprocesses expect; used as a Popen preexec_fn.
    """
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
class DBBinary(ORMObject):
    """A binary package row ('binaries' table)."""
    # NOTE(review): several original lines are elided in this excerpt
    # ('self.source = source', method headers, try:/returns, the empty-
    # package yield in scan_contents); code tokens are unchanged.
    def __init__(self, package = None, source = None, version = None, \
        maintainer = None, architecture = None, poolfile = None, \
        binarytype = 'deb', fingerprint=None):
        self.package = package
        self.version = version
        self.maintainer = maintainer
        self.architecture = architecture
        self.poolfile = poolfile
        self.binarytype = binarytype
        self.fingerprint = fingerprint

        return self.binary_id

    def properties(self):
        return ['package', 'version', 'maintainer', 'source', 'architecture', \
            'poolfile', 'binarytype', 'fingerprint', 'install_date', \
            'suites_count', 'binary_id', 'contents_count', 'extra_sources']

    def not_null_constraints(self):
        return ['package', 'version', 'maintainer', 'source', 'poolfile', \

    # Proxy exposing key/value metadata rows as a plain mapping.
    metadata = association_proxy('key', 'value')

    def scan_contents(self):
        """
        Yields the contents of the package. Only regular files are yielded and
        the path names are normalized after converting them from either utf-8
        or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the
        package does not contain any regular file.
        """
        fullpath = self.poolfile.fullpath
        # Stream the data tarball out of the .deb via dpkg-deb.
        dpkg = Popen(['dpkg-deb', '--fsys-tarfile', fullpath], stdout = PIPE,
            preexec_fn = subprocess_setup)
        tar = TarFile.open(fileobj = dpkg.stdout, mode = 'r|')
        for member in tar.getmembers():
            if not member.isdir():
                name = normpath(member.name)
                # enforce proper utf-8 encoding
                except UnicodeDecodeError:
                    name = name.decode('iso8859-1').encode('utf-8')

    def read_control(self):
        """
        Reads the control information from a binary.

        @return: stanza text of the control section.
        """
        fullpath = self.poolfile.fullpath
        deb_file = open(fullpath, 'r')
        stanza = utils.deb_extract_control(deb_file)

    def read_control_fields(self):
        """
        Reads the control information from a binary and return
        the parsed fields.

        @return: fields of the control section as a dictionary.
        """
        stanza = self.read_control()
        return apt_pkg.TagSection(stanza)

__all__.append('DBBinary')

def get_suites_binary_in(package, session=None):
    """
    Returns list of Suite objects which given C{package} name is in

    @param package: DBBinary package name to search for

    @return: list of Suite objects for the given package
    """
    return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()

__all__.append('get_suites_binary_in')
def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
    """
    Returns the component name of the newest binary package in suite_list or
    None if no package is found. The result can be optionally filtered by a list
    of architecture names.

    @param package: DBBinary package name to search for

    @type suite_list: list of str
    @param suite_list: list of suite_name items

    @type arch_list: list of str
    @param arch_list: optional list of arch_string items that defaults to []

    @rtype: str or NoneType
    @return: name of component or None
    """
    # NOTE(review): decorator and 'if binary is None' guard are elided in
    # this excerpt. Also note the mutable default argument arch_list=[] —
    # harmless here since it is never mutated, but fragile.
    q = session.query(DBBinary).filter_by(package = package). \
        join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
    if len(arch_list) > 0:
        q = q.join(DBBinary.architecture). \
            filter(Architecture.arch_string.in_(arch_list))
    # Highest version wins.
    binary = q.order_by(desc(DBBinary.version)).first()
        return binary.poolfile.component.component_name

__all__.append('get_component_by_package_suite')
636 ################################################################################
class BuildQueue(object):
    """A build queue. (Excerpt: the __repr__ header is elided.)"""
    def __init__(self, *args, **kwargs):
        return '<BuildQueue %s>' % self.queue_name

__all__.append('BuildQueue')
647 ################################################################################
class Component(ORMObject):
    """An archive component such as 'main' or 'contrib'."""

    def __init__(self, component_name = None):
        self.component_name = component_name

    def __eq__(self, val):
        # A plain string compares against the component name; anything
        # else defers to the normal comparison machinery.
        if not isinstance(val, str):
            return NotImplemented
        return self.component_name == val

    def __ne__(self, val):
        if not isinstance(val, str):
            return NotImplemented
        return self.component_name != val

    def properties(self):
        # 'component_name' first: ORMObject.__repr__ uses the leading element.
        return ['component_name', 'component_id', 'description',
                'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        return ['component_name']
__all__.append('Component')

def get_component(component, session=None):
    """
    Returns database id for given C{component}.

    @type component: string
    @param component: The name of the component

    @return: the database id for the given component
    """
    # Component names are matched case-insensitively by lowering first.
    component = component.lower()

    # NOTE(review): decorator, 'try:' and returns elided in this excerpt.
    q = session.query(Component).filter_by(component_name=component)
        except NoResultFound:

__all__.append('get_component')

def get_mapped_component(component_name, session=None):
    """get component after mappings

    Evaluate component mappings from ComponentMappings in dak.conf for the
    given component name.

    @todo: ansgar wants to get rid of this. It's currently only used for
    the security archive.

    @type component_name: str
    @param component_name: component name

    @param session: database session

    @rtype: L{daklib.dbconn.Component} or C{None}
    @return: component after applying maps or C{None}
    """
    # NOTE(review): decorator, 'cnf = Config()' and the loop's remapping
    # assignment are elided in this excerpt.
    for m in cnf.value_list("ComponentMappings"):
        (src, dst) = m.split()
        if component_name == src:
    component = session.query(Component).filter_by(component_name=component_name).first()

__all__.append('get_mapped_component')

def get_component_names(session=None):
    """
    Returns list of strings of component names.

    @return: list of strings of component names
    """
    return [ x.component_name for x in session.query(Component).all() ]

__all__.append('get_component_names')
739 ################################################################################
class DBConfig(object):
    """A config key/value pair stored in the DB. (Excerpt: __repr__ header elided.)"""
    def __init__(self, *args, **kwargs):
        return '<DBConfig %s>' % self.name

__all__.append('DBConfig')
750 ################################################################################
def get_or_set_contents_file_id(filename, session=None):
    """
    Returns database id for given filename.

    If no matching file is found, a row is inserted.

    @type filename: string
    @param filename: The filename
    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @return: the database id for the given filename
    """
    # NOTE(review): decorator, 'try:', 'session.add(cf)' and the final
    # return are elided in this excerpt; tokens below are unchanged.
    q = session.query(ContentFilename).filter_by(filename=filename)
        ret = q.one().cafilename_id
    except NoResultFound:
        # Not present yet: insert a fresh row and use its generated id.
        cf = ContentFilename()
        cf.filename = filename
        session.commit_or_flush()
        ret = cf.cafilename_id

__all__.append('get_or_set_contents_file_id')
def get_contents(suite, overridetype, section=None, session=None):
    """
    Returns contents for a suite / overridetype combination, limiting
    to a section if not None.

    @param suite: Suite object

    @type overridetype: OverrideType
    @param overridetype: OverrideType object

    @type section: Section
    @param section: Optional section object to limit results to

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: ResultsProxy object set up to return tuples of (filename, section,
    """
    # NOTE(review): some lines of this excerpt are elided (including part of
    # the SELECT column list); the SQL literal is reproduced as shown.
    # find me all of the contents for a given suite
    contents_q = """SELECT (p.path||'/'||n.file) AS fn,
        FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
        JOIN content_file_names n ON (c.filename=n.id)
        JOIN binaries b ON (b.id=c.binary_pkg)
        JOIN override o ON (o.package=b.package)
        JOIN section s ON (s.id=o.section)
        WHERE o.suite = :suiteid AND o.type = :overridetypeid
        AND b.type=:overridetypename"""

    vals = {'suiteid': suite.suite_id,
            'overridetypeid': overridetype.overridetype_id,
            'overridetypename': overridetype.overridetype}

    if section is not None:
        # Narrow to the requested section only.
        contents_q += " AND s.id = :sectionid"
        vals['sectionid'] = section.section_id

    contents_q += " ORDER BY fn"

    return session.execute(contents_q, vals)

__all__.append('get_contents')
836 ################################################################################
class ContentFilepath(object):
    """A directory path from package contents. (Excerpt: __repr__ header elided.)"""
    def __init__(self, *args, **kwargs):
        return '<ContentFilepath %s>' % self.filepath

__all__.append('ContentFilepath')

def get_or_set_contents_path_id(filepath, session=None):
    """
    Returns database id for given path.

    If no matching file is found, a row is inserted.

    @type filepath: string
    @param filepath: The filepath

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @return: the database id for the given path
    """
    # NOTE(review): decorator, 'try:', 'session.add(cf)' and the final
    # return are elided in this excerpt; tokens below are unchanged.
    q = session.query(ContentFilepath).filter_by(filepath=filepath)
        ret = q.one().cafilepath_id
    except NoResultFound:
        # Not present yet: insert a fresh row and use its generated id.
        cf = ContentFilepath()
        cf.filepath = filepath
        session.commit_or_flush()
        ret = cf.cafilepath_id

__all__.append('get_or_set_contents_path_id')
881 ################################################################################
class ContentAssociation(object):
    """Links a binary to a contents entry. (Excerpt: __repr__ header elided.)"""
    def __init__(self, *args, **kwargs):
        return '<ContentAssociation %s>' % self.ca_id

__all__.append('ContentAssociation')

def insert_content_paths(binary_id, fullpaths, session=None):
    """
    Make sure given path is associated with given binary id

    @param binary_id: the id of the binary
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session. If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code. If not passed, a commit
    will be performed at the end of the function, otherwise the caller is
    responsible for commiting.

    @return: True upon success
    """
    # NOTE(review): try/except scaffolding, the privatetrans flag, commit
    # and return statements are elided in this excerpt; tokens unchanged.
        session = DBConn().session()

    def generate_path_dicts():
        # Normalize leading './' so paths are stored relative to the root.
        for fullpath in fullpaths:
            if fullpath.startswith( './' ):
                fullpath = fullpath[2:]
            yield {'filename':fullpath, 'id': binary_id }

    for d in generate_path_dicts():
        session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
        traceback.print_exc()

    # Only rollback if we set up the session ourself

__all__.append('insert_content_paths')
945 ################################################################################
class DSCFile(object):
    """A file referenced by a .dsc. (Excerpt: __repr__ header elided.)"""
    def __init__(self, *args, **kwargs):
        return '<DSCFile %s>' % self.dscfile_id

__all__.append('DSCFile')

def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
    """
    Returns a list of DSCFiles which may be empty

    @type dscfile_id: int (optional)
    @param dscfile_id: the dscfile_id of the DSCFiles to find

    @type source_id: int (optional)
    @param source_id: the source id related to the DSCFiles to find

    @type poolfile_id: int (optional)
    @param poolfile_id: the poolfile id related to the DSCFiles to find

    @return: Possibly empty list of DSCFiles
    """
    # Each filter is applied only when the corresponding id was supplied.
    q = session.query(DSCFile)

    if dscfile_id is not None:
        q = q.filter_by(dscfile_id=dscfile_id)

    if source_id is not None:
        q = q.filter_by(source_id=source_id)

    if poolfile_id is not None:
        q = q.filter_by(poolfile_id=poolfile_id)
    # NOTE(review): the final 'return q.all()' is elided in this excerpt.

__all__.append('get_dscfiles')
989 ################################################################################
class ExternalOverride(ORMObject):
    """An external override entry. (Excerpt: __repr__ header elided.)"""
    def __init__(self, *args, **kwargs):
        return '<ExternalOverride %s = %s: %s>' % (self.package, self.key, self.value)

__all__.append('ExternalOverride')
1000 ################################################################################
class PoolFile(ORMObject):
    """A file stored in the pool ('files' table)."""
    # NOTE(review): several original lines are elided in this excerpt
    # (remaining __init__ parameters, property decorators/headers for
    # fullpath/component/basename, and some returns); tokens unchanged.
    def __init__(self, filename = None, filesize = -1, \
        self.filename = filename
        self.filesize = filesize
        self.md5sum = md5sum

        session = DBConn().session().object_session(self)
        # Prefer a tainted archive's copy, if any (tainted DESC sorts first).
        af = session.query(ArchiveFile).join(Archive) \
            .filter(ArchiveFile.file == self) \
            .order_by(Archive.tainted.desc()).first()

    def component(self):
        session = DBConn().session().object_session(self)
        # .one() after the GROUP BY raises if the file maps to more than
        # one component.
        component_id = session.query(ArchiveFile.component_id).filter(ArchiveFile.file == self) \
            .group_by(ArchiveFile.component_id).one()
        return session.query(Component).get(component_id)

        return os.path.basename(self.filename)

    def is_valid(self, filesize = -1, md5sum = None):
        # True only when both the recorded size and md5sum match.
        return self.filesize == long(filesize) and self.md5sum == md5sum

    def properties(self):
        return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
            'sha256sum', 'source', 'binary', 'last_used']

    def not_null_constraints(self):
        return ['filename', 'md5sum']

    def identical_to(self, filename):
        """
        compare size and hash with the given file

        @return: true if the given file has the same size and hash as this object; false otherwise
        """
        # Cheap size check first, then the sha256 comparison.
        st = os.stat(filename)
        if self.filesize != st.st_size:

        f = open(filename, "r")
        sha256sum = apt_pkg.sha256sum(f)
        if sha256sum != self.sha256sum:

__all__.append('PoolFile')
def get_poolfile_like_name(filename, session=None):
    """
    Returns an array of PoolFile objects which are like the given name

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @return: array of PoolFile objects
    """
    # TODO: There must be a way of properly using bind parameters with %FOO%
    q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
    # NOTE(review): the decorator and 'return q.all()' are elided here.

__all__.append('get_poolfile_like_name')
1077 ################################################################################
class Fingerprint(ORMObject):
    """An OpenPGP key fingerprint ('fingerprint' table)."""
    def __init__(self, fingerprint = None):
        self.fingerprint = fingerprint

    # NOTE(review): the continuation of the properties() list is elided
    # in this excerpt.
    def properties(self):
        return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \

    def not_null_constraints(self):
        return ['fingerprint']

__all__.append('Fingerprint')
def get_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).

    @return: the Fingerprint object for the given fpr or None
    """
    # NOTE(review): decorator, 'try:' and return statements are elided.
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)
        except NoResultFound:

__all__.append('get_fingerprint')

def get_or_set_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    If no matching fpr is found, a row is inserted.

    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @return: the Fingerprint object for the given fpr
    """
    # NOTE(review): decorator, 'try:' and return statements are elided.
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)
        except NoResultFound:
            # Not present yet: insert and flush/commit so the row exists.
            fingerprint = Fingerprint()
            fingerprint.fingerprint = fpr
            session.add(fingerprint)
            session.commit_or_flush()

__all__.append('get_or_set_fingerprint')
1154 ################################################################################
# Helper routine for Keyring class
def get_ldap_name(entry):
    """Build a display name from an LDAP entry by joining cn/mn/sn parts."""
    # NOTE(review): the initialisation of 'name' and the per-key lookup of
    # 'ret' are elided in this excerpt; tokens below are unchanged.
    for k in ["cn", "mn", "sn"]:
        # Skip empty parts and the "-" placeholder value.
        if ret and ret[0] != "" and ret[0] != "-":
    return " ".join(name)
1165 ################################################################################
1167 class Keyring(object):
1168 gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
1169 " --with-colons --fingerprint --fingerprint"
1174 def __init__(self, *args, **kwargs):
1178 return '<Keyring %s>' % self.keyring_name
1180 def de_escape_gpg_str(self, txt):
1181 esclist = re.split(r'(\\x..)', txt)
1182 for x in range(1,len(esclist),2):
1183 esclist[x] = "%c" % (int(esclist[x][2:],16))
1184 return "".join(esclist)
1186 def parse_address(self, uid):
1187 """parses uid and returns a tuple of real name and email address"""
1189 (name, address) = email.Utils.parseaddr(uid)
1190 name = re.sub(r"\s*[(].*[)]", "", name)
1191 name = self.de_escape_gpg_str(name)
1194 return (name, address)
1196 def load_keys(self, keyring):
1197 if not self.keyring_id:
1198 raise Exception('Must be initialized with database information')
1200 k = os.popen(self.gpg_invocation % keyring, "r")
1205 field = line.split(":")
1206 if field[0] == "pub":
1209 (name, addr) = self.parse_address(field[9])
1211 self.keys[key]["email"] = addr
1212 self.keys[key]["name"] = name
1213 self.keys[key]["fingerprints"] = []
1215 elif key and field[0] == "sub" and len(field) >= 12:
1216 signingkey = ("s" in field[11])
1217 elif key and field[0] == "uid":
1218 (name, addr) = self.parse_address(field[9])
1219 if "email" not in self.keys[key] and "@" in addr:
1220 self.keys[key]["email"] = addr
1221 self.keys[key]["name"] = name
1222 elif signingkey and field[0] == "fpr":
1223 self.keys[key]["fingerprints"].append(field[9])
1224 self.fpr_lookup[field[9]] = key
def import_users_from_ldap(self, session):
    """Map the loaded key fingerprints to Debian LDAP accounts.

    Queries the configured LDAP server for all accounts in the valid GID
    that have a key fingerprint, and returns two lookup dictionaries.

    @rtype: tuple
    @return: (byname, byuid) where byuid maps uid_id -> (uid, name) and
        byname maps uid -> (uid_id, name)
    """
    LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
    LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
    ca_cert_file = cnf.get('Import-LDAP-Fingerprints::CACertFile')

    l = ldap.open(LDAPServer)

    # TODO: This should request a new context and use
    # connection-specific options (i.e. "l.set_option(...)")

    # Request a new TLS context. If there was already one, libldap
    # would not change the TLS options (like which CAs to trust).
    #l.set_option(ldap.OPT_X_TLS_NEWCTX, True)
    ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_HARD)
    #ldap.set_option(ldap.OPT_X_TLS_CACERTDIR, None)
    ldap.set_option(ldap.OPT_X_TLS_CACERTFILE, ca_cert_file)

    # Anonymous bind is sufficient for this read-only query.
    l.simple_bind_s("","")
    Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
           "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
           ["uid", "keyfingerprint", "cn", "mn", "sn"])

    ldap_fin_uid_id = {}
        uid = entry["uid"][0]
        name = get_ldap_name(entry)
        fingerprints = entry["keyFingerPrint"]
        for f in fingerprints:
            # Only fingerprints we saw in the keyring are interesting.
            key = self.fpr_lookup.get(f, None)
            if key not in self.keys:
            self.keys[key]["uid"] = uid
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, name)
            byname[uid] = (keyid, name)

    return (byname, byuid)
def generate_users_from_keyring(self, format, session):
    """Create/look up Uid rows for every key in the keyring.

    @type format: string
    @param format: %-format string used to build a uid from the key's
        email address (or from the literal "invalid-uid")

    @rtype: tuple
    @return: (byname, byuid) lookup dictionaries, as in
        import_users_from_ldap()
    """
    for x in self.keys.keys():
        if "email" not in self.keys[x]:
            # Key without a usable email address: mark it invalid.
            self.keys[x]["uid"] = format % "invalid-uid"
            uid = format % self.keys[x]["email"]
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, self.keys[x]["name"])
            byname[uid] = (keyid, self.keys[x]["name"])
            self.keys[x]["uid"] = uid

        uid = format % "invalid-uid"
        keyid = get_or_set_uid(uid, session).uid_id
        byuid[keyid] = (uid, "ungeneratable user id")
        byname[uid] = (keyid, "ungeneratable user id")

    return (byname, byuid)

__all__.append('Keyring')
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @rtype: Keyring
    @return: the Keyring object for this keyring
    """
    q = session.query(Keyring).filter_by(keyring_name=keyring)
    except NoResultFound:

__all__.append('get_keyring')
def get_active_keyring_paths(session=None):
    """
    @rtype: list
    @return: list of active keyring paths, ordered by descending priority
    """
    # keyring_name holds the filesystem path of the keyring file.
    return [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).order_by(desc(Keyring.priority)).all() ]

__all__.append('get_active_keyring_paths')
def get_primary_keyring_path(session=None):
    """
    Get the full path to the highest priority active keyring

    @rtype: str or None
    @return: path to the active keyring with the highest priority or None if no
             keyring is configured
    """
    keyrings = get_active_keyring_paths()

    # get_active_keyring_paths() returns highest priority first.
    if len(keyrings) > 0:

__all__.append('get_primary_keyring_path')
1353 ################################################################################
class DBChange(object):
    """ORM class for a row of the changes table (an uploaded .changes file)."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): this return statement reads like the body of
        # __repr__; its def line is not visible here — confirm.
        return '<DBChange %s>' % self.changesname

__all__.append('DBChange')
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return: DBChange object for the given filename (C{None} if not present)
    """
    q = session.query(DBChange).filter_by(changesname=filename)
    except NoResultFound:

__all__.append('get_dbchange')
1389 ################################################################################
class Maintainer(ORMObject):
    """ORM class for a row of the maintainer table."""
    def __init__(self, name = None):

    def properties(self):
        return ['name', 'maintainer_id']

    def not_null_constraints(self):

    def get_split_maintainer(self):
        # Defensive: the object may not have been loaded with a name yet.
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())

__all__.append('Maintainer')
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """
    q = session.query(Maintainer).filter_by(name=name)
    except NoResultFound:
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()

__all__.append('get_or_set_maintainer')
def get_maintainer(maintainer_id, session=None):
    """
    Return the name of the maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer
    @return: the Maintainer with this C{maintainer_id}
    """
    # Primary-key lookup; .get() returns None for an unknown id.
    return session.query(Maintainer).get(maintainer_id)

__all__.append('get_maintainer')
1460 ################################################################################
class NewComment(object):
    """ORM class for a row of the new_comments table (NEW-queue comments)."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): this return statement reads like the body of
        # __repr__; its def line is not visible here — confirm.
        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)

__all__.append('NewComment')
def has_new_comment(policy_queue, package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true if at least one comment matches
    """
    q = session.query(NewComment).filter_by(policy_queue=policy_queue)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    return bool(q.count() > 0)

__all__.append('has_new_comment')
def get_new_comments(policy_queue, package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """
    q = session.query(NewComment).filter_by(policy_queue=policy_queue)
    # Each filter is only applied when the caller constrained it.
    if package is not None: q = q.filter_by(package=package)
    if version is not None: q = q.filter_by(version=version)
    if comment_id is not None: q = q.filter_by(comment_id=comment_id)

__all__.append('get_new_comments')
1530 ################################################################################
class Override(ORMObject):
    """ORM class for a row of the override table."""
    def __init__(self, package = None, suite = None, component = None, overridetype = None, \
        section = None, priority = None):
        self.package = package
        self.component = component
        self.overridetype = overridetype
        self.section = section
        self.priority = priority

    def properties(self):
        return ['package', 'suite', 'component', 'overridetype', 'section', \

    def not_null_constraints(self):
        return ['package', 'suite', 'component', 'overridetype', 'section']

__all__.append('Override')
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to. If
    None, don't limit. Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
    limit to. If None, don't limit. Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
    limit to. If None, don't limit. Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """
    q = session.query(Override)
    q = q.filter_by(package=package)

    # Each optional filter accepts a scalar or a list; scalars are
    # normalised to lists and turned into an IN clause.
    if suite is not None:
        if not isinstance(suite, list): suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list): component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list): overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

__all__.append('get_override')
1599 ################################################################################
class OverrideType(ORMObject):
    """ORM class for a row of the override_type table."""

    def __init__(self, overridetype = None):
        # The override type's name.
        self.overridetype = overridetype

    def properties(self):
        """Attribute names exposed through ORMObject's generic helpers."""
        props = ['overridetype', 'overridetype_id', 'overrides_count']
        return props

    def not_null_constraints(self):
        """Attributes that may not be NULL when the row is written."""
        constraints = ['overridetype']
        return constraints

__all__.append('OverrideType')
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: OverrideType
    @return: the database id for the given override type
    """
    q = session.query(OverrideType).filter_by(overridetype=override_type)
    except NoResultFound:

__all__.append('get_override_type')
1638 ################################################################################
class PolicyQueue(object):
    """ORM class for a row of the policy_queue table."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): this return statement reads like the body of
        # __repr__; its def line is not visible here — confirm.
        return '<PolicyQueue %s>' % self.queue_name

__all__.append('PolicyQueue')
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(queue_name=queuename)
    except NoResultFound:

__all__.append('get_policy_queue')
1674 ################################################################################
class PolicyQueueUpload(object):
    """ORM class for a row of the policy_queue_upload table."""
    def __cmp__(self, other):
        # Sort order: source name, then version, then uploads with source
        # before source-less ones, then the .changes file name.
        ret = cmp(self.changes.source, other.changes.source)
        ret = apt_pkg.version_compare(self.changes.version, other.changes.version)
        if self.source is not None and other.source is None:
        elif self.source is None and other.source is not None:
        ret = cmp(self.changes.changesname, other.changes.changesname)

__all__.append('PolicyQueueUpload')
1692 ################################################################################
class PolicyQueueByhandFile(object):
    """ORM class for a row of the policy_queue_byhand_file table."""

__all__.append('PolicyQueueByhandFile')
1699 ################################################################################
class Priority(ORMObject):
    """ORM class for a row of the priority table."""
    def __init__(self, priority = None, level = None):
        self.priority = priority

    def properties(self):
        return ['priority', 'priority_id', 'level', 'overrides_count']

    def not_null_constraints(self):
        return ['priority', 'level']

    def __eq__(self, val):
        # Allow comparing a Priority directly against its name string.
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented

__all__.append('Priority')
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority
    """
    q = session.query(Priority).filter_by(priority=priority)
    except NoResultFound:

__all__.append('get_priority')
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """
    q = session.query(Priority)
        ret[x.priority] = x.priority_id

__all__.append('get_priorities')
1773 ################################################################################
class Section(ORMObject):
    """ORM class for a row of the section table."""
    def __init__(self, section = None):
        self.section = section

    def properties(self):
        return ['section', 'section_id', 'overrides_count']

    def not_null_constraints(self):

    def __eq__(self, val):
        # Allow comparing a Section directly against its name string.
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented

__all__.append('Section')
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name
    """
    q = session.query(Section).filter_by(section=section)
    except NoResultFound:

__all__.append('get_section')
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """
    q = session.query(Section)
        ret[x.section] = x.section_id

__all__.append('get_sections')
1846 ################################################################################
class SignatureHistory(ORMObject):
    """ORM class recording signatures dak has already seen (replay protection)."""
    def from_signed_file(cls, signed_file):
        """signature history entry from signed file

        @type signed_file: L{daklib.gpg.SignedFile}
        @param signed_file: signed file

        @rtype: L{SignatureHistory}
        """
        # NOTE(review): populates a new instance bound to 'self'; the line
        # creating it from cls is not visible here — confirm.
        self.fingerprint = signed_file.primary_fingerprint
        self.signature_timestamp = signed_file.signature_timestamp
        self.contents_sha1 = signed_file.contents_sha1()

__all__.append('SignatureHistory')
1866 ################################################################################
class SrcContents(ORMObject):
    """ORM class mapping a source package to a file name it ships."""
    def __init__(self, file = None, source = None):
        self.source = source

    def properties(self):
        return ['file', 'source']

__all__.append('SrcContents')
1878 ################################################################################
1880 from debian.debfile import Deb822
1882 # Temporary Deb822 subclass to fix bugs with : handling; see #597249
class Dak822(Deb822):
    """Deb822 subclass with a stricter field parser, working around
    Deb822's handling of colons (Debian bug #597249)."""

    def _internal_parser(self, sequence, fields=None):
        # The key is non-whitespace, non-colon characters before any colon.
        key_part = r"^(?P<key>[^: \t\n\r\f\v]+)\s*:\s*"
        single = re.compile(key_part + r"(?P<data>\S.*?)\s*$")   # "Key: value"
        multi = re.compile(key_part + r"$")                      # "Key:" with continuation lines
        multidata = re.compile(r"^\s(?P<data>.+?)\s*$")          # a continuation line

        # Only parse a field when no restriction was given or it was requested.
        wanted_field = lambda f: fields is None or f in fields

        if isinstance(sequence, basestring):
            sequence = sequence.splitlines()

        for line in self.gpg_stripped_paragraph(sequence):
            m = single.match(line)
                # Flush the previously collected field before starting a new one.
                self[curkey] = content
            if not wanted_field(m.group('key')):
            curkey = m.group('key')
            content = m.group('data')
            m = multi.match(line)
                self[curkey] = content
            if not wanted_field(m.group('key')):
            curkey = m.group('key')
            m = multidata.match(line)
            content += '\n' + line # XXX not m.group('data')?
        # Flush the final field of the paragraph.
        self[curkey] = content
class DBSource(ORMObject):
    """ORM class for a row of the source table (a source package version)."""
    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None, fingerprint = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date
        self.fingerprint = fingerprint

        # NOTE(review): this return belongs to a property (pkid?) whose
        # def line is not visible here — confirm.
        return self.source_id

    def properties(self):
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \
            'install_date', 'binaries_count', 'uploaders_count']

    def not_null_constraints(self):
        return ['source', 'version', 'install_date', 'maintainer', \
            'changedby', 'poolfile']

    def read_control_fields(self):
        """
        Reads the control information from a dsc

        @rtype: dict
        @return: fields is the dsc information in a dictionary form
        """
        fullpath = self.poolfile.fullpath
        fields = Dak822(open(self.poolfile.fullpath, 'r'))

    # Proxy collection: source.metadata behaves as {MetadataKey: value}.
    metadata = association_proxy('key', 'value')

    def scan_contents(self):
        """
        Returns a set of names for non directories. The path names are
        normalized after converting them from either utf-8 or iso8859-1
        encoding.
        """
        fullpath = self.poolfile.fullpath
        from daklib.contents import UnpackedSource
        unpacked = UnpackedSource(fullpath)
        for name in unpacked.get_all_filenames():
            # enforce proper utf-8 encoding
            name.decode('utf-8')
            except UnicodeDecodeError:
                # Fall back: assume latin-1 and re-encode as utf-8.
                name = name.decode('iso8859-1').encode('utf-8')

__all__.append('DBSource')
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
    1. exact match => 1.0-3
    2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: list
    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: returns 1 if a source with expected version is found, otherwise 0
    """
    from daklib.regexes import re_bin_only_nmu
    # Strip a binNMU suffix (e.g. '+b1') to recover the source's own version.
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))
        # source must exist in 'suite' or a suite that is enhanced by 'suite'
        s = get_suite(suite, session)
        enhances_vcs = session.query(VersionCheck).filter(VersionCheck.suite==s).filter_by(check='Enhances')
        considered_suites = [ vc.reference for vc in enhances_vcs ]
        considered_suites.append(s)

        q = q.filter(DBSource.suites.any(Suite.suite_id.in_([s.suite_id for s in considered_suites])))

    # No source found so return not ok

__all__.append('source_exists')
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """
    return session.query(Suite).filter(Suite.sources.any(source=source)).all()

__all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @type source: str
    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect. If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBSource objects for the given name (may be empty)
    """
    q = session.query(DBSource).filter_by(source=source)

    # Optional filters are only applied when the caller constrained them.
    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)

__all__.append('get_sources_from_name')
2098 # FIXME: This function fails badly if it finds more than 1 source package and
2099 # its implementation is trivial enough to be inlined.
def get_source_in_suite(source, suite, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite: string
    @param suite: the suite name

    @rtype: string
    @return: the version for I{source} in I{suite}
    """
    q = get_suite(suite, session).get_sources(source)
    except NoResultFound:

__all__.append('get_source_in_suite')
def import_metadata_into_db(obj, session=None):
    """
    This routine works on either DBBinary or DBSource objects and imports
    their metadata into the database

    @param obj: a DBBinary or DBSource object with read_control_fields()
    @param session: Optional SQLA session object
    """
    fields = obj.read_control_fields()
    for k in fields.keys():
            # Most values are plain ASCII.
            val = str(fields[k])
        except UnicodeEncodeError:
            # Fall back to UTF-8
                val = fields[k].encode('utf-8')
            except UnicodeEncodeError:
                # Finally try iso8859-1
                val = fields[k].encode('iso8859-1')
            # Otherwise we allow the exception to percolate up and we cause
            # a reject as someone is playing silly buggers

        obj.metadata[get_or_set_metadatakey(k, session)] = val

    session.commit_or_flush()

__all__.append('import_metadata_into_db')
2154 ################################################################################
class SrcFormat(object):
    """ORM class for a row of the src_format table."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): this return statement reads like the body of
        # __repr__; its def line is not visible here — confirm.
        return '<SrcFormat %s>' % (self.format_name)

__all__.append('SrcFormat')
2165 ################################################################################
# (display name, Suite attribute) pairs used when printing a Suite's details.
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
2183 # Why the heck don't we have any UNIQUE constraints in table suite?
2184 # TODO: Add UNIQUE constraints for appropriate columns.
class Suite(ORMObject):
    """ORM class for a row of the suite table."""
    def __init__(self, suite_name = None, version = None):
        self.suite_name = suite_name
        self.version = version

    def properties(self):
        return ['suite_name', 'version', 'sources_count', 'binaries_count', \

    def not_null_constraints(self):
        return ['suite_name']

    def __eq__(self, val):
        # Allow comparing a Suite directly against its name string.
        if isinstance(val, str):
            return (self.suite_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.suite_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

        # Render each SUITE_FIELDS entry as "Display: value".
        for disp, field in SUITE_FIELDS:
            val = getattr(self, field, None)
            ret.append("%s: %s" % (disp, val))

        return "\n".join(ret)

    def get_architectures(self, skipsrc=False, skipall=False):
        """
        Returns list of Architecture objects

        @type skipsrc: boolean
        @param skipsrc: Whether to skip returning the 'source' architecture entry

        @type skipall: boolean
        @param skipall: Whether to skip returning the 'all' architecture entry

        @rtype: list
        @return: list of Architecture objects for the given name (may be empty)
        """
        q = object_session(self).query(Architecture).with_parent(self)
            q = q.filter(Architecture.arch_string != 'source')
            q = q.filter(Architecture.arch_string != 'all')
        return q.order_by(Architecture.arch_string).all()

    def get_sources(self, source):
        """
        Returns a query object representing DBSource that is part of C{suite}.

          - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}

        @type source: string
        @param source: source package name

        @rtype: sqlalchemy.orm.query.Query
        @return: a query of DBSource
        """

        session = object_session(self)
        return session.query(DBSource).filter_by(source = source). \

    def get_overridesuite(self):
        # When no overridesuite is configured, the suite overrides itself.
        if self.overridesuite is None:
            return object_session(self).query(Suite).filter_by(suite_name=self.overridesuite).one()

        return os.path.join(self.archive.path, 'dists', self.suite_name)

__all__.append('Suite')
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Suite
    @return: Suite object for the requested suite name (None if not present)
    """
    q = session.query(Suite).filter_by(suite_name=suite)
    except NoResultFound:

__all__.append('get_suite')
2296 ################################################################################
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name. The list is
    empty if suite does not exist.

    @type suite: str
    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Architecture objects for the given name (may be empty)
    """
        return get_suite(suite, session).get_architectures(skipsrc, skipall)
    # get_suite() returned None (unknown suite) -> empty result.
    except AttributeError:

__all__.append('get_suite_architectures')
2330 ################################################################################
class Uid(ORMObject):
    """ORM class for a row of the uid table."""
    def __init__(self, uid = None, name = None):

    def __eq__(self, val):
        # Allow comparing a Uid directly against its uid string.
        if isinstance(val, str):
            return (self.uid == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.uid != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        return ['uid', 'name', 'fingerprint']

    def not_null_constraints(self):

__all__.append('Uid')
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: Uid
    @return: the uid object for the given uidname
    """
    q = session.query(Uid).filter_by(uid=uidname)
    except NoResultFound:
        session.commit_or_flush()

__all__.append('get_or_set_uid')
def get_uid_from_fingerprint(fpr, session=None):
    """Look up the Uid that owns key fingerprint C{fpr}."""
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)
    except NoResultFound:

__all__.append('get_uid_from_fingerprint')
2403 ################################################################################
class MetadataKey(ORMObject):
    """ORM class for a row of the metadata_keys table (control field names)."""
    def __init__(self, key = None):

    def properties(self):

    def not_null_constraints(self):

__all__.append('MetadataKey')
def get_or_set_metadatakey(keyname, session=None):
    """
    Returns MetadataKey object for given C{keyname}.

    If no matching keyname is found, a row is inserted.

    @type keyname: string
    @param keyname: The keyname to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: MetadataKey
    @return: the metadatakey object for the given keyname
    """
    q = session.query(MetadataKey).filter_by(key=keyname)
    except NoResultFound:
        ret = MetadataKey(keyname)
        session.commit_or_flush()

__all__.append('get_or_set_metadatakey')
2449 ################################################################################
class BinaryMetadata(ORMObject):
    """ORM class linking a DBBinary to one metadata key/value pair."""
    def __init__(self, key = None, value = None, binary = None):
        self.binary = binary

    def properties(self):
        return ['binary', 'key', 'value']

    def not_null_constraints(self):

__all__.append('BinaryMetadata')
2465 ################################################################################
class SourceMetadata(ORMObject):
    """ORM class linking a DBSource to one metadata key/value pair."""
    def __init__(self, key = None, value = None, source = None):
        self.source = source

    def properties(self):
        return ['source', 'key', 'value']

    def not_null_constraints(self):

__all__.append('SourceMetadata')
2481 ################################################################################
class VersionCheck(ORMObject):
    """ORM class for a row of the version_check table (suite version policies)."""
    def __init__(self, *args, **kwargs):

    def properties(self):
        #return ['suite_id', 'check', 'reference_id']

    def not_null_constraints(self):
        return ['suite', 'check', 'reference']

__all__.append('VersionCheck')
def get_version_checks(suite_name, check = None, session = None):
    """Return VersionCheck rows for C{suite_name}, optionally limited to
    one check type (e.g. 'Enhances', 'MustBeNewerThan')."""
    suite = get_suite(suite_name, session)
        # Make sure that what we return is iterable so that list comprehensions
        # involving this don't cause a traceback
    q = session.query(VersionCheck).filter_by(suite=suite)
        q = q.filter_by(check=check)

__all__.append('get_version_checks')
2510 ################################################################################
# NOTE(review): numbered excerpt of a larger file — many original lines are
# elided between the line numbers shown, so every method body below is
# incomplete (missing docstring lines, list headers, else-branches, returns).
#
# DBConn: the one database-connection object for the module. All instances
# share state through a common __dict__ (see __init__), so constructing
# DBConn anywhere yields the same engine/metadata/sessionmaker.
2512 class DBConn(object):
2514 database module init.
# Shares one state dict across all instances (Borg-style singleton via
# self.__shared_state) and performs the one-time setup only on first
# construction, guarded by the `initialised` flag.
2518 def __init__(self, *args, **kwargs):
2519 self.__dict__ = self.__shared_state
2521 if not getattr(self, 'initialised', False):
2522 self.initialised = True
# debug (SQL echo, see engine_args in __createconn) is switched on
# simply by passing any `debug` keyword argument
2523 self.debug = kwargs.has_key('debug')
# Reflect the schema from the live database: each name in `tables`
# becomes an autoloaded self.tbl_<name> Table, each name in `views` a
# self.view_<name> Table. The headers of both name lists are elided here.
2526 def __setuptables(self):
2529 'acl_architecture_map',
2530 'acl_fingerprint_map',
2537 'binaries_metadata',
2545 'external_overrides',
2546 'extra_src_references',
2548 'files_archive_map',
2554 # TODO: the maintainer column in table override should be removed.
2558 'policy_queue_upload',
2559 'policy_queue_upload_binaries_map',
2560 'policy_queue_byhand_file',
2563 'signature_history',
2572 'suite_architectures',
2573 'suite_build_queue_copy',
2574 'suite_src_formats',
# the names from here on appear to belong to the separate `views` list
# iterated at line 2606 — the list's opening line is elided; TODO confirm
2580 'almost_obsolete_all_associations',
2581 'almost_obsolete_src_associations',
2582 'any_associations_source',
2583 'bin_associations_binaries',
2584 'binaries_suite_arch',
2587 'newest_all_associations',
2588 'newest_any_associations',
2590 'newest_src_association',
2591 'obsolete_all_associations',
2592 'obsolete_any_associations',
2593 'obsolete_any_by_all_associations',
2594 'obsolete_src_associations',
2596 'src_associations_bin',
2597 'src_associations_src',
2598 'suite_arch_by_name',
# reflect every table; useexisting avoids errors on re-reflection
2601 for table_name in tables:
2602 table = Table(table_name, self.db_meta, \
2603 autoload=True, useexisting=True)
2604 setattr(self, 'tbl_%s' % table_name, table)
2606 for view_name in views:
2607 view = Table(view_name, self.db_meta, autoload=True)
2608 setattr(self, 'view_%s' % view_name, view)
# Bind each ORM class to its reflected table and declare the relations
# between them (classic, non-declarative SQLAlchemy mapper()
# configuration; `validator` MapperExtensions enforce per-class checks).
2610 def __setupmappers(self):
2611 mapper(Architecture, self.tbl_architecture,
2612 properties = dict(arch_id = self.tbl_architecture.c.id,
2613 suites = relation(Suite, secondary=self.tbl_suite_architectures,
2614 order_by=self.tbl_suite.c.suite_name,
2615 backref=backref('architectures', order_by=self.tbl_architecture.c.arch_string))),
2616 extension = validator)
2618 mapper(ACL, self.tbl_acl,
2620 architectures = relation(Architecture, secondary=self.tbl_acl_architecture_map, collection_class=set),
2621 fingerprints = relation(Fingerprint, secondary=self.tbl_acl_fingerprint_map, collection_class=set),
2622 match_keyring = relation(Keyring, primaryjoin=(self.tbl_acl.c.match_keyring_id == self.tbl_keyrings.c.id)),
2623 per_source = relation(ACLPerSource, collection_class=set),
# ACLPerSource carries two Fingerprint links, so each needs an
# explicit primaryjoin to disambiguate the foreign key used
2626 mapper(ACLPerSource, self.tbl_acl_per_source,
2628 acl = relation(ACL),
2629 fingerprint = relation(Fingerprint, primaryjoin=(self.tbl_acl_per_source.c.fingerprint_id == self.tbl_fingerprint.c.id)),
2630 created_by = relation(Fingerprint, primaryjoin=(self.tbl_acl_per_source.c.created_by_id == self.tbl_fingerprint.c.id)),
2633 mapper(Archive, self.tbl_archive,
2634 properties = dict(archive_id = self.tbl_archive.c.id,
2635 archive_name = self.tbl_archive.c.name))
2637 mapper(ArchiveFile, self.tbl_files_archive_map,
2638 properties = dict(archive = relation(Archive, backref='files'),
2639 component = relation(Component),
2640 file = relation(PoolFile, backref='archives')))
2642 mapper(BuildQueue, self.tbl_build_queue,
2643 properties = dict(queue_id = self.tbl_build_queue.c.id,
2644 suite = relation(Suite, primaryjoin=(self.tbl_build_queue.c.suite_id==self.tbl_suite.c.id))))
2646 mapper(DBBinary, self.tbl_binaries,
2647 properties = dict(binary_id = self.tbl_binaries.c.id,
2648 package = self.tbl_binaries.c.package,
2649 version = self.tbl_binaries.c.version,
2650 maintainer_id = self.tbl_binaries.c.maintainer,
2651 maintainer = relation(Maintainer),
2652 source_id = self.tbl_binaries.c.source,
2653 source = relation(DBSource, backref='binaries'),
2654 arch_id = self.tbl_binaries.c.architecture,
2655 architecture = relation(Architecture),
2656 poolfile_id = self.tbl_binaries.c.file,
2657 poolfile = relation(PoolFile),
2658 binarytype = self.tbl_binaries.c.type,
2659 fingerprint_id = self.tbl_binaries.c.sig_fpr,
2660 fingerprint = relation(Fingerprint),
2661 install_date = self.tbl_binaries.c.install_date,
# lazy='dynamic' backrefs yield query objects, not lists, so
# large suites don't load every binary eagerly
2662 suites = relation(Suite, secondary=self.tbl_bin_associations,
2663 backref=backref('binaries', lazy='dynamic')),
2664 extra_sources = relation(DBSource, secondary=self.tbl_extra_src_references,
2665 backref=backref('extra_binary_references', lazy='dynamic')),
# metadata rows keyed by their MetadataKey 'key' attribute
2666 key = relation(BinaryMetadata, cascade='all',
2667 collection_class=attribute_mapped_collection('key'))),
2668 extension = validator)
2670 mapper(Component, self.tbl_component,
2671 properties = dict(component_id = self.tbl_component.c.id,
2672 component_name = self.tbl_component.c.name,
2673 suites = relation(Suite, secondary=self.tbl_component_suite)),
2674 extension = validator)
2676 mapper(DBConfig, self.tbl_config,
2677 properties = dict(config_id = self.tbl_config.c.id))
2679 mapper(DSCFile, self.tbl_dsc_files,
2680 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
2681 source_id = self.tbl_dsc_files.c.source,
2682 source = relation(DBSource),
2683 poolfile_id = self.tbl_dsc_files.c.file,
2684 poolfile = relation(PoolFile)))
2686 mapper(ExternalOverride, self.tbl_external_overrides,
2688 suite_id = self.tbl_external_overrides.c.suite,
2689 suite = relation(Suite),
2690 component_id = self.tbl_external_overrides.c.component,
2691 component = relation(Component)))
2693 mapper(PoolFile, self.tbl_files,
2694 properties = dict(file_id = self.tbl_files.c.id,
2695 filesize = self.tbl_files.c.size),
2696 extension = validator)
2698 mapper(Fingerprint, self.tbl_fingerprint,
2699 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
2700 uid_id = self.tbl_fingerprint.c.uid,
2701 uid = relation(Uid),
2702 keyring_id = self.tbl_fingerprint.c.keyring,
2703 keyring = relation(Keyring),
2704 acl = relation(ACL)),
2705 extension = validator)
2707 mapper(Keyring, self.tbl_keyrings,
2708 properties = dict(keyring_name = self.tbl_keyrings.c.name,
2709 keyring_id = self.tbl_keyrings.c.id,
2710 acl = relation(ACL, primaryjoin=(self.tbl_keyrings.c.acl_id == self.tbl_acl.c.id)))),
# DBChange maps the .changes upload records; all plain column
# properties, no relations
2712 mapper(DBChange, self.tbl_changes,
2713 properties = dict(change_id = self.tbl_changes.c.id,
2714 seen = self.tbl_changes.c.seen,
2715 source = self.tbl_changes.c.source,
2716 binaries = self.tbl_changes.c.binaries,
2717 architecture = self.tbl_changes.c.architecture,
2718 distribution = self.tbl_changes.c.distribution,
2719 urgency = self.tbl_changes.c.urgency,
2720 maintainer = self.tbl_changes.c.maintainer,
2721 changedby = self.tbl_changes.c.changedby,
2722 date = self.tbl_changes.c.date,
2723 version = self.tbl_changes.c.version))
# two DBSource relations distinguished by which source column
# (maintainer vs changedby) joins back to this maintainer
2725 mapper(Maintainer, self.tbl_maintainer,
2726 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
2727 maintains_sources = relation(DBSource, backref='maintainer',
2728 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
2729 changed_sources = relation(DBSource, backref='changedby',
2730 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
2731 extension = validator)
2733 mapper(NewComment, self.tbl_new_comments,
2734 properties = dict(comment_id = self.tbl_new_comments.c.id,
2735 policy_queue = relation(PolicyQueue)))
2737 mapper(Override, self.tbl_override,
2738 properties = dict(suite_id = self.tbl_override.c.suite,
2739 suite = relation(Suite, \
2740 backref=backref('overrides', lazy='dynamic')),
2741 package = self.tbl_override.c.package,
2742 component_id = self.tbl_override.c.component,
2743 component = relation(Component, \
2744 backref=backref('overrides', lazy='dynamic')),
2745 priority_id = self.tbl_override.c.priority,
2746 priority = relation(Priority, \
2747 backref=backref('overrides', lazy='dynamic')),
2748 section_id = self.tbl_override.c.section,
2749 section = relation(Section, \
2750 backref=backref('overrides', lazy='dynamic')),
2751 overridetype_id = self.tbl_override.c.type,
2752 overridetype = relation(OverrideType, \
2753 backref=backref('overrides', lazy='dynamic'))))
2755 mapper(OverrideType, self.tbl_override_type,
2756 properties = dict(overridetype = self.tbl_override_type.c.type,
2757 overridetype_id = self.tbl_override_type.c.id))
2759 mapper(PolicyQueue, self.tbl_policy_queue,
2760 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id,
2761 suite = relation(Suite, primaryjoin=(self.tbl_policy_queue.c.suite_id == self.tbl_suite.c.id))))
2763 mapper(PolicyQueueUpload, self.tbl_policy_queue_upload,
2765 changes = relation(DBChange),
2766 policy_queue = relation(PolicyQueue, backref='uploads'),
2767 target_suite = relation(Suite),
2768 source = relation(DBSource),
2769 binaries = relation(DBBinary, secondary=self.tbl_policy_queue_upload_binaries_map),
2772 mapper(PolicyQueueByhandFile, self.tbl_policy_queue_byhand_file,
2774 upload = relation(PolicyQueueUpload, backref='byhand'),
2778 mapper(Priority, self.tbl_priority,
2779 properties = dict(priority_id = self.tbl_priority.c.id))
2781 mapper(Section, self.tbl_section,
2782 properties = dict(section_id = self.tbl_section.c.id,
2783 section=self.tbl_section.c.section))
# straight table mapping, no extra properties needed
2785 mapper(SignatureHistory, self.tbl_signature_history)
2787 mapper(DBSource, self.tbl_source,
2788 properties = dict(source_id = self.tbl_source.c.id,
2789 version = self.tbl_source.c.version,
2790 maintainer_id = self.tbl_source.c.maintainer,
2791 poolfile_id = self.tbl_source.c.file,
2792 poolfile = relation(PoolFile),
2793 fingerprint_id = self.tbl_source.c.sig_fpr,
2794 fingerprint = relation(Fingerprint),
2795 changedby_id = self.tbl_source.c.changedby,
2796 srcfiles = relation(DSCFile,
2797 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
2798 suites = relation(Suite, secondary=self.tbl_src_associations,
2799 backref=backref('sources', lazy='dynamic')),
2800 uploaders = relation(Maintainer,
2801 secondary=self.tbl_src_uploaders),
2802 key = relation(SourceMetadata, cascade='all',
2803 collection_class=attribute_mapped_collection('key'))),
2804 extension = validator)
2806 mapper(SrcFormat, self.tbl_src_format,
2807 properties = dict(src_format_id = self.tbl_src_format.c.id,
2808 format_name = self.tbl_src_format.c.format_name))
# Suite references PolicyQueue twice (policy_queue_id vs
# new_queue_id), hence the explicit primaryjoins
2810 mapper(Suite, self.tbl_suite,
2811 properties = dict(suite_id = self.tbl_suite.c.id,
2812 policy_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.policy_queue_id == self.tbl_policy_queue.c.id)),
2813 new_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.new_queue_id == self.tbl_policy_queue.c.id)),
2814 copy_queues = relation(BuildQueue,
2815 secondary=self.tbl_suite_build_queue_copy),
2816 srcformats = relation(SrcFormat, secondary=self.tbl_suite_src_formats,
2817 backref=backref('suites', lazy='dynamic')),
2818 archive = relation(Archive, backref='suites'),
2819 acls = relation(ACL, secondary=self.tbl_suite_acl_map, collection_class=set),
2820 components = relation(Component, secondary=self.tbl_component_suite,
2821 order_by=self.tbl_component.c.ordering,
2822 backref=backref('suite'))),
2823 extension = validator)
2825 mapper(Uid, self.tbl_uid,
2826 properties = dict(uid_id = self.tbl_uid.c.id,
2827 fingerprint = relation(Fingerprint)),
2828 extension = validator)
# cascade='all' on contents: deleting a binary/source deletes its
# contents rows too
2830 mapper(BinContents, self.tbl_bin_contents,
2832 binary = relation(DBBinary,
2833 backref=backref('contents', lazy='dynamic', cascade='all')),
2834 file = self.tbl_bin_contents.c.file))
2836 mapper(SrcContents, self.tbl_src_contents,
2838 source = relation(DBSource,
2839 backref=backref('contents', lazy='dynamic', cascade='all')),
2840 file = self.tbl_src_contents.c.file))
2842 mapper(MetadataKey, self.tbl_metadata_keys,
2844 key_id = self.tbl_metadata_keys.c.key_id,
2845 key = self.tbl_metadata_keys.c.key))
2847 mapper(BinaryMetadata, self.tbl_binaries_metadata,
2849 binary_id = self.tbl_binaries_metadata.c.bin_id,
2850 binary = relation(DBBinary),
2851 key_id = self.tbl_binaries_metadata.c.key_id,
2852 key = relation(MetadataKey),
2853 value = self.tbl_binaries_metadata.c.value))
2855 mapper(SourceMetadata, self.tbl_source_metadata,
2857 source_id = self.tbl_source_metadata.c.src_id,
2858 source = relation(DBSource),
2859 key_id = self.tbl_source_metadata.c.key_id,
2860 key = relation(MetadataKey),
2861 value = self.tbl_source_metadata.c.value))
# version_check joins Suite twice (suite and reference columns);
# reference is eagerly loaded (lazy='joined')
2863 mapper(VersionCheck, self.tbl_version_check,
2865 suite_id = self.tbl_version_check.c.suite,
2866 suite = relation(Suite, primaryjoin=self.tbl_version_check.c.suite==self.tbl_suite.c.id),
2867 reference_id = self.tbl_version_check.c.reference,
2868 reference = relation(Suite, primaryjoin=self.tbl_version_check.c.reference==self.tbl_suite.c.id, lazy='joined')))
2870 ## Connection functions
# Build the postgresql:// connection string from dak's Config
# (DB::Service preferred, then DB::Host, otherwise the local socket
# form — the `else:` line for that last branch is elided), create the
# engine, then reflect tables and set up mappers.
2871 def __createconn(self):
2872 from config import Config
2874 if cnf.has_key("DB::Service"):
2875 connstr = "postgresql://service=%s" % cnf["DB::Service"]
2876 elif cnf.has_key("DB::Host"):
# TCP connection to a remote host, optional port
2878 connstr = "postgresql://%s" % cnf["DB::Host"]
2879 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
2880 connstr += ":%s" % cnf["DB::Port"]
2881 connstr += "/%s" % cnf["DB::Name"]
# local (unix-socket) connection; port goes in the query string
2884 connstr = "postgresql:///%s" % cnf["DB::Name"]
2885 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
2886 connstr += "?port=%s" % cnf["DB::Port"]
# optional engine tuning from config; echo mirrors self.debug
2888 engine_args = { 'echo': self.debug }
2889 if cnf.has_key('DB::PoolSize'):
2890 engine_args['pool_size'] = int(cnf['DB::PoolSize'])
2891 if cnf.has_key('DB::MaxOverflow'):
2892 engine_args['max_overflow'] = int(cnf['DB::MaxOverflow'])
# native-unicode toggle is unsupported on SQLAlchemy 0.5
2893 if sa_major_version != '0.5' and cnf.has_key('DB::Unicode') and \
2894 cnf['DB::Unicode'] == 'false':
2895 engine_args['use_native_unicode'] = False
2897 # Monkey patch a new dialect in in order to support service= syntax
2898 import sqlalchemy.dialects.postgresql
2899 from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
2900 class PGDialect_psycopg2_dak(PGDialect_psycopg2):
2901 def create_connect_args(self, url):
2902 if str(url).startswith('postgresql://service='):
# 21 == len('postgresql://'); everything after it is
# the libpq service name
2904 servicename = str(url)[21:]
2905 return (['service=%s' % servicename], {})
2907 return PGDialect_psycopg2.create_connect_args(self, url)
2909 sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak
2912 self.db_pg = create_engine(connstr, **engine_args)
2913 self.db_meta = MetaData()
2914 self.db_meta.bind = self.db_pg
2915 self.db_smaker = sessionmaker(bind=self.db_pg,
2919 self.__setuptables()
2920 self.__setupmappers()
# connection failures are fatal: report and exit via utils.fubar
2922 except OperationalError as e:
2924 utils.fubar("Cannot connect to database (%s)" % str(e))
# remember the creating process so session() can detect forks
2926 self.pid = os.getpid()
2928 def session(self, work_mem = 0):
2930 Returns a new session object. If a work_mem parameter is provided a new
2931 transaction is started and the work_mem parameter is set for this
2932 transaction. The work_mem parameter is measured in MB. A default value
2933 will be used if the parameter is not set.
2935 # reinitialize DBConn in new processes
2936 if self.pid != os.getpid():
2939 session = self.db_smaker()
# SET LOCAL limits the setting to the current transaction;
# presumably guarded by a `work_mem > 0` check on an elided line
# — TODO confirm
2941 session.execute("SET LOCAL work_mem TO '%d MB'" % work_mem)
2944 __all__.append('DBConn')