5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
38 from os.path import normpath
50 import simplejson as json
52 from datetime import datetime, timedelta
53 from errno import ENOENT
54 from tempfile import mkstemp, mkdtemp
55 from subprocess import Popen, PIPE
56 from tarfile import TarFile
58 from inspect import getargspec
61 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
63 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
64 backref, MapperExtension, EXT_CONTINUE, object_mapper, clear_mappers
65 from sqlalchemy import types as sqltypes
66 from sqlalchemy.orm.collections import attribute_mapped_collection
67 from sqlalchemy.ext.associationproxy import association_proxy
69 # Don't remove this, we re-export the exceptions to scripts which import us
70 from sqlalchemy.exc import *
71 from sqlalchemy.orm.exc import NoResultFound
73 # Only import Config until Queue stuff is changed to store its config
75 from config import Config
76 from textutils import fix_maintainer
77 from dak_exceptions import DBUpdateError, NoSourceFieldError, FileExistsError
79 # suppress some deprecation warnings in squeeze related to sqlalchemy
# NOTE(review): both filterwarnings() calls appear truncated in this chunk
# (the closing arguments, e.g. the warning category, are not visible) --
# confirm against the full file before editing.
81 warnings.filterwarnings('ignore', \
82 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
84 warnings.filterwarnings('ignore', \
85 "Predicate of partial index .* ignored during reflection", \
89 ################################################################################
91 # Patch in support for the debversion field type so that it works during
95 # that is for sqlalchemy 0.6
96 UserDefinedType = sqltypes.UserDefinedType
98 # this one for sqlalchemy 0.5
99 UserDefinedType = sqltypes.TypeEngine
# NOTE(review): as shown, the second assignment would always overwrite the
# first; the try/except that selects between the two (presumably catching
# AttributeError on older sqlalchemy) is elided in this chunk -- confirm.
101 class DebVersion(UserDefinedType):
# Custom SQLAlchemy column type so the PostgreSQL 'debversion' type can be
# reflected and used by the ORM. Method bodies are elided in this chunk;
# presumably they return the column spec string and pass-through processors
# (None) -- confirm against the full file.
102     def get_col_spec(self):
105     def bind_processor(self, dialect):
108     # ' = None' is needed for sqlalchemy 0.5:
109     def result_processor(self, dialect, coltype = None):
# Register the DebVersion type with the supported SQLAlchemy versions only;
# refuse to start on anything else.
112 sa_major_version = sqlalchemy.__version__[0:3]
113 if sa_major_version in ["0.5", "0.6", "0.7", "0.8"]:
114     from sqlalchemy.databases import postgres
115     postgres.ischema_names['debversion'] = DebVersion
# NOTE(review): the 'else:' introducing the raise below is elided in this chunk.
117     raise Exception("dak only ported to SQLA versions 0.5 to 0.8. See daklib/dbconn.py")
119 ################################################################################
121 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
123 ################################################################################
125 def session_wrapper(fn):
# NOTE(review): several lines (docstring quotes, the else branches, the
# try/finally around the call, session.close() and 'return wrapped') are
# elided in this chunk; the visible fragments are kept byte-identical.
# NOTE(review): inspect.getargspec is deprecated in Python 3 and
# fn.func_name is Python 2 only -- this file targets Python 2.
127     Wrapper around common ".., session=None):" handling. If the wrapped
128     function is called without passing 'session', we create a local one
129     and destroy it when the function ends.
131     Also attaches a commit_or_flush method to the session; if we created a
132     local session, this is a synonym for session.commit(), otherwise it is a
133     synonym for session.flush().
136     def wrapped(*args, **kwargs):
137         private_transaction = False
139         # Find the session object
140         session = kwargs.get('session')
143         if len(args) <= len(getargspec(fn)[0]) - 1:
144             # No session specified as last argument or in kwargs
145             private_transaction = True
146             session = kwargs['session'] = DBConn().session()
148             # Session is last argument in args
152             session = args[-1] = DBConn().session()
153             private_transaction = True
155         if private_transaction:
156             session.commit_or_flush = session.commit
158             session.commit_or_flush = session.flush
161             return fn(*args, **kwargs)
163         if private_transaction:
164             # We created a session; close it.
167     wrapped.__doc__ = fn.__doc__
168     wrapped.func_name = fn.func_name
172 __all__.append('session_wrapper')
174 ################################################################################
176 class ORMObject(object):
# NOTE(review): many lines (docstring quotes, else branches, method headers
# such as json()/classname()/__repr__()/__str__()/validate(), and several
# statements) are elided in this chunk; the visible fragments are kept
# byte-identical. Confirm against the full file before modifying.
178     ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
179     derived classes must implement the properties() method.
182     def properties(self):
184         This method should be implemented by all derived classes and returns a
185         list of the important properties. The properties 'created' and
186         'modified' will be added automatically. A suffix '_count' should be
187         added to properties that are lists or query objects. The most important
188         property name should be returned as the first element in the list
189         because it is used by repr().
195         Returns a JSON representation of the object based on the properties
196         returned from the properties() method.
199         # add created and modified
200         all_properties = self.properties() + ['created', 'modified']
201         for property in all_properties:
202             # check for list or query
# A '_count' suffix means: report len()/count() of the underlying attribute
# instead of the attribute itself (queries are skipped during validation).
203             if property[-6:] == '_count':
204                 real_property = property[:-6]
205                 if not hasattr(self, real_property):
207                 value = getattr(self, real_property)
208                 if hasattr(value, '__len__'):
211                 elif hasattr(value, 'count'):
212                     # query (but not during validation)
213                     if self.in_validation:
215                     value = value.count()
217                     raise KeyError('Do not understand property %s.' % property)
219             if not hasattr(self, property):
222             value = getattr(self, property)
226             elif isinstance(value, ORMObject):
227                 # use repr() for ORMObject types
230                 # we want a string for all other types because json cannot
233             data[property] = value
234         return json.dumps(data)
238         Returns the name of the class.
240         return type(self).__name__
244         Returns a short string representation of the object using the first
245         element from the properties() method.
247         primary_property = self.properties()[0]
248         value = getattr(self, primary_property)
249         return '<%s %s>' % (self.classname(), str(value))
253         Returns a human readable form of the object using the properties()
256         return '<%s %s>' % (self.classname(), self.json())
258     def not_null_constraints(self):
260         Returns a list of properties that must be not NULL. Derived classes
261         should override this method if needed.
265     validation_message = \
266         "Validation failed because property '%s' must not be empty in object\n%s"
268     in_validation = False
272         This function validates the not NULL constraints as returned by
273         not_null_constraints(). It raises the DBUpdateError exception if
276         for property in self.not_null_constraints():
277             # TODO: It is a bit awkward that the mapper configuration allow
278             # directly setting the numeric _id columns. We should get rid of it
280             if hasattr(self, property + '_id') and \
281                 getattr(self, property + '_id') is not None:
283             if not hasattr(self, property) or getattr(self, property) is None:
284                 # str() might lead to races due to a 2nd flush
285                 self.in_validation = True
286                 message = self.validation_message % (property, str(self))
287                 self.in_validation = False
288                 raise DBUpdateError(message)
292     def get(cls, primary_key, session = None):
294         This is a support function that allows getting an object by its primary
297         Architecture.get(3[, session])
299         instead of the more verbose
301         session.query(Architecture).get(3)
303         return session.query(cls).get(primary_key)
305     def session(self, replace = False):
307         Returns the current session that is associated with the object. May
308         return None if the object is in detached state.
311         return object_session(self)
313     def clone(self, session = None):
315         Clones the current object in a new session and returns the new clone. A
316         fresh session is created if the optional session parameter is not
317         provided. The function will fail if a session is provided and has
320         RATIONALE: SQLAlchemy's session is not thread safe. This method clones
321         an existing object to allow several threads to work with their own
322         instances of an ORMObject.
324         WARNING: Only persistent (committed) objects can be cloned. Changes
325         made to the original object that are not committed yet will get lost.
326         The session of the new object will always be rolled back to avoid
330         if self.session() is None:
331             raise RuntimeError( \
332                 'Method clone() failed for detached object:\n%s' % self)
333         self.session().flush()
334         mapper = object_mapper(self)
335         primary_key = mapper.primary_key_from_instance(self)
336         object_class = self.__class__
338             session = DBConn().session()
339         elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
340             raise RuntimeError( \
341                 'Method clone() failed due to unflushed changes in session.')
342         new_object = session.query(object_class).get(primary_key)
344         if new_object is None:
345             raise RuntimeError( \
346                 'Method clone() failed for non-persistent object:\n%s' % self)
349 __all__.append('ORMObject')
351 ################################################################################
353 class Validator(MapperExtension):
355     This class calls the validate() method for each instance for the
356     'before_update' and 'before_insert' events. A global object validator is
357     used for configuring the individual mappers.
# NOTE(review): the method bodies are elided in this chunk; presumably they
# call instance.validate() and return EXT_CONTINUE -- confirm against the
# full file.
360     def before_update(self, mapper, connection, instance):
364     def before_insert(self, mapper, connection, instance):
# Shared Validator instance handed to each mapper() configuration below.
368 validator = Validator()
370 ################################################################################
class ACL(ORMObject):
    """An upload access-control list entry.

    Defect fixed: the ``def __repr__(self):`` header was missing in this
    chunk, leaving a bare ``return`` statement at class level.
    """

    def __repr__(self):
        return "<ACL {0}>".format(self.name)

__all__.append('ACL')
class ACLPerSource(ORMObject):
    """Per-source-package ACL exception (acl/fingerprint/source tuple).

    Defect fixed: the ``def __repr__(self):`` header was missing in this
    chunk, leaving a bare ``return`` statement at class level.
    """

    def __repr__(self):
        return "<ACLPerSource acl={0} fingerprint={1} source={2} reason={3}>".format(self.acl.name, self.fingerprint.fingerprint, self.source, self.reason)

__all__.append('ACLPerSource')
384 ################################################################################
class Architecture(ORMObject):
    """An architecture known to the archive, identified by its arch_string."""

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Allow comparing an Architecture directly against a plain string.
        if isinstance(val, str):
            return self.arch_string == val
        # Anything else: defer to the default comparison machinery.
        return NotImplemented

    def __ne__(self, val):
        # Mirror __eq__ for the inequality case.
        if isinstance(val, str):
            return self.arch_string != val
        return NotImplemented

    def properties(self):
        # arch_string first: it is the primary property used by repr().
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']

__all__.append('Architecture')
412 def get_architecture(architecture, session=None):
# NOTE(review): the @session_wrapper decorator, docstring quotes and the
# try/return lines are elided in this chunk; presumably the body returns
# q.one() and falls back to None on NoResultFound -- confirm.
414     Returns database id for given C{architecture}.
416     @type architecture: string
417     @param architecture: The name of the architecture
419     @type session: Session
420     @param session: Optional SQLA session object (a temporary one will be
421     generated if not supplied)
424     @return: Architecture object for the given arch (None if not present)
427     q = session.query(Architecture).filter_by(arch_string=architecture)
431     except NoResultFound:
434 __all__.append('get_architecture')
436 ################################################################################
438 class Archive(object):
# Plain mapped class for the 'archive' table.
# NOTE(review): the __init__ body and the __repr__ header are elided in
# this chunk.
439     def __init__(self, *args, **kwargs):
443         return '<Archive %s>' % self.archive_name
445 __all__.append('Archive')
448 def get_archive(archive, session=None):
# NOTE(review): decorator, docstring quotes and the try/return lines are
# elided in this chunk.
450     returns database id for given C{archive}.
452     @type archive: string
453     @param archive: the name of the archive
455     @type session: Session
456     @param session: Optional SQLA session object (a temporary one will be
457     generated if not supplied)
460     @return: Archive object for the given name (None if not present)
# Archive names are matched case-insensitively by lowering the input first.
463     archive = archive.lower()
465     q = session.query(Archive).filter_by(archive_name=archive)
469     except NoResultFound:
472 __all__.append('get_archive')
474 ################################################################################
476 class ArchiveFile(object):
# Association object linking a PoolFile to an Archive/Component pair.
# NOTE(review): the 'self.file = file' assignment and the @property header
# for the path accessor below are elided in this chunk.
477     def __init__(self, archive=None, component=None, file=None):
478         self.archive = archive
479         self.component = component
483         return os.path.join(self.archive.path, 'pool', self.component.component_name, self.file.filename)
485 __all__.append('ArchiveFile')
487 ################################################################################
489 class BinContents(ORMObject):
# Maps one file name contained in one binary package.
# NOTE(review): the __init__ body (the attribute assignments) is elided in
# this chunk.
490     def __init__(self, file = None, binary = None):
494     def properties(self):
495         return ['file', 'binary']
497 __all__.append('BinContents')
499 ################################################################################
def subprocess_setup():
    """Restore the default SIGPIPE disposition before exec'ing a child.

    Python installs its own SIGPIPE handler by default, which is usually
    not what non-Python subprocesses expect; used as Popen's preexec_fn.
    """
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
506 class DBBinary(ORMObject):
# Mapped class for a binary package row.
# NOTE(review): many lines are elided in this chunk (e.g. 'self.source',
# the binary_id property header, docstring quotes, try: lines, the
# yield statements in scan_contents, and the close/return lines of
# read_control). Fragments are kept byte-identical.
507     def __init__(self, package = None, source = None, version = None, \
508         maintainer = None, architecture = None, poolfile = None, \
509         binarytype = 'deb', fingerprint=None):
510         self.package = package
512         self.version = version
513         self.maintainer = maintainer
514         self.architecture = architecture
515         self.poolfile = poolfile
516         self.binarytype = binarytype
517         self.fingerprint = fingerprint
521         return self.binary_id
523     def properties(self):
524         return ['package', 'version', 'maintainer', 'source', 'architecture', \
525             'poolfile', 'binarytype', 'fingerprint', 'install_date', \
526             'suites_count', 'binary_id', 'contents_count', 'extra_sources']
528     def not_null_constraints(self):
529         return ['package', 'version', 'maintainer', 'source', 'poolfile', \
532     metadata = association_proxy('key', 'value')
534     def scan_contents(self):
536         Yields the contents of the package. Only regular files are yielded and
537         the path names are normalized after converting them from either utf-8
538         or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the
539         package does not contain any regular file.
# Stream the data.tar members straight out of dpkg-deb without unpacking.
541         fullpath = self.poolfile.fullpath
542         dpkg = Popen(['dpkg-deb', '--fsys-tarfile', fullpath], stdout = PIPE,
543             preexec_fn = subprocess_setup)
544         tar = TarFile.open(fileobj = dpkg.stdout, mode = 'r|')
545         for member in tar.getmembers():
546             if not member.isdir():
547                 name = normpath(member.name)
548                 # enforce proper utf-8 encoding
551                 except UnicodeDecodeError:
552                     name = name.decode('iso8859-1').encode('utf-8')
558     def read_control(self):
560         Reads the control information from a binary.
563         @return: stanza text of the control section.
566         fullpath = self.poolfile.fullpath
# NOTE(review): deb_file is not visibly closed here -- the close() call may
# be on an elided line; verify against the full file.
567         deb_file = open(fullpath, 'r')
568         stanza = utils.deb_extract_control(deb_file)
573     def read_control_fields(self):
575         Reads the control information from a binary and return
579         @return: fields of the control section as a dictionary.
582         stanza = self.read_control()
583         return apt_pkg.TagSection(stanza)
585 __all__.append('DBBinary')
588 def get_suites_binary_in(package, session=None):
# NOTE(review): decorator and docstring quotes are elided in this chunk.
590     Returns list of Suite objects which given C{package} name is in
593     @param package: DBBinary package name to search for
596     @return: list of Suite objects for the given package
599     return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()
601 __all__.append('get_suites_binary_in')
604 def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
# NOTE(review): arch_list=[] is a mutable default argument; it is only read
# here (len() / in_()), so it is harmless, but a None default would be safer.
# Decorator, docstring quotes and the 'if binary is None: return None' lines
# are elided in this chunk.
606     Returns the component name of the newest binary package in suite_list or
607     None if no package is found. The result can be optionally filtered by a list
608     of architecture names.
611     @param package: DBBinary package name to search for
613     @type suite_list: list of str
614     @param suite_list: list of suite_name items
616     @type arch_list: list of str
617     @param arch_list: optional list of arch_string items that defaults to []
619     @rtype: str or NoneType
620     @return: name of component or None
623     q = session.query(DBBinary).filter_by(package = package). \
624         join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
625     if len(arch_list) > 0:
626         q = q.join(DBBinary.architecture). \
627             filter(Architecture.arch_string.in_(arch_list))
# Newest version wins: order by version descending and take the first row.
628     binary = q.order_by(desc(DBBinary.version)).first()
632     return binary.poolfile.component.component_name
634 __all__.append('get_component_by_package_suite')
636 ################################################################################
638 class BuildQueue(object):
# Plain mapped class for the 'build_queue' table.
# NOTE(review): the __init__ body and the __repr__ header are elided here.
639     def __init__(self, *args, **kwargs):
643         return '<BuildQueue %s>' % self.queue_name
645 __all__.append('BuildQueue')
647 ################################################################################
class Component(ORMObject):
    """An archive component, identified by its component_name."""

    def __init__(self, component_name = None):
        self.component_name = component_name

    def __eq__(self, val):
        # Allow comparing a Component directly against a plain string.
        if isinstance(val, str):
            return self.component_name == val
        # Anything else: defer to the default comparison machinery.
        return NotImplemented

    def __ne__(self, val):
        # Mirror __eq__ for the inequality case.
        if isinstance(val, str):
            return self.component_name != val
        return NotImplemented

    def properties(self):
        # component_name first: it is the primary property used by repr().
        return ['component_name', 'component_id', 'description',
                'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        return ['component_name']

__all__.append('Component')
676 def get_component(component, session=None):
# NOTE(review): decorator, docstring quotes and the try/return lines are
# elided in this chunk.
678     Returns database id for given C{component}.
680     @type component: string
681     @param component: The name of the component
684     @return: the database id for the given component
# Component names are matched case-insensitively by lowering the input.
687     component = component.lower()
689     q = session.query(Component).filter_by(component_name=component)
693     except NoResultFound:
696 __all__.append('get_component')
699 def get_mapped_component(component_name, session=None):
700     """get component after mappings
# NOTE(review): decorator and several body lines (the cnf = Config() setup,
# the assignment inside the loop, and the final return) are elided in this
# chunk.
702     Evaluate component mappings from ComponentMappings in dak.conf for the
703     given component name.
705     @todo: ansgar wants to get rid of this. It's currently only used for
708     @type component_name: str
709     @param component_name: component name
711     @param session: database session
713     @rtype: L{daklib.dbconn.Component} or C{None}
714     @return: component after applying maps or C{None}
# Each mapping is a "src dst" pair; the first matching source wins.
717     for m in cnf.value_list("ComponentMappings"):
718         (src, dst) = m.split()
719         if component_name == src:
721     component = session.query(Component).filter_by(component_name=component_name).first()
724 __all__.append('get_mapped_component')
def get_component_names(session=None):
    """
    Returns list of strings of component names.

    @return: list of strings of component names
    """
    # NOTE(review): in the full file this is presumably wrapped by
    # @session_wrapper (the decorator line is not visible here) -- confirm.
    components = session.query(Component).all()
    return [c.component_name for c in components]

__all__.append('get_component_names')
739 ################################################################################
741 class DBConfig(object):
# Plain mapped class for the 'config' table.
# NOTE(review): the __init__ body and the __repr__ header are elided here.
742     def __init__(self, *args, **kwargs):
746         return '<DBConfig %s>' % self.name
748 __all__.append('DBConfig')
750 ################################################################################
753 def get_or_set_contents_file_id(filename, session=None):
# NOTE(review): decorator, docstring quotes, the try:/session.add() lines
# and the final return are elided in this chunk.
755     Returns database id for given filename.
757     If no matching file is found, a row is inserted.
759     @type filename: string
760     @param filename: The filename
761     @type session: SQLAlchemy
762     @param session: Optional SQL session object (a temporary one will be
763     generated if not supplied). If not passed, a commit will be performed at
764     the end of the function, otherwise the caller is responsible for committing.
767     @return: the database id for the given filename
770     q = session.query(ContentFilename).filter_by(filename=filename)
773         ret = q.one().cafilename_id
774     except NoResultFound:
775         cf = ContentFilename()
776         cf.filename = filename
# commit_or_flush is attached by session_wrapper: commit for a private
# session, flush for a caller-provided one.
778         session.commit_or_flush()
779         ret = cf.cafilename_id
783 __all__.append('get_or_set_contents_file_id')
786 def get_contents(suite, overridetype, section=None, session=None):
# NOTE(review): decorator, docstring quotes and parts of the SELECT column
# list are elided in this chunk. The query text is kept byte-identical.
788     Returns contents for a suite / overridetype combination, limiting
789     to a section if not None.
792     @param suite: Suite object
794     @type overridetype: OverrideType
795     @param overridetype: OverrideType object
797     @type section: Section
798     @param section: Optional section object to limit results to
800     @type session: SQLAlchemy
801     @param session: Optional SQL session object (a temporary one will be
802     generated if not supplied)
805     @return: ResultsProxy object set up to return tuples of (filename, section,
809     # find me all of the contents for a given suite
810     contents_q = """SELECT (p.path||'/'||n.file) AS fn,
814                    FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
815                    JOIN content_file_names n ON (c.filename=n.id)
816                    JOIN binaries b ON (b.id=c.binary_pkg)
817                    JOIN override o ON (o.package=b.package)
818                    JOIN section s ON (s.id=o.section)
819                    WHERE o.suite = :suiteid AND o.type = :overridetypeid
820                    AND b.type=:overridetypename"""
# Bind parameters keep the suite/overridetype values out of the SQL string.
822     vals = {'suiteid': suite.suite_id,
823             'overridetypeid': overridetype.overridetype_id,
824             'overridetypename': overridetype.overridetype}
826     if section is not None:
827         contents_q += " AND s.id = :sectionid"
828         vals['sectionid'] = section.section_id
830     contents_q += " ORDER BY fn"
832     return session.execute(contents_q, vals)
834 __all__.append('get_contents')
836 ################################################################################
838 class ContentFilepath(object):
# Plain mapped class for the 'content_file_paths' table.
# NOTE(review): the __init__ body and the __repr__ header are elided here.
839     def __init__(self, *args, **kwargs):
843         return '<ContentFilepath %s>' % self.filepath
845 __all__.append('ContentFilepath')
848 def get_or_set_contents_path_id(filepath, session=None):
# NOTE(review): decorator, docstring quotes, the try:/session.add() lines
# and the final return are elided in this chunk.
850     Returns database id for given path.
852     If no matching file is found, a row is inserted.
854     @type filepath: string
855     @param filepath: The filepath
857     @type session: SQLAlchemy
858     @param session: Optional SQL session object (a temporary one will be
859     generated if not supplied). If not passed, a commit will be performed at
860     the end of the function, otherwise the caller is responsible for committing.
863     @return: the database id for the given path
866     q = session.query(ContentFilepath).filter_by(filepath=filepath)
869         ret = q.one().cafilepath_id
870     except NoResultFound:
871         cf = ContentFilepath()
872         cf.filepath = filepath
874         session.commit_or_flush()
875         ret = cf.cafilepath_id
879 __all__.append('get_or_set_contents_path_id')
881 ################################################################################
883 class ContentAssociation(object):
# Plain mapped class for the 'content_associations' table.
# NOTE(review): the __init__ body and the __repr__ header are elided here.
884     def __init__(self, *args, **kwargs):
888         return '<ContentAssociation %s>' % self.ca_id
890 __all__.append('ContentAssociation')
892 def insert_content_paths(binary_id, fullpaths, session=None):
# NOTE(review): several lines are elided in this chunk (docstring quotes,
# the privatetrans setup, the DELETE of old contents, the execute() params
# argument, the except:/rollback and the final returns). Fragments kept
# byte-identical.
894     Make sure given path is associated with given binary id
897     @param binary_id: the id of the binary
898     @type fullpaths: list
899     @param fullpaths: the list of paths of the file being associated with the binary
900     @type session: SQLAlchemy session
901     @param session: Optional SQLAlchemy session. If this is passed, the caller
902     is responsible for ensuring a transaction has begun and committing the
903     results or rolling back based on the result code. If not passed, a commit
904     will be performed at the end of the function, otherwise the caller is
905     responsible for committing.
907     @return: True upon success
912         session = DBConn().session()
917     def generate_path_dicts():
918         for fullpath in fullpaths:
# Strip a leading './' so paths are stored relative to the package root.
919             if fullpath.startswith( './' ):
920                 fullpath = fullpath[2:]
922             yield {'filename':fullpath, 'id': binary_id }
924     for d in generate_path_dicts():
925         session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
934         traceback.print_exc()
936         # Only rollback if we set up the session ourself
943 __all__.append('insert_content_paths')
945 ################################################################################
947 class DSCFile(object):
# Plain mapped class for the 'dsc_files' table.
# NOTE(review): the __init__ body and the __repr__ header are elided here.
948     def __init__(self, *args, **kwargs):
952         return '<DSCFile %s>' % self.dscfile_id
954 __all__.append('DSCFile')
957 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
# NOTE(review): decorator, docstring quotes and the final 'return q.all()'
# are elided in this chunk.
959     Returns a list of DSCFiles which may be empty
961     @type dscfile_id: int (optional)
962     @param dscfile_id: the dscfile_id of the DSCFiles to find
964     @type source_id: int (optional)
965     @param source_id: the source id related to the DSCFiles to find
967     @type poolfile_id: int (optional)
968     @param poolfile_id: the poolfile id related to the DSCFiles to find
971     @return: Possibly empty list of DSCFiles
# Filters are applied only for the identifiers actually supplied.
974     q = session.query(DSCFile)
976     if dscfile_id is not None:
977         q = q.filter_by(dscfile_id=dscfile_id)
979     if source_id is not None:
980         q = q.filter_by(source_id=source_id)
982     if poolfile_id is not None:
983         q = q.filter_by(poolfile_id=poolfile_id)
987 __all__.append('get_dscfiles')
989 ################################################################################
991 class ExternalOverride(ORMObject):
# Mapped class for the 'external_overrides' key/value table.
# NOTE(review): the __init__ body and the __repr__ header are elided here.
992     def __init__(self, *args, **kwargs):
996         return '<ExternalOverride %s = %s: %s>' % (self.package, self.key, self.value)
998 __all__.append('ExternalOverride')
1000 ################################################################################
1002 class PoolFile(ORMObject):
# Mapped class for a file stored in the archive pool.
# NOTE(review): several lines are elided in this chunk (the sha1sum/sha256sum
# parameters and assignments in __init__, the @property headers for
# fullpath/component/basename, the 'return False'/'return True' branches of
# is_valid/identical_to, and the close() for f). Fragments kept byte-identical.
# NOTE(review): long() and the Popen/os.popen idioms mark this as Python 2.
1003     def __init__(self, filename = None, filesize = -1, \
1005         self.filename = filename
1006         self.filesize = filesize
1007         self.md5sum = md5sum
1011         session = DBConn().session().object_session(self)
# Prefer the non-tainted archive copy: tainted archives sort first and are
# skipped via order_by(Archive.tainted.desc()).first() -- confirm intent.
1012         af = session.query(ArchiveFile).join(Archive) \
1013             .filter(ArchiveFile.file == self) \
1014             .order_by(Archive.tainted.desc()).first()
1018     def component(self):
1019         session = DBConn().session().object_session(self)
# one() enforces that the file belongs to exactly one component.
1020         component_id = session.query(ArchiveFile.component_id).filter(ArchiveFile.file == self) \
1021             .group_by(ArchiveFile.component_id).one()
1022         return session.query(Component).get(component_id)
1026         return os.path.basename(self.filename)
1028     def is_valid(self, filesize = -1, md5sum = None):
1029         return self.filesize == long(filesize) and self.md5sum == md5sum
1031     def properties(self):
1032         return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
1033             'sha256sum', 'source', 'binary', 'last_used']
1035     def not_null_constraints(self):
1036         return ['filename', 'md5sum']
1038     def identical_to(self, filename):
1040         compare size and hash with the given file
1043         @return: true if the given file has the same size and hash as this object; false otherwise
# Cheap size check first; only hash when the sizes match.
1045         st = os.stat(filename)
1046         if self.filesize != st.st_size:
1049         f = open(filename, "r")
1050         sha256sum = apt_pkg.sha256sum(f)
1051         if sha256sum != self.sha256sum:
1056 __all__.append('PoolFile')
1059 def get_poolfile_like_name(filename, session=None):
# NOTE(review): decorator, docstring quotes and the final 'return q.all()'
# are elided in this chunk.
1061     Returns an array of PoolFile objects which are like the given name
1063     @type filename: string
1064     @param filename: the filename of the file to check against the DB
1067     @return: array of PoolFile objects
1070     # TODO: There must be a way of properly using bind parameters with %FOO%
1071     q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1075 __all__.append('get_poolfile_like_name')
1077 ################################################################################
1079 class Fingerprint(ORMObject):
# Mapped class for a GPG key fingerprint.
# NOTE(review): the continuation line of the properties() return list is
# elided in this chunk.
1080     def __init__(self, fingerprint = None):
1081         self.fingerprint = fingerprint
1083     def properties(self):
1084         return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
1087     def not_null_constraints(self):
1088         return ['fingerprint']
1090 __all__.append('Fingerprint')
1093 def get_fingerprint(fpr, session=None):
# NOTE(review): decorator, docstring quotes and the try/return lines are
# elided in this chunk; presumably returns q.one() or None on NoResultFound.
1095     Returns Fingerprint object for given fpr.
1098     @param fpr: The fpr to find / add
1100     @type session: SQLAlchemy
1101     @param session: Optional SQL session object (a temporary one will be
1102     generated if not supplied).
1105     @return: the Fingerprint object for the given fpr or None
1108     q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1112     except NoResultFound:
1117 __all__.append('get_fingerprint')
1120 def get_or_set_fingerprint(fpr, session=None):
# NOTE(review): decorator, docstring quotes, the try:/return lines are
# elided in this chunk.
1122     Returns Fingerprint object for given fpr.
1124     If no matching fpr is found, a row is inserted.
1127     @param fpr: The fpr to find / add
1129     @type session: SQLAlchemy
1130     @param session: Optional SQL session object (a temporary one will be
1131     generated if not supplied). If not passed, a commit will be performed at
1132     the end of the function, otherwise the caller is responsible for committing.
1133     A flush will be performed either way.
1136     @return: the Fingerprint object for the given fpr
1139     q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1143     except NoResultFound:
1144         fingerprint = Fingerprint()
1145         fingerprint.fingerprint = fpr
1146         session.add(fingerprint)
# commit for a private session, flush for a caller-provided one.
1147         session.commit_or_flush()
1152 __all__.append('get_or_set_fingerprint')
1154 ################################################################################
1156 # Helper routine for Keyring class
# Builds a display name from the cn/mn/sn LDAP attributes of an entry.
# NOTE(review): the name-list initialisation, the entry lookup and the
# append are elided in this chunk; values equal to "" or "-" are skipped.
1157 def get_ldap_name(entry):
1159     for k in ["cn", "mn", "sn"]:
1161         if ret and ret[0] != "" and ret[0] != "-":
1163     return " ".join(name)
1165 ################################################################################
1167 class Keyring(object):
1168 gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
1169 " --with-colons --fingerprint --fingerprint"
1174 def __init__(self, *args, **kwargs):
1178 return '<Keyring %s>' % self.keyring_name
1180 def de_escape_gpg_str(self, txt):
1181 esclist = re.split(r'(\\x..)', txt)
1182 for x in range(1,len(esclist),2):
1183 esclist[x] = "%c" % (int(esclist[x][2:],16))
1184 return "".join(esclist)
1186 def parse_address(self, uid):
1187 """parses uid and returns a tuple of real name and email address"""
1189 (name, address) = email.Utils.parseaddr(uid)
1190 name = re.sub(r"\s*[(].*[)]", "", name)
1191 name = self.de_escape_gpg_str(name)
1194 return (name, address)
1196 def load_keys(self, keyring):
1197 if not self.keyring_id:
1198 raise Exception('Must be initialized with database information')
1200 k = os.popen(self.gpg_invocation % keyring, "r")
1202 need_fingerprint = False
1205 field = line.split(":")
1206 if field[0] == "pub":
1209 (name, addr) = self.parse_address(field[9])
1211 self.keys[key]["email"] = addr
1212 self.keys[key]["name"] = name
1213 need_fingerprint = True
1214 elif key and field[0] == "uid":
1215 (name, addr) = self.parse_address(field[9])
1216 if "email" not in self.keys[key] and "@" in addr:
1217 self.keys[key]["email"] = addr
1218 self.keys[key]["name"] = name
1219 elif need_fingerprint and field[0] == "fpr":
1220 self.keys[key]["fingerprints"] = [field[9]]
1221 self.fpr_lookup[field[9]] = key
1222 need_fingerprint = False
# Look up Debian developers in LDAP by key fingerprint and attach their LDAP
# uid to the matching entries in self.keys; returns (byname, byuid) mappings.
1224 def import_users_from_ldap(self, session):
1228 LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
1229 LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
1230 ca_cert_file = cnf.get('Import-LDAP-Fingerprints::CACertFile')
1232 l = ldap.open(LDAPServer)
1235 # TODO: This should request a new context and use
1236 # connection-specific options (i.e. "l.set_option(...)")
1238 # Request a new TLS context. If there was already one, libldap
1239 # would not change the TLS options (like which CAs to trust).
1240 #l.set_option(ldap.OPT_X_TLS_NEWCTX, True)
1241 ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_HARD)
1242 #ldap.set_option(ldap.OPT_X_TLS_CACERTDIR, None)
1243 ldap.set_option(ldap.OPT_X_TLS_CACERTFILE, ca_cert_file)
# Anonymous bind, then search for entries that have a key fingerprint and
# the configured valid GID.
1246 l.simple_bind_s("","")
1247 Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
1248 "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
1249 ["uid", "keyfingerprint", "cn", "mn", "sn"])
1251 ldap_fin_uid_id = {}
# NOTE(review): the loop headers and the byuid/byname initialisations are
# elided in this excerpt; 'entry', 'byuid' and 'byname' come from those lines.
1258 uid = entry["uid"][0]
1259 name = get_ldap_name(entry)
1260 fingerprints = entry["keyFingerPrint"]
1262 for f in fingerprints:
1263 key = self.fpr_lookup.get(f, None)
1264 if key not in self.keys:
1266 self.keys[key]["uid"] = uid
1270 keyid = get_or_set_uid(uid, session).uid_id
1271 byuid[keyid] = (uid, name)
1272 byname[uid] = (keyid, name)
1274 return (byname, byuid)
# Create Uid database rows from the loaded keyring entries, formatting each
# email address through *format*; returns (byname, byuid) mappings.
1276 def generate_users_from_keyring(self, format, session):
1280 for x in self.keys.keys():
# Keys without a usable email address get the special 'invalid-uid' user.
1281 if "email" not in self.keys[x]:
1283 self.keys[x]["uid"] = format % "invalid-uid"
# NOTE(review): the else-branch header and byuid/byname initialisations are
# elided in this excerpt.
1285 uid = format % self.keys[x]["email"]
1286 keyid = get_or_set_uid(uid, session).uid_id
1287 byuid[keyid] = (uid, self.keys[x]["name"])
1288 byname[uid] = (keyid, self.keys[x]["name"])
1289 self.keys[x]["uid"] = uid
1292 uid = format % "invalid-uid"
1293 keyid = get_or_set_uid(uid, session).uid_id
1294 byuid[keyid] = (uid, "ungeneratable user id")
1295 byname[uid] = (keyid, "ungeneratable user id")
1297 return (byname, byuid)

1299 __all__.append('Keyring')
# Look up a Keyring row by name; returns None when no such keyring exists.
1302 def get_keyring(keyring, session=None):
1304 If C{keyring} does not have an entry in the C{keyrings} table yet, return None
1305 If C{keyring} already has an entry, simply return the existing Keyring
1307 @type keyring: string
1308 @param keyring: the keyring name
1311 @return: the Keyring object for this keyring
1314 q = session.query(Keyring).filter_by(keyring_name=keyring)
# NOTE(review): the try/return and None fallback lines are elided here.
1318 except NoResultFound:
1321 __all__.append('get_keyring')
# Return the keyring_name (path) of every active keyring, highest priority
# first.  'Keyring.active == True' is intentional: it builds a SQL expression.
1324 def get_active_keyring_paths(session=None):
1327 @return: list of active keyring paths
1329 return [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).order_by(desc(Keyring.priority)).all() ]

1331 __all__.append('get_active_keyring_paths')
# Convenience wrapper around get_active_keyring_paths() that returns just the
# highest-priority path (or None — the fallback line is elided in this excerpt).
1334 def get_primary_keyring_path(session=None):
1336 Get the full path to the highest priority active keyring
1339 @return: path to the active keyring with the highest priority or None if no
1340 keyring is configured
1342 keyrings = get_active_keyring_paths()
1344 if len(keyrings) > 0:

1349 __all__.append('get_primary_keyring_path')
1351 ################################################################################
# ORM class for a row of the 'changes' table (an uploaded .changes file).
1353 class DBChange(object):
1354 def __init__(self, *args, **kwargs):
# NOTE(review): the __repr__ def line is elided in this excerpt.
1358 return '<DBChange %s>' % self.changesname

1360 __all__.append('DBChange')
# Look up a DBChange row by its .changes file name.
1363 def get_dbchange(filename, session=None):
1365 returns DBChange object for given C{filename}.
1367 @type filename: string
1368 @param filename: the name of the file
1370 @type session: Session
1371 @param session: Optional SQLA session object (a temporary one will be
1372 generated if not supplied)
1375 @return: DBChange object for the given filename (C{None} if not present)
1378 q = session.query(DBChange).filter_by(changesname=filename)
# NOTE(review): the try/return and None fallback lines are elided here.
1382 except NoResultFound:
1385 __all__.append('get_dbchange')
1387 ################################################################################
# ORM class for a row of the 'maintainer' table.
1389 class Maintainer(ORMObject):
1390 def __init__(self, name = None):
1393 def properties(self):
1394 return ['name', 'maintainer_id']
1396 def not_null_constraints(self):
# Split the stored "Name <email>" string via fix_maintainer(); an unset name
# yields a tuple of four empty strings.
1399 def get_split_maintainer(self):
1400 if not hasattr(self, 'name') or self.name is None:
1401 return ('', '', '', '')
1403 return fix_maintainer(self.name.strip())

1405 __all__.append('Maintainer')
# Fetch the Maintainer row for *name*, inserting it first if missing.
1408 def get_or_set_maintainer(name, session=None):
1410 Returns Maintainer object for given maintainer name.
1412 If no matching maintainer name is found, a row is inserted.
1415 @param name: The maintainer name to add
1417 @type session: SQLAlchemy
1418 @param session: Optional SQL session object (a temporary one will be
1419 generated if not supplied). If not passed, a commit will be performed at
1420 the end of the function, otherwise the caller is responsible for commiting.
1421 A flush will be performed either way.
1424 @return: the Maintainer object for the given maintainer
1427 q = session.query(Maintainer).filter_by(name=name)
# On a miss, insert the new row and commit-or-flush so it gets an id.
1430 except NoResultFound:
1431 maintainer = Maintainer()
1432 maintainer.name = name
1433 session.add(maintainer)
1434 session.commit_or_flush()
1439 __all__.append('get_or_set_maintainer')
# Primary-key lookup of a Maintainer row; Query.get returns None for a miss.
1442 def get_maintainer(maintainer_id, session=None):
1444 Return the name of the maintainer behind C{maintainer_id} or None if that
1445 maintainer_id is invalid.
1447 @type maintainer_id: int
1448 @param maintainer_id: the id of the maintainer
1451 @return: the Maintainer with this C{maintainer_id}
1454 return session.query(Maintainer).get(maintainer_id)

1456 __all__.append('get_maintainer')
1458 ################################################################################
# ORM class for a row of the 'new_comments' table (ftpmaster NEW-queue notes).
1460 class NewComment(object):
1461 def __init__(self, *args, **kwargs):
# NOTE(review): the __repr__ def line is elided in this excerpt.
1465 return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)

1467 __all__.append('NewComment')
# True when at least one NewComment exists for (policy_queue, package, version).
1470 def has_new_comment(policy_queue, package, version, session=None):
1472 Returns true if the given combination of C{package}, C{version} has a comment.
1474 @type package: string
1475 @param package: name of the package
1477 @type version: string
1478 @param version: package version
1480 @type session: Session
1481 @param session: Optional SQLA session object (a temporary one will be
1482 generated if not supplied)
1488 q = session.query(NewComment).filter_by(policy_queue=policy_queue)
1489 q = q.filter_by(package=package)
1490 q = q.filter_by(version=version)
1492 return bool(q.count() > 0)

1494 __all__.append('has_new_comment')
# List NewComment rows for a policy queue, optionally narrowed by package,
# version and/or comment id.
1497 def get_new_comments(policy_queue, package=None, version=None, comment_id=None, session=None):
1499 Returns (possibly empty) list of NewComment objects for the given
1502 @type package: string (optional)
1503 @param package: name of the package
1505 @type version: string (optional)
1506 @param version: package version
1508 @type comment_id: int (optional)
1509 @param comment_id: An id of a comment
1511 @type session: Session
1512 @param session: Optional SQLA session object (a temporary one will be
1513 generated if not supplied)
1516 @return: A (possibly empty) list of NewComment objects will be returned
1519 q = session.query(NewComment).filter_by(policy_queue=policy_queue)
1520 if package is not None: q = q.filter_by(package=package)
1521 if version is not None: q = q.filter_by(version=version)
1522 if comment_id is not None: q = q.filter_by(comment_id=comment_id)
# NOTE(review): the final 'return q.all()' line is elided in this excerpt.
1526 __all__.append('get_new_comments')
1528 ################################################################################
# ORM class for a row of the 'override' table (per-suite/component placement
# of a package into a section and priority).
1530 class Override(ORMObject):
1531 def __init__(self, package = None, suite = None, component = None, overridetype = None, \
1532 section = None, priority = None):
1533 self.package = package
# NOTE(review): the 'self.suite = suite' line is elided in this excerpt.
1535 self.component = component
1536 self.overridetype = overridetype
1537 self.section = section
1538 self.priority = priority
1540 def properties(self):
1541 return ['package', 'suite', 'component', 'overridetype', 'section', \
1544 def not_null_constraints(self):
1545 return ['package', 'suite', 'component', 'overridetype', 'section']

1547 __all__.append('Override')
# Query Override rows for *package*, optionally narrowed by suite(s),
# component(s) and override type(s); scalar filter values are promoted to
# one-element lists so a single IN-clause handles both forms.
1550 def get_override(package, suite=None, component=None, overridetype=None, session=None):
1552 Returns Override object for the given parameters
1554 @type package: string
1555 @param package: The name of the package
1557 @type suite: string, list or None
1558 @param suite: The name of the suite (or suites if a list) to limit to. If
1559 None, don't limit. Defaults to None.
1561 @type component: string, list or None
1562 @param component: The name of the component (or components if a list) to
1563 limit to. If None, don't limit. Defaults to None.
1565 @type overridetype: string, list or None
1566 @param overridetype: The name of the overridetype (or overridetypes if a list) to
1567 limit to. If None, don't limit. Defaults to None.
1569 @type session: Session
1570 @param session: Optional SQLA session object (a temporary one will be
1571 generated if not supplied)
1574 @return: A (possibly empty) list of Override objects will be returned
1577 q = session.query(Override)
1578 q = q.filter_by(package=package)
1580 if suite is not None:
1581 if not isinstance(suite, list): suite = [suite]
1582 q = q.join(Suite).filter(Suite.suite_name.in_(suite))
1584 if component is not None:
1585 if not isinstance(component, list): component = [component]
1586 q = q.join(Component).filter(Component.component_name.in_(component))
1588 if overridetype is not None:
1589 if not isinstance(overridetype, list): overridetype = [overridetype]
1590 q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))
# NOTE(review): the final 'return q.all()' line is elided in this excerpt.
1594 __all__.append('get_override')
1597 ################################################################################
class OverrideType(ORMObject):
    """Kind of an override entry, e.g. deb, udeb or dsc."""

    def __init__(self, overridetype=None):
        self.overridetype = overridetype

    def properties(self):
        # Attribute names exposed through ORMObject's generic machinery.
        return ['overridetype', 'overridetype_id', 'overrides_count']

    def not_null_constraints(self):
        # Columns that must never be NULL for a valid row.
        return ['overridetype']

__all__.append('OverrideType')
# Look up an OverrideType row by its name.
1612 def get_override_type(override_type, session=None):
1614 Returns OverrideType object for given C{override type}.
1616 @type override_type: string
1617 @param override_type: The name of the override type
1619 @type session: Session
1620 @param session: Optional SQLA session object (a temporary one will be
1621 generated if not supplied)
1624 @return: the database id for the given override type
1627 q = session.query(OverrideType).filter_by(overridetype=override_type)
# NOTE(review): the try/return and None fallback lines are elided here.
1631 except NoResultFound:
1634 __all__.append('get_override_type')
1636 ################################################################################
# ORM class for a row of the 'policy_queue' table (e.g. NEW, byhand).
1638 class PolicyQueue(object):
1639 def __init__(self, *args, **kwargs):
# NOTE(review): the __repr__ def line is elided in this excerpt.
1643 return '<PolicyQueue %s>' % self.queue_name

1645 __all__.append('PolicyQueue')
# Look up a PolicyQueue row by queue name.
1648 def get_policy_queue(queuename, session=None):
1650 Returns PolicyQueue object for given C{queue name}
1652 @type queuename: string
1653 @param queuename: The name of the queue
1655 @type session: Session
1656 @param session: Optional SQLA session object (a temporary one will be
1657 generated if not supplied)
1660 @return: PolicyQueue object for the given queue
1663 q = session.query(PolicyQueue).filter_by(queue_name=queuename)
# NOTE(review): the try/return and None fallback lines are elided here.
1667 except NoResultFound:
1670 __all__.append('get_policy_queue')
1672 ################################################################################
# ORM class for a policy queue upload; orders uploads by source name, then
# version, then source-before-binary, then .changes file name.
1674 class PolicyQueueUpload(object):
1675 def __cmp__(self, other):
1676 ret = cmp(self.changes.source, other.changes.source)
# NOTE(review): the 'if ret == 0:' guard lines between comparisons are
# elided in this excerpt.
1678 ret = apt_pkg.version_compare(self.changes.version, other.changes.version)
1680 if self.source is not None and other.source is None:
1682 elif self.source is None and other.source is not None:
1685 ret = cmp(self.changes.changesname, other.changes.changesname)
1688 __all__.append('PolicyQueueUpload')
1690 ################################################################################
# ORM marker class for byhand files in a policy queue; the (empty) class body
# is elided in this excerpt.
1692 class PolicyQueueByhandFile(object):
1695 __all__.append('PolicyQueueByhandFile')
1697 ################################################################################
# ORM class for a row of the 'priority' table; compares equal to a plain
# string naming the priority.
1699 class Priority(ORMObject):
1700 def __init__(self, priority = None, level = None):
1701 self.priority = priority
# NOTE(review): the 'self.level = level' line is elided in this excerpt.
1704 def properties(self):
1705 return ['priority', 'priority_id', 'level', 'overrides_count']
1707 def not_null_constraints(self):
1708 return ['priority', 'level']
# String comparison convenience: Priority == 'optional' works directly.
1710 def __eq__(self, val):
1711 if isinstance(val, str):
1712 return (self.priority == val)
1713 # This signals to use the normal comparison operator
1714 return NotImplemented
1716 def __ne__(self, val):
1717 if isinstance(val, str):
1718 return (self.priority != val)
1719 # This signals to use the normal comparison operator
1720 return NotImplemented

1722 __all__.append('Priority')
# Look up a Priority row by its name.
1725 def get_priority(priority, session=None):
1727 Returns Priority object for given C{priority name}.
1729 @type priority: string
1730 @param priority: The name of the priority
1732 @type session: Session
1733 @param session: Optional SQLA session object (a temporary one will be
1734 generated if not supplied)
1737 @return: Priority object for the given priority
1740 q = session.query(Priority).filter_by(priority=priority)
# NOTE(review): the try/return and None fallback lines are elided here.
1744 except NoResultFound:
1747 __all__.append('get_priority')
# Build a {priority_name: priority_id} dict over all Priority rows.
1750 def get_priorities(session=None):
1752 Returns dictionary of priority names -> id mappings
1754 @type session: Session
1755 @param session: Optional SQL session object (a temporary one will be
1756 generated if not supplied)
1759 @return: dictionary of priority names -> id mappings
1763 q = session.query(Priority)
# NOTE(review): the dict initialisation, loop header and return are elided.
1765 ret[x.priority] = x.priority_id
1769 __all__.append('get_priorities')
1771 ################################################################################
# ORM class for a row of the 'section' table; compares equal to a plain
# string naming the section.
1773 class Section(ORMObject):
1774 def __init__(self, section = None):
1775 self.section = section
1777 def properties(self):
1778 return ['section', 'section_id', 'overrides_count']
# NOTE(review): the return line of not_null_constraints is elided here.
1780 def not_null_constraints(self):
1783 def __eq__(self, val):
1784 if isinstance(val, str):
1785 return (self.section == val)
1786 # This signals to use the normal comparison operator
1787 return NotImplemented
1789 def __ne__(self, val):
1790 if isinstance(val, str):
1791 return (self.section != val)
1792 # This signals to use the normal comparison operator
1793 return NotImplemented

1795 __all__.append('Section')
# Look up a Section row by its name.
1798 def get_section(section, session=None):
1800 Returns Section object for given C{section name}.
1802 @type section: string
1803 @param section: The name of the section
1805 @type session: Session
1806 @param session: Optional SQLA session object (a temporary one will be
1807 generated if not supplied)
1810 @return: Section object for the given section name
1813 q = session.query(Section).filter_by(section=section)
# NOTE(review): the try/return and None fallback lines are elided here.
1817 except NoResultFound:
1820 __all__.append('get_section')
# Build a {section_name: section_id} dict over all Section rows.
1823 def get_sections(session=None):
1825 Returns dictionary of section names -> id mappings
1827 @type session: Session
1828 @param session: Optional SQL session object (a temporary one will be
1829 generated if not supplied)
1832 @return: dictionary of section names -> id mappings
1836 q = session.query(Section)
# NOTE(review): the dict initialisation, loop header and return are elided.
1838 ret[x.section] = x.section_id
1842 __all__.append('get_sections')
1844 ################################################################################
# ORM class recording one seen signature (fingerprint, timestamp, content
# hash) so replayed uploads can be detected.
1846 class SignatureHistory(ORMObject):
# NOTE(review): presumably decorated with @classmethod (decorator line and
# the 'self = cls()' construction are elided in this excerpt) — verify.
1848 def from_signed_file(cls, signed_file):
1849 """signature history entry from signed file
1851 @type signed_file: L{daklib.gpg.SignedFile}
1852 @param signed_file: signed file
1854 @rtype: L{SignatureHistory}
1857 self.fingerprint = signed_file.primary_fingerprint
1858 self.signature_timestamp = signed_file.signature_timestamp
1859 self.contents_sha1 = signed_file.contents_sha1()

1862 __all__.append('SignatureHistory')
1864 ################################################################################
# ORM class for a row of the 'src_contents' table (file name in a source
# package).
1866 class SrcContents(ORMObject):
1867 def __init__(self, file = None, source = None):
# NOTE(review): the 'self.file = file' line is elided in this excerpt.
1869 self.source = source
1871 def properties(self):
1872 return ['file', 'source']

1874 __all__.append('SrcContents')
1876 ################################################################################
1878 from debian.debfile import Deb822
1880 # Temporary Deb822 subclass to fix bugs with : handling; see #597249
1881 class Dak822(Deb822):
1882 def _internal_parser(self, sequence, fields=None):
1883 # The key is non-whitespace, non-colon characters before any colon.
1884 key_part = r"^(?P<key>[^: \t\n\r\f\v]+)\s*:\s*"
1885 single = re.compile(key_part + r"(?P<data>\S.*?)\s*$")
1886 multi = re.compile(key_part + r"$")
1887 multidata = re.compile(r"^\s(?P<data>.+?)\s*$")
1889 wanted_field = lambda f: fields is None or f in fields
1891 if isinstance(sequence, basestring):
1892 sequence = sequence.splitlines()
1896 for line in self.gpg_stripped_paragraph(sequence):
1897 m = single.match(line)
1900 self[curkey] = content
1902 if not wanted_field(m.group('key')):
1906 curkey = m.group('key')
1907 content = m.group('data')
1910 m = multi.match(line)
1913 self[curkey] = content
1915 if not wanted_field(m.group('key')):
1919 curkey = m.group('key')
1923 m = multidata.match(line)
1925 content += '\n' + line # XXX not m.group('data')?
1929 self[curkey] = content
# ORM class for a row of the 'source' table (a source package version in the
# pool), with helpers to read its .dsc control fields and scan its contents.
1932 class DBSource(ORMObject):
1933 def __init__(self, source = None, version = None, maintainer = None, \
1934 changedby = None, poolfile = None, install_date = None, fingerprint = None):
1935 self.source = source
1936 self.version = version
1937 self.maintainer = maintainer
1938 self.changedby = changedby
1939 self.poolfile = poolfile
1940 self.install_date = install_date
1941 self.fingerprint = fingerprint
# NOTE(review): the enclosing property/def line for this return is elided.
1945 return self.source_id
1947 def properties(self):
1948 return ['source', 'source_id', 'maintainer', 'changedby', \
1949 'fingerprint', 'poolfile', 'version', 'suites_count', \
1950 'install_date', 'binaries_count', 'uploaders_count']
1952 def not_null_constraints(self):
1953 return ['source', 'version', 'install_date', 'maintainer', \
1954 'changedby', 'poolfile']
# Parse the .dsc in the pool via the Dak822 parser defined above.
1956 def read_control_fields(self):
1958 Reads the control information from a dsc
1961 @return: fields is the dsc information in a dictionary form
1963 fullpath = self.poolfile.fullpath
1964 fields = Dak822(open(self.poolfile.fullpath, 'r'))
# Maps metadata keys to their values through the source_metadata relation.
1967 metadata = association_proxy('key', 'value')
1969 def scan_contents(self):
1971 Returns a set of names for non directories. The path names are
1972 normalized after converting them from either utf-8 or iso8859-1
1975 fullpath = self.poolfile.fullpath
1976 from daklib.contents import UnpackedSource
1977 unpacked = UnpackedSource(fullpath)
# NOTE(review): the result-set initialisation and try/add/return lines are
# elided in this excerpt.
1979 for name in unpacked.get_all_filenames():
1980 # enforce proper utf-8 encoding
1982 name.decode('utf-8')
1983 except UnicodeDecodeError:
1984 name = name.decode('iso8859-1').encode('utf-8')
1988 __all__.append('DBSource')
# Check that the source package for a binary upload exists in one of the
# given suites (or any suite they 'Enhance'), accepting bin-only NMU versions.
1991 def source_exists(source, source_version, suites = ["any"], session=None):
1993 Ensure that source exists somewhere in the archive for the binary
1994 upload being processed.
1995 1. exact match => 1.0-3
1996 2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
1998 @type source: string
1999 @param source: source name
2001 @type source_version: string
2002 @param source_version: expected source version
2005 @param suites: list of suites to check in, default I{any}
2007 @type session: Session
2008 @param session: Optional SQLA session object (a temporary one will be
2009 generated if not supplied)
2012 @return: returns 1 if a source with expected version is found, otherwise 0
# Strip a '+bN' binNMU suffix so '1.0-3+b1' also matches source '1.0-3'.
2019 from daklib.regexes import re_bin_only_nmu
2020 orig_source_version = re_bin_only_nmu.sub('', source_version)
2022 for suite in suites:
2023 q = session.query(DBSource).filter_by(source=source). \
2024 filter(DBSource.version.in_([source_version, orig_source_version]))
# NOTE(review): the 'suite != "any"' guard and the count/return lines are
# elided in this excerpt.
2026 # source must exist in 'suite' or a suite that is enhanced by 'suite'
2027 s = get_suite(suite, session)
2029 enhances_vcs = session.query(VersionCheck).filter(VersionCheck.suite==s).filter_by(check='Enhances')
2030 considered_suites = [ vc.reference for vc in enhances_vcs ]
2031 considered_suites.append(s)
2033 q = q.filter(DBSource.suites.any(Suite.suite_id.in_([s.suite_id for s in considered_suites])))
2038 # No source found so return not ok
2043 __all__.append('source_exists')
# List every Suite containing a source package with the given name.
2046 def get_suites_source_in(source, session=None):
2048 Returns list of Suite objects which given C{source} name is in
2051 @param source: DBSource package name to search for
2054 @return: list of Suite objects for the given source
2057 return session.query(Suite).filter(Suite.sources.any(source=source)).all()

2059 __all__.append('get_suites_source_in')
# Query DBSource rows by source name, optionally narrowed by version and the
# dm_upload_allowed flag.
2062 def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
2064 Returns list of DBSource objects for given C{source} name and other parameters
2067 @param source: DBSource package name to search for
2069 @type version: str or None
2070 @param version: DBSource version name to search for or None if not applicable
2072 @type dm_upload_allowed: bool
2073 @param dm_upload_allowed: If None, no effect. If True or False, only
2074 return packages with that dm_upload_allowed setting
2076 @type session: Session
2077 @param session: Optional SQL session object (a temporary one will be
2078 generated if not supplied)
2081 @return: list of DBSource objects for the given name (may be empty)
2084 q = session.query(DBSource).filter_by(source=source)
2086 if version is not None:
2087 q = q.filter_by(version=version)
2089 if dm_upload_allowed is not None:
2090 q = q.filter_by(dm_upload_allowed=dm_upload_allowed)
# NOTE(review): the final 'return q.all()' line is elided in this excerpt.
2094 __all__.append('get_sources_from_name')
2096 # FIXME: This function fails badly if it finds more than 1 source package and
2097 # its implementation is trivial enough to be inlined.
# Return the single DBSource for (source, suite) via Suite.get_sources().
2099 def get_source_in_suite(source, suite, session=None):
2101 Returns a DBSource object for a combination of C{source} and C{suite}.
2103 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2104 - B{suite} - a suite name, eg. I{unstable}
2106 @type source: string
2107 @param source: source package name
2110 @param suite: the suite name
2113 @return: the version for I{source} in I{suite}
2117 q = get_suite(suite, session).get_sources(source)
# NOTE(review): the try/return and None fallback lines are elided here.
2120 except NoResultFound:
2123 __all__.append('get_source_in_suite')
# Copy the control fields of a DBBinary/DBSource into its metadata mapping,
# coercing each value to a byte string (ASCII, then UTF-8, then ISO-8859-1).
2126 def import_metadata_into_db(obj, session=None):
2128 This routine works on either DBBinary or DBSource objects and imports
2129 their metadata into the database
2131 fields = obj.read_control_fields()
2132 for k in fields.keys():
# NOTE(review): the try/except nesting lines around these conversions are
# partially elided in this excerpt.
2135 val = str(fields[k])
2136 except UnicodeEncodeError:
2137 # Fall back to UTF-8
2139 val = fields[k].encode('utf-8')
2140 except UnicodeEncodeError:
2141 # Finally try iso8859-1
2142 val = fields[k].encode('iso8859-1')
2143 # Otherwise we allow the exception to percolate up and we cause
2144 # a reject as someone is playing silly buggers
2146 obj.metadata[get_or_set_metadatakey(k, session)] = val
2148 session.commit_or_flush()

2150 __all__.append('import_metadata_into_db')
2152 ################################################################################
# ORM class for a row of the 'src_format' table (e.g. '3.0 (quilt)').
2154 class SrcFormat(object):
2155 def __init__(self, *args, **kwargs):
# NOTE(review): the __repr__ def line is elided in this excerpt.
2159 return '<SrcFormat %s>' % (self.format_name)

2161 __all__.append('SrcFormat')
2163 ################################################################################
# (display name, Suite attribute) pairs used by Suite.details() when pretty-
# printing a suite.  NOTE(review): at least one entry is elided in this excerpt.
2165 SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
2166 ('SuiteID', 'suite_id'),
2167 ('Version', 'version'),
2168 ('Origin', 'origin'),
2170 ('Description', 'description'),
2171 ('Untouchable', 'untouchable'),
2172 ('Announce', 'announce'),
2173 ('Codename', 'codename'),
2174 ('OverrideCodename', 'overridecodename'),
2175 ('ValidTime', 'validtime'),
2176 ('Priority', 'priority'),
2177 ('NotAutomatic', 'notautomatic'),
2178 ('CopyChanges', 'copychanges'),
2179 ('OverrideSuite', 'overridesuite')]
2181 # Why the heck don't we have any UNIQUE constraints in table suite?
2182 # TODO: Add UNIQUE constraints for appropriate columns.
# ORM class for a row of the 'suite' table; compares equal to a plain string
# naming the suite and offers architecture/source lookup helpers.
2183 class Suite(ORMObject):
2184 def __init__(self, suite_name = None, version = None):
2185 self.suite_name = suite_name
2186 self.version = version
2188 def properties(self):
2189 return ['suite_name', 'version', 'sources_count', 'binaries_count', \
2192 def not_null_constraints(self):
2193 return ['suite_name']
2195 def __eq__(self, val):
2196 if isinstance(val, str):
2197 return (self.suite_name == val)
2198 # This signals to use the normal comparison operator
2199 return NotImplemented
2201 def __ne__(self, val):
2202 if isinstance(val, str):
2203 return (self.suite_name != val)
2204 # This signals to use the normal comparison operator
2205 return NotImplemented
# Render 'Field: value' lines for every set attribute in SUITE_FIELDS.
# NOTE(review): the enclosing def line and list initialisation are elided.
2209 for disp, field in SUITE_FIELDS:
2210 val = getattr(self, field, None)
2212 ret.append("%s: %s" % (disp, val))
2214 return "\n".join(ret)
2216 def get_architectures(self, skipsrc=False, skipall=False):
2218 Returns list of Architecture objects
2220 @type skipsrc: boolean
2221 @param skipsrc: Whether to skip returning the 'source' architecture entry
2224 @type skipall: boolean
2225 @param skipall: Whether to skip returning the 'all' architecture entry
2229 @return: list of Architecture objects for the given name (may be empty)
2232 q = object_session(self).query(Architecture).with_parent(self)
# NOTE(review): the 'if skipsrc:' / 'if skipall:' guard lines are elided.
2234 q = q.filter(Architecture.arch_string != 'source')
2236 q = q.filter(Architecture.arch_string != 'all')
2237 return q.order_by(Architecture.arch_string).all()
2239 def get_sources(self, source):
2241 Returns a query object representing DBSource that is part of C{suite}.
2243 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2245 @type source: string
2246 @param source: source package name
2248 @rtype: sqlalchemy.orm.query.Query
2249 @return: a query of DBSource
2253 session = object_session(self)
2254 return session.query(DBSource).filter_by(source = source). \
# Overrides may live in a different suite (e.g. proposed-updates uses the
# base suite's overrides); fall back to self when no override suite is set.
2257 def get_overridesuite(self):
2258 if self.overridesuite is None:
2261 return object_session(self).query(Suite).filter_by(suite_name=self.overridesuite).one()
# NOTE(review): the enclosing property/def line for this return is elided.
2265 return os.path.join(self.archive.path, 'dists', self.suite_name)

2267 __all__.append('Suite')
# Look up a Suite row by its name.
2270 def get_suite(suite, session=None):
2272 Returns Suite object for given C{suite name}.
2275 @param suite: The name of the suite
2277 @type session: Session
2278 @param session: Optional SQLA session object (a temporary one will be
2279 generated if not supplied)
2282 @return: Suite object for the requested suite name (None if not present)
2285 q = session.query(Suite).filter_by(suite_name=suite)
# NOTE(review): the try/return and None fallback lines are elided here.
2289 except NoResultFound:
2292 __all__.append('get_suite')
2294 ################################################################################
# Convenience wrapper: Suite.get_architectures for a suite looked up by name;
# a missing suite (get_suite -> None) is caught via AttributeError.
2297 def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
2299 Returns list of Architecture objects for given C{suite} name. The list is
2300 empty if suite does not exist.
2303 @param suite: Suite name to search for
2305 @type skipsrc: boolean
2306 @param skipsrc: Whether to skip returning the 'source' architecture entry
2309 @type skipall: boolean
2310 @param skipall: Whether to skip returning the 'all' architecture entry
2313 @type session: Session
2314 @param session: Optional SQL session object (a temporary one will be
2315 generated if not supplied)
2318 @return: list of Architecture objects for the given name (may be empty)
2322 return get_suite(suite, session).get_architectures(skipsrc, skipall)
2323 except AttributeError:
2326 __all__.append('get_suite_architectures')
2328 ################################################################################
# ORM class for a row of the 'uid' table (an uploader identity); compares
# equal to a plain string naming the uid.
2330 class Uid(ORMObject):
2331 def __init__(self, uid = None, name = None):
# NOTE(review): the attribute assignments in __init__ are elided here.
2335 def __eq__(self, val):
2336 if isinstance(val, str):
2337 return (self.uid == val)
2338 # This signals to use the normal comparison operator
2339 return NotImplemented
2341 def __ne__(self, val):
2342 if isinstance(val, str):
2343 return (self.uid != val)
2344 # This signals to use the normal comparison operator
2345 return NotImplemented
2347 def properties(self):
2348 return ['uid', 'name', 'fingerprint']
2350 def not_null_constraints(self):

2353 __all__.append('Uid')
# Fetch the Uid row for *uidname*, inserting it first if missing.
2356 def get_or_set_uid(uidname, session=None):
2358 Returns uid object for given uidname.
2360 If no matching uidname is found, a row is inserted.
2362 @type uidname: string
2363 @param uidname: The uid to add
2365 @type session: SQLAlchemy
2366 @param session: Optional SQL session object (a temporary one will be
2367 generated if not supplied). If not passed, a commit will be performed at
2368 the end of the function, otherwise the caller is responsible for commiting.
2371 @return: the uid object for the given uidname
2374 q = session.query(Uid).filter_by(uid=uidname)
# NOTE(review): the insert-on-miss lines are partially elided in this excerpt.
2378 except NoResultFound:
2382 session.commit_or_flush()
2387 __all__.append('get_or_set_uid')
# Look up the Uid that owns the key with the given fingerprint.
2390 def get_uid_from_fingerprint(fpr, session=None):
2391 q = session.query(Uid)
2392 q = q.join(Fingerprint).filter_by(fingerprint=fpr)
# NOTE(review): the try/return and None fallback lines are elided here.
2396 except NoResultFound:
2399 __all__.append('get_uid_from_fingerprint')
2401 ################################################################################
# ORM class for a row of the 'metadata_keys' table (control-field names).
2403 class MetadataKey(ORMObject):
2404 def __init__(self, key = None):
# NOTE(review): the assignment and both return lines are elided here.
2407 def properties(self):
2410 def not_null_constraints(self):

2413 __all__.append('MetadataKey')
# Fetch the MetadataKey row for *keyname*, inserting it first if missing.
2416 def get_or_set_metadatakey(keyname, session=None):
2418 Returns MetadataKey object for given keyname.
2420 If no matching keyname is found, a row is inserted.
2422 @type keyname: string
2423 @param keyname: The keyname to add
2425 @type session: SQLAlchemy
2426 @param session: Optional SQL session object (a temporary one will be
2427 generated if not supplied). If not passed, a commit will be performed at
2428 the end of the function, otherwise the caller is responsible for commiting.
2431 @return: the metadatakey object for the given keyname
2434 q = session.query(MetadataKey).filter_by(key=keyname)
# On a miss, insert the new key and commit-or-flush so it gets an id.
2438 except NoResultFound:
2439 ret = MetadataKey(keyname)
2441 session.commit_or_flush()
2445 __all__.append('get_or_set_metadatakey')
2447 ################################################################################
# ORM class for a row of 'binaries_metadata' (one control field of a binary).
2449 class BinaryMetadata(ORMObject):
2450 def __init__(self, key = None, value = None, binary = None):
# NOTE(review): the key/value assignments and the not_null return are elided.
2453 self.binary = binary
2455 def properties(self):
2456 return ['binary', 'key', 'value']
2458 def not_null_constraints(self):

2461 __all__.append('BinaryMetadata')
2463 ################################################################################
# ORM class for a row of 'source_metadata' (one control field of a source).
2465 class SourceMetadata(ORMObject):
2466 def __init__(self, key = None, value = None, source = None):
# NOTE(review): the key/value assignments and the not_null return are elided.
2469 self.source = source
2471 def properties(self):
2472 return ['source', 'key', 'value']
2474 def not_null_constraints(self):

2477 __all__.append('SourceMetadata')
2479 ################################################################################
# ORM class for a row of 'version_check' (inter-suite version constraints
# such as 'Enhances', 'MustBeNewerThan').
2481 class VersionCheck(ORMObject):
2482 def __init__(self, *args, **kwargs):
2485 def properties(self):
2486 #return ['suite_id', 'check', 'reference_id']
2489 def not_null_constraints(self):
2490 return ['suite', 'check', 'reference']

2492 __all__.append('VersionCheck')
# List VersionCheck rows for a suite, optionally limited to one check kind.
2495 def get_version_checks(suite_name, check = None, session = None):
2496 suite = get_suite(suite_name, session)
2498 # Make sure that what we return is iterable so that list comprehensions
2499 # involving this don't cause a traceback
# NOTE(review): the missing-suite early return, the 'if check:' guard and the
# final return are elided in this excerpt.
2501 q = session.query(VersionCheck).filter_by(suite=suite)
2503 q = q.filter_by(check=check)
2506 __all__.append('get_version_checks')
2508 ################################################################################
# DBConn: central database connection/ORM bootstrap for this module.
# NOTE(review): this whole class is an elided extraction — many original
# lines are missing (docstring quotes, `tables = [` / `views = [` list
# headers, `try:` lines, several closing brackets). All code lines below are
# kept byte-identical; only comments are added.
2510 class DBConn(object):
2512     database module init.
# Shared-state ("Borg") singleton: every instance aliases its __dict__ to a
# class-level __shared_state dict (presumably declared on an elided line),
# so all DBConn() instances see the same attributes. First construction
# performs the real initialisation; note kwargs.has_key is Python-2-only.
2516 def __init__(self, *args, **kwargs):
2517 self.__dict__ = self.__shared_state
2519 if not getattr(self, 'initialised', False):
2520 self.initialised = True
2521 self.debug = kwargs.has_key('debug')
# Reflect database tables and views from the live schema (autoload=True)
# and expose them as self.tbl_<name> / self.view_<name> attributes.
# The `tables = [...]` and `views = [...]` list headers are on elided lines;
# only a subset of the entries is visible here.
2524 def __setuptables(self):
2527 'acl_architecture_map',
2528 'acl_fingerprint_map',
2535 'binaries_metadata',
2543 'external_overrides',
2544 'extra_src_references',
2546 'files_archive_map',
2552 # TODO: the maintainer column in table override should be removed.
2556 'policy_queue_upload',
2557 'policy_queue_upload_binaries_map',
2558 'policy_queue_byhand_file',
2561 'signature_history',
2570 'suite_architectures',
2571 'suite_build_queue_copy',
2572 'suite_src_formats',
2578 'almost_obsolete_all_associations',
2579 'almost_obsolete_src_associations',
2580 'any_associations_source',
2581 'bin_associations_binaries',
2582 'binaries_suite_arch',
2585 'newest_all_associations',
2586 'newest_any_associations',
2588 'newest_src_association',
2589 'obsolete_all_associations',
2590 'obsolete_any_associations',
2591 'obsolete_any_by_all_associations',
2592 'obsolete_src_associations',
2594 'src_associations_bin',
2595 'src_associations_src',
2596 'suite_arch_by_name',
# useexisting=True allows re-running reflection against the same MetaData.
2599 for table_name in tables:
2600 table = Table(table_name, self.db_meta, \
2601 autoload=True, useexisting=True)
2602 setattr(self, 'tbl_%s' % table_name, table)
2604 for view_name in views:
2605 view = Table(view_name, self.db_meta, autoload=True)
2606 setattr(self, 'view_%s' % view_name, view)
# Wire the ORM classes defined earlier in this file onto the reflected
# tables using the classic (pre-declarative) SQLAlchemy mapper()/relation()
# API. `extension = validator` hooks the module's MapperExtension-based
# validation into the mappers that use it. Several mappers below are
# missing their `properties = dict(` opener and/or closing parens on
# elided lines.
2608 def __setupmappers(self):
2609 mapper(Architecture, self.tbl_architecture,
2610 properties = dict(arch_id = self.tbl_architecture.c.id,
2611 suites = relation(Suite, secondary=self.tbl_suite_architectures,
2612 order_by=self.tbl_suite.c.suite_name,
2613 backref=backref('architectures', order_by=self.tbl_architecture.c.arch_string))),
2614 extension = validator)
2616 mapper(ACL, self.tbl_acl,
2618 architectures = relation(Architecture, secondary=self.tbl_acl_architecture_map, collection_class=set),
2619 fingerprints = relation(Fingerprint, secondary=self.tbl_acl_fingerprint_map, collection_class=set),
2620 match_keyring = relation(Keyring, primaryjoin=(self.tbl_acl.c.match_keyring_id == self.tbl_keyrings.c.id)),
2621 per_source = relation(ACLPerSource, collection_class=set),
2624 mapper(ACLPerSource, self.tbl_acl_per_source,
2626 acl = relation(ACL),
# Two relations to Fingerprint need explicit primaryjoins to disambiguate
# the fingerprint_id vs created_by_id foreign keys.
2627 fingerprint = relation(Fingerprint, primaryjoin=(self.tbl_acl_per_source.c.fingerprint_id == self.tbl_fingerprint.c.id)),
2628 created_by = relation(Fingerprint, primaryjoin=(self.tbl_acl_per_source.c.created_by_id == self.tbl_fingerprint.c.id)),
2631 mapper(Archive, self.tbl_archive,
2632 properties = dict(archive_id = self.tbl_archive.c.id,
2633 archive_name = self.tbl_archive.c.name))
2635 mapper(ArchiveFile, self.tbl_files_archive_map,
2636 properties = dict(archive = relation(Archive, backref='files'),
2637 component = relation(Component),
2638 file = relation(PoolFile, backref='archives')))
2640 mapper(BuildQueue, self.tbl_build_queue,
2641 properties = dict(queue_id = self.tbl_build_queue.c.id,
2642 suite = relation(Suite, primaryjoin=(self.tbl_build_queue.c.suite_id==self.tbl_suite.c.id))))
# DBBinary: aliases raw FK columns (maintainer_id, source_id, ...) next to
# their relation() counterparts so callers can use either.
2644 mapper(DBBinary, self.tbl_binaries,
2645 properties = dict(binary_id = self.tbl_binaries.c.id,
2646 package = self.tbl_binaries.c.package,
2647 version = self.tbl_binaries.c.version,
2648 maintainer_id = self.tbl_binaries.c.maintainer,
2649 maintainer = relation(Maintainer),
2650 source_id = self.tbl_binaries.c.source,
2651 source = relation(DBSource, backref='binaries'),
2652 arch_id = self.tbl_binaries.c.architecture,
2653 architecture = relation(Architecture),
2654 poolfile_id = self.tbl_binaries.c.file,
2655 poolfile = relation(PoolFile),
2656 binarytype = self.tbl_binaries.c.type,
2657 fingerprint_id = self.tbl_binaries.c.sig_fpr,
2658 fingerprint = relation(Fingerprint),
2659 install_date = self.tbl_binaries.c.install_date,
# lazy='dynamic' backrefs yield query objects rather than loading full lists.
2660 suites = relation(Suite, secondary=self.tbl_bin_associations,
2661 backref=backref('binaries', lazy='dynamic')),
2662 extra_sources = relation(DBSource, secondary=self.tbl_extra_src_references,
2663 backref=backref('extra_binary_references', lazy='dynamic')),
# Metadata exposed as a dict keyed by metadata key; cascade='all' deletes
# metadata rows with the binary.
2664 key = relation(BinaryMetadata, cascade='all',
2665 collection_class=attribute_mapped_collection('key'))),
2666 extension = validator)
2668 mapper(Component, self.tbl_component,
2669 properties = dict(component_id = self.tbl_component.c.id,
2670 component_name = self.tbl_component.c.name),
2671 extension = validator)
2673 mapper(DBConfig, self.tbl_config,
2674 properties = dict(config_id = self.tbl_config.c.id))
2676 mapper(DSCFile, self.tbl_dsc_files,
2677 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
2678 source_id = self.tbl_dsc_files.c.source,
2679 source = relation(DBSource),
2680 poolfile_id = self.tbl_dsc_files.c.file,
2681 poolfile = relation(PoolFile)))
2683 mapper(ExternalOverride, self.tbl_external_overrides,
2685 suite_id = self.tbl_external_overrides.c.suite,
2686 suite = relation(Suite),
2687 component_id = self.tbl_external_overrides.c.component,
2688 component = relation(Component)))
2690 mapper(PoolFile, self.tbl_files,
2691 properties = dict(file_id = self.tbl_files.c.id,
2692 filesize = self.tbl_files.c.size),
2693 extension = validator)
2695 mapper(Fingerprint, self.tbl_fingerprint,
2696 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
2697 uid_id = self.tbl_fingerprint.c.uid,
2698 uid = relation(Uid),
2699 keyring_id = self.tbl_fingerprint.c.keyring,
2700 keyring = relation(Keyring),
2701 acl = relation(ACL)),
2702 extension = validator)
2704 mapper(Keyring, self.tbl_keyrings,
2705 properties = dict(keyring_name = self.tbl_keyrings.c.name,
2706 keyring_id = self.tbl_keyrings.c.id,
2707 acl = relation(ACL, primaryjoin=(self.tbl_keyrings.c.acl_id == self.tbl_acl.c.id)))),
2709 mapper(DBChange, self.tbl_changes,
2710 properties = dict(change_id = self.tbl_changes.c.id,
2711 seen = self.tbl_changes.c.seen,
2712 source = self.tbl_changes.c.source,
2713 binaries = self.tbl_changes.c.binaries,
2714 architecture = self.tbl_changes.c.architecture,
2715 distribution = self.tbl_changes.c.distribution,
2716 urgency = self.tbl_changes.c.urgency,
2717 maintainer = self.tbl_changes.c.maintainer,
2718 changedby = self.tbl_changes.c.changedby,
2719 date = self.tbl_changes.c.date,
2720 version = self.tbl_changes.c.version))
# Maintainer appears twice as FK target on source (maintainer/changedby),
# hence the explicit primaryjoins.
2722 mapper(Maintainer, self.tbl_maintainer,
2723 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
2724 maintains_sources = relation(DBSource, backref='maintainer',
2725 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
2726 changed_sources = relation(DBSource, backref='changedby',
2727 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
2728 extension = validator)
2730 mapper(NewComment, self.tbl_new_comments,
2731 properties = dict(comment_id = self.tbl_new_comments.c.id,
2732 policy_queue = relation(PolicyQueue)))
2734 mapper(Override, self.tbl_override,
2735 properties = dict(suite_id = self.tbl_override.c.suite,
2736 suite = relation(Suite, \
2737 backref=backref('overrides', lazy='dynamic')),
2738 package = self.tbl_override.c.package,
2739 component_id = self.tbl_override.c.component,
2740 component = relation(Component, \
2741 backref=backref('overrides', lazy='dynamic')),
2742 priority_id = self.tbl_override.c.priority,
2743 priority = relation(Priority, \
2744 backref=backref('overrides', lazy='dynamic')),
2745 section_id = self.tbl_override.c.section,
2746 section = relation(Section, \
2747 backref=backref('overrides', lazy='dynamic')),
2748 overridetype_id = self.tbl_override.c.type,
2749 overridetype = relation(OverrideType, \
2750 backref=backref('overrides', lazy='dynamic'))))
2752 mapper(OverrideType, self.tbl_override_type,
2753 properties = dict(overridetype = self.tbl_override_type.c.type,
2754 overridetype_id = self.tbl_override_type.c.id))
2756 mapper(PolicyQueue, self.tbl_policy_queue,
2757 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id,
2758 suite = relation(Suite, primaryjoin=(self.tbl_policy_queue.c.suite_id == self.tbl_suite.c.id))))
2760 mapper(PolicyQueueUpload, self.tbl_policy_queue_upload,
2762 changes = relation(DBChange),
2763 policy_queue = relation(PolicyQueue, backref='uploads'),
2764 target_suite = relation(Suite),
2765 source = relation(DBSource),
2766 binaries = relation(DBBinary, secondary=self.tbl_policy_queue_upload_binaries_map),
2769 mapper(PolicyQueueByhandFile, self.tbl_policy_queue_byhand_file,
2771 upload = relation(PolicyQueueUpload, backref='byhand'),
2775 mapper(Priority, self.tbl_priority,
2776 properties = dict(priority_id = self.tbl_priority.c.id))
2778 mapper(Section, self.tbl_section,
2779 properties = dict(section_id = self.tbl_section.c.id,
2780 section=self.tbl_section.c.section))
2782 mapper(SignatureHistory, self.tbl_signature_history)
2784 mapper(DBSource, self.tbl_source,
2785 properties = dict(source_id = self.tbl_source.c.id,
2786 version = self.tbl_source.c.version,
2787 maintainer_id = self.tbl_source.c.maintainer,
2788 poolfile_id = self.tbl_source.c.file,
2789 poolfile = relation(PoolFile),
2790 fingerprint_id = self.tbl_source.c.sig_fpr,
2791 fingerprint = relation(Fingerprint),
2792 changedby_id = self.tbl_source.c.changedby,
2793 srcfiles = relation(DSCFile,
2794 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
2795 suites = relation(Suite, secondary=self.tbl_src_associations,
2796 backref=backref('sources', lazy='dynamic')),
2797 uploaders = relation(Maintainer,
2798 secondary=self.tbl_src_uploaders),
2799 key = relation(SourceMetadata, cascade='all',
2800 collection_class=attribute_mapped_collection('key'))),
2801 extension = validator)
2803 mapper(SrcFormat, self.tbl_src_format,
2804 properties = dict(src_format_id = self.tbl_src_format.c.id,
2805 format_name = self.tbl_src_format.c.format_name))
# Suite carries two PolicyQueue FKs (policy_queue_id/new_queue_id), each
# disambiguated via its own primaryjoin.
2807 mapper(Suite, self.tbl_suite,
2808 properties = dict(suite_id = self.tbl_suite.c.id,
2809 policy_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.policy_queue_id == self.tbl_policy_queue.c.id)),
2810 new_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.new_queue_id == self.tbl_policy_queue.c.id)),
2811 copy_queues = relation(BuildQueue,
2812 secondary=self.tbl_suite_build_queue_copy),
2813 srcformats = relation(SrcFormat, secondary=self.tbl_suite_src_formats,
2814 backref=backref('suites', lazy='dynamic')),
2815 archive = relation(Archive, backref='suites'),
2816 acls = relation(ACL, secondary=self.tbl_suite_acl_map, collection_class=set),
2817 components = relation(Component, secondary=self.tbl_component_suite,
2818 order_by=self.tbl_component.c.ordering,
2819 backref=backref('suites'))),
2820 extension = validator)
2822 mapper(Uid, self.tbl_uid,
2823 properties = dict(uid_id = self.tbl_uid.c.id,
2824 fingerprint = relation(Fingerprint)),
2825 extension = validator)
2827 mapper(BinContents, self.tbl_bin_contents,
2829 binary = relation(DBBinary,
2830 backref=backref('contents', lazy='dynamic', cascade='all')),
2831 file = self.tbl_bin_contents.c.file))
2833 mapper(SrcContents, self.tbl_src_contents,
2835 source = relation(DBSource,
2836 backref=backref('contents', lazy='dynamic', cascade='all')),
2837 file = self.tbl_src_contents.c.file))
2839 mapper(MetadataKey, self.tbl_metadata_keys,
2841 key_id = self.tbl_metadata_keys.c.key_id,
2842 key = self.tbl_metadata_keys.c.key))
2844 mapper(BinaryMetadata, self.tbl_binaries_metadata,
2846 binary_id = self.tbl_binaries_metadata.c.bin_id,
2847 binary = relation(DBBinary),
2848 key_id = self.tbl_binaries_metadata.c.key_id,
2849 key = relation(MetadataKey),
2850 value = self.tbl_binaries_metadata.c.value))
2852 mapper(SourceMetadata, self.tbl_source_metadata,
2854 source_id = self.tbl_source_metadata.c.src_id,
2855 source = relation(DBSource),
2856 key_id = self.tbl_source_metadata.c.key_id,
2857 key = relation(MetadataKey),
2858 value = self.tbl_source_metadata.c.value))
# VersionCheck: suite and reference both point at the suite table, hence
# explicit primaryjoins; reference eager-loads via lazy='joined'.
2860 mapper(VersionCheck, self.tbl_version_check,
2862 suite_id = self.tbl_version_check.c.suite,
2863 suite = relation(Suite, primaryjoin=self.tbl_version_check.c.suite==self.tbl_suite.c.id),
2864 reference_id = self.tbl_version_check.c.reference,
2865 reference = relation(Suite, primaryjoin=self.tbl_version_check.c.reference==self.tbl_suite.c.id, lazy='joined')))
2867 ## Connection functions
# Build a PostgreSQL connection string from the dak configuration
# (DB::Service / DB::Host / DB::Name / DB::Port), create the engine,
# reflect tables and set up mappers. NOTE(review): the `cnf = Config()`
# assignment, the final else-branch header, the `try:` that pairs with the
# OperationalError handler, and the sessionmaker kwargs continuation are
# all on elided lines. cnf.has_key(...) is Python-2-only dict API.
2868 def __createconn(self):
2869 from config import Config
# Service-style connection: "service=NAME" resolved via pg_service.conf.
2871 if cnf.has_key("DB::Service"):
2872 connstr = "postgresql://service=%s" % cnf["DB::Service"]
2873 elif cnf.has_key("DB::Host"):
# TCP connection; DB::Port of "-1" means "use the default port".
2875 connstr = "postgresql://%s" % cnf["DB::Host"]
2876 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
2877 connstr += ":%s" % cnf["DB::Port"]
2878 connstr += "/%s" % cnf["DB::Name"]
# (elided else:) local socket connection; port passed as a query parameter.
2881 connstr = "postgresql:///%s" % cnf["DB::Name"]
2882 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
2883 connstr += "?port=%s" % cnf["DB::Port"]
# Engine options: SQL echo follows the debug flag; pool sizing and native
# unicode handling are configurable. sa_major_version is defined elsewhere
# in this module (not visible in this extract).
2885 engine_args = { 'echo': self.debug }
2886 if cnf.has_key('DB::PoolSize'):
2887 engine_args['pool_size'] = int(cnf['DB::PoolSize'])
2888 if cnf.has_key('DB::MaxOverflow'):
2889 engine_args['max_overflow'] = int(cnf['DB::MaxOverflow'])
2890 if sa_major_version != '0.5' and cnf.has_key('DB::Unicode') and \
2891 cnf['DB::Unicode'] == 'false':
2892 engine_args['use_native_unicode'] = False
2894 # Monkey patch a new dialect in in order to support service= syntax
2895 import sqlalchemy.dialects.postgresql
2896 from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
2897 class PGDialect_psycopg2_dak(PGDialect_psycopg2):
2898 def create_connect_args(self, url):
2899 if str(url).startswith('postgresql://service='):
# Strip the leading 'postgresql://' (21 chars) to get 'service=NAME',
# which psycopg2 accepts as a DSN directly.
2901 servicename = str(url)[21:]
2902 return (['service=%s' % servicename], {})
2904 return PGDialect_psycopg2.create_connect_args(self, url)
2906 sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak
2909 self.db_pg = create_engine(connstr, **engine_args)
2910 self.db_meta = MetaData()
2911 self.db_meta.bind = self.db_pg
2912 self.db_smaker = sessionmaker(bind=self.db_pg,
2916 self.__setuptables()
2917 self.__setupmappers()
# (the matching `try:` for this handler is on an elided line)
2919 except OperationalError as e:
2921 utils.fubar("Cannot connect to database (%s)" % str(e))
# Remember the creating PID so session() can detect fork()ed children.
2923 self.pid = os.getpid()
2925 def session(self, work_mem = 0):
2927     Returns a new session object. If a work_mem parameter is provided a new
2928     transaction is started and the work_mem parameter is set for this
2929     transaction. The work_mem parameter is measured in MB. A default value
2930     will be used if the parameter is not set.
2932 # reinitialize DBConn in new processes
# Pooled connections must not be shared across fork(); the reinit call
# itself is on an elided line.
2933 if self.pid != os.getpid():
2936 session = self.db_smaker()
# presumably guarded by `if work_mem > 0:` on an elided line; SET LOCAL
# scopes the setting to the current transaction only.
2938 session.execute("SET LOCAL work_mem TO '%d MB'" % work_mem)
2941 __all__.append('DBConn')