5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
37 import daklib.daksubprocess
39 from os.path import normpath
50 import simplejson as json
52 from datetime import datetime, timedelta
53 from errno import ENOENT
54 from tempfile import mkstemp, mkdtemp
55 from tarfile import TarFile
57 from inspect import getargspec
60 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
62 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
63 backref, MapperExtension, EXT_CONTINUE, object_mapper, clear_mappers
64 from sqlalchemy import types as sqltypes
65 from sqlalchemy.orm.collections import attribute_mapped_collection
66 from sqlalchemy.ext.associationproxy import association_proxy
68 # Don't remove this, we re-export the exceptions to scripts which import us
69 from sqlalchemy.exc import *
70 from sqlalchemy.orm.exc import NoResultFound
72 # Only import Config until Queue stuff is changed to store its config
74 from config import Config
75 from textutils import fix_maintainer
76 from dak_exceptions import DBUpdateError, NoSourceFieldError, FileExistsError
78 # suppress some deprecation warnings in squeeze related to sqlalchemy
80 warnings.filterwarnings('ignore', \
81 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
83 warnings.filterwarnings('ignore', \
84 "Predicate of partial index .* ignored during reflection", \
88 ################################################################################
90 # Patch in support for the debversion field type so that it works during
94 # that is for sqlalchemy 0.6
95 UserDefinedType = sqltypes.UserDefinedType
97 # this one for sqlalchemy 0.5
98 UserDefinedType = sqltypes.TypeEngine
100 class DebVersion(UserDefinedType):
101 def get_col_spec(self):
104 def bind_processor(self, dialect):
107 # ' = None' is needed for sqlalchemy 0.5:
108 def result_processor(self, dialect, coltype = None):
111 sa_major_version = sqlalchemy.__version__[0:3]
112 if sa_major_version in ["0.5", "0.6", "0.7", "0.8"]:
113 from sqlalchemy.databases import postgres
114 postgres.ischema_names['debversion'] = DebVersion
116 raise Exception("dak only ported to SQLA versions 0.5 to 0.8. See daklib/dbconn.py")
118 ################################################################################
120 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
122 ################################################################################
124 def session_wrapper(fn):
126 Wrapper around common ".., session=None):" handling. If the wrapped
127 function is called without passing 'session', we create a local one
128 and destroy it when the function ends.
130 Also attaches a commit_or_flush method to the session; if we created a
131 local session, this is a synonym for session.commit(), otherwise it is a
132 synonym for session.flush().
135 def wrapped(*args, **kwargs):
136 private_transaction = False
138 # Find the session object
139 session = kwargs.get('session')
142 if len(args) <= len(getargspec(fn)[0]) - 1:
143 # No session specified as last argument or in kwargs
144 private_transaction = True
145 session = kwargs['session'] = DBConn().session()
147 # Session is last argument in args
151 session = args[-1] = DBConn().session()
152 private_transaction = True
154 if private_transaction:
155 session.commit_or_flush = session.commit
157 session.commit_or_flush = session.flush
160 return fn(*args, **kwargs)
162 if private_transaction:
163 # We created a session; close it.
166 wrapped.__doc__ = fn.__doc__
167 wrapped.func_name = fn.func_name
171 __all__.append('session_wrapper')
173 ################################################################################
175 class ORMObject(object):
177 ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
178 derived classes must implement the properties() method.
181 def properties(self):
183 This method should be implemented by all derived classes and returns a
184 list of the important properties. The properties 'created' and
185 'modified' will be added automatically. A suffix '_count' should be
186 added to properties that are lists or query objects. The most important
187 property name should be returned as the first element in the list
188 because it is used by repr().
194 Returns a JSON representation of the object based on the properties
195 returned from the properties() method.
198 # add created and modified
199 all_properties = self.properties() + ['created', 'modified']
200 for property in all_properties:
201 # check for list or query
202 if property[-6:] == '_count':
203 real_property = property[:-6]
204 if not hasattr(self, real_property):
206 value = getattr(self, real_property)
207 if hasattr(value, '__len__'):
210 elif hasattr(value, 'count'):
211 # query (but not during validation)
212 if self.in_validation:
214 value = value.count()
216 raise KeyError('Do not understand property %s.' % property)
218 if not hasattr(self, property):
221 value = getattr(self, property)
225 elif isinstance(value, ORMObject):
226 # use repr() for ORMObject types
229 # we want a string for all other types because json cannot
232 data[property] = value
233 return json.dumps(data)
237 Returns the name of the class.
239 return type(self).__name__
243 Returns a short string representation of the object using the first
244 element from the properties() method.
246 primary_property = self.properties()[0]
247 value = getattr(self, primary_property)
248 return '<%s %s>' % (self.classname(), str(value))
252 Returns a human readable form of the object using the properties()
255 return '<%s %s>' % (self.classname(), self.json())
257 def not_null_constraints(self):
259 Returns a list of properties that must be not NULL. Derived classes
260 should override this method if needed.
264 validation_message = \
265 "Validation failed because property '%s' must not be empty in object\n%s"
267 in_validation = False
271 This function validates the not NULL constraints as returned by
272 not_null_constraints(). It raises the DBUpdateError exception if
275 for property in self.not_null_constraints():
276 # TODO: It is a bit awkward that the mapper configuration allow
277 # directly setting the numeric _id columns. We should get rid of it
279 if hasattr(self, property + '_id') and \
280 getattr(self, property + '_id') is not None:
282 if not hasattr(self, property) or getattr(self, property) is None:
283 # str() might lead to races due to a 2nd flush
284 self.in_validation = True
285 message = self.validation_message % (property, str(self))
286 self.in_validation = False
287 raise DBUpdateError(message)
291 def get(cls, primary_key, session = None):
293 This is a support function that allows getting an object by its primary
296 Architecture.get(3[, session])
298 instead of the more verbose
300 session.query(Architecture).get(3)
302 return session.query(cls).get(primary_key)
304 def session(self, replace = False):
306 Returns the current session that is associated with the object. May
307 return None is object is in detached state.
310 return object_session(self)
312 def clone(self, session = None):
314 Clones the current object in a new session and returns the new clone. A
315 fresh session is created if the optional session parameter is not
316 provided. The function will fail if a session is provided and has
319 RATIONALE: SQLAlchemy's session is not thread safe. This method clones
320 an existing object to allow several threads to work with their own
321 instances of an ORMObject.
323 WARNING: Only persistent (committed) objects can be cloned. Changes
324 made to the original object that are not committed yet will get lost.
325 The session of the new object will always be rolled back to avoid
329 if self.session() is None:
330 raise RuntimeError( \
331 'Method clone() failed for detached object:\n%s' % self)
332 self.session().flush()
333 mapper = object_mapper(self)
334 primary_key = mapper.primary_key_from_instance(self)
335 object_class = self.__class__
337 session = DBConn().session()
338 elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
339 raise RuntimeError( \
340 'Method clone() failed due to unflushed changes in session.')
341 new_object = session.query(object_class).get(primary_key)
343 if new_object is None:
344 raise RuntimeError( \
345 'Method clone() failed for non-persistent object:\n%s' % self)
348 __all__.append('ORMObject')
350 ################################################################################
352 class Validator(MapperExtension):
354 This class calls the validate() method for each instance for the
355 'before_update' and 'before_insert' events. A global object validator is
356 used for configuring the individual mappers.
359 def before_update(self, mapper, connection, instance):
363 def before_insert(self, mapper, connection, instance):
367 validator = Validator()
369 ################################################################################
371 class ACL(ORMObject):
373 return "<ACL {0}>".format(self.name)
375 __all__.append('ACL')
377 class ACLPerSource(ORMObject):
379 return "<ACLPerSource acl={0} fingerprint={1} source={2} reason={3}>".format(self.acl.name, self.fingerprint.fingerprint, self.source, self.reason)
381 __all__.append('ACLPerSource')
383 ################################################################################
class Architecture(ORMObject):
    """ORM class for the 'architecture' table; supports comparison
    against plain architecture-name strings."""

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Compare directly against a bare name like "amd64"; for any
        # other type defer to the default comparison machinery.
        return self.arch_string == val if isinstance(val, str) else NotImplemented

    def __ne__(self, val):
        # Mirror of __eq__ so 'arch != "amd64"' behaves consistently.
        return self.arch_string != val if isinstance(val, str) else NotImplemented

    def properties(self):
        # 'arch_string' first: ORMObject.__repr__ uses the head element.
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']
408 __all__.append('Architecture')
411 def get_architecture(architecture, session=None):
413 Returns database id for given C{architecture}.
415 @type architecture: string
416 @param architecture: The name of the architecture
418 @type session: Session
419 @param session: Optional SQLA session object (a temporary one will be
420 generated if not supplied)
423 @return: Architecture object for the given arch (None if not present)
426 q = session.query(Architecture).filter_by(arch_string=architecture)
430 except NoResultFound:
433 __all__.append('get_architecture')
435 ################################################################################
437 class Archive(object):
438 def __init__(self, *args, **kwargs):
442 return '<Archive %s>' % self.archive_name
444 __all__.append('Archive')
447 def get_archive(archive, session=None):
449 returns database id for given C{archive}.
451 @type archive: string
452 @param archive: the name of the arhive
454 @type session: Session
455 @param session: Optional SQLA session object (a temporary one will be
456 generated if not supplied)
459 @return: Archive object for the given name (None if not present)
462 archive = archive.lower()
464 q = session.query(Archive).filter_by(archive_name=archive)
468 except NoResultFound:
471 __all__.append('get_archive')
473 ################################################################################
475 class ArchiveFile(object):
476 def __init__(self, archive=None, component=None, file=None):
477 self.archive = archive
478 self.component = component
482 return os.path.join(self.archive.path, 'pool', self.component.component_name, self.file.filename)
484 __all__.append('ArchiveFile')
486 ################################################################################
488 class BinContents(ORMObject):
489 def __init__(self, file = None, binary = None):
493 def properties(self):
494 return ['file', 'binary']
496 __all__.append('BinContents')
498 ################################################################################
500 class DBBinary(ORMObject):
501 def __init__(self, package = None, source = None, version = None, \
502 maintainer = None, architecture = None, poolfile = None, \
503 binarytype = 'deb', fingerprint=None):
504 self.package = package
506 self.version = version
507 self.maintainer = maintainer
508 self.architecture = architecture
509 self.poolfile = poolfile
510 self.binarytype = binarytype
511 self.fingerprint = fingerprint
515 return self.binary_id
def properties(self):
    """Attributes exposed via ORMObject's json()/repr(); 'package' first
    because ORMObject.__repr__ uses the head element."""
    return [
        'package', 'version', 'maintainer', 'source', 'architecture',
        'poolfile', 'binarytype', 'fingerprint', 'install_date',
        'suites_count', 'binary_id', 'contents_count', 'extra_sources',
    ]
522 def not_null_constraints(self):
523 return ['package', 'version', 'maintainer', 'source', 'poolfile', \
526 metadata = association_proxy('key', 'value')
528 def scan_contents(self):
530 Yields the contents of the package. Only regular files are yielded and
531 the path names are normalized after converting them from either utf-8
532 or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the
533 package does not contain any regular file.
535 fullpath = self.poolfile.fullpath
536 dpkg_cmd = ('dpkg-deb', '--fsys-tarfile', fullpath)
537 dpkg = daklib.daksubprocess.Popen(dpkg_cmd, stdout=subprocess.PIPE)
538 tar = TarFile.open(fileobj = dpkg.stdout, mode = 'r|')
539 for member in tar.getmembers():
540 if not member.isdir():
541 name = normpath(member.name)
542 # enforce proper utf-8 encoding
545 except UnicodeDecodeError:
546 name = name.decode('iso8859-1').encode('utf-8')
552 def read_control(self):
554 Reads the control information from a binary.
557 @return: stanza text of the control section.
560 fullpath = self.poolfile.fullpath
561 deb_file = open(fullpath, 'r')
562 stanza = utils.deb_extract_control(deb_file)
def read_control_fields(self):
    """
    Reads the control information from a binary and parses it.

    @rtype: dict-like (apt_pkg.TagSection)
    @return: fields of the control section as a dictionary.
    """
    # read_control() yields the raw stanza text; TagSection parses it.
    return apt_pkg.TagSection(self.read_control())
578 __all__.append('DBBinary')
581 def get_suites_binary_in(package, session=None):
583 Returns list of Suite objects which given C{package} name is in
586 @param package: DBBinary package name to search for
589 @return: list of Suite objects for the given package
592 return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()
594 __all__.append('get_suites_binary_in')
597 def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
599 Returns the component name of the newest binary package in suite_list or
600 None if no package is found. The result can be optionally filtered by a list
601 of architecture names.
604 @param package: DBBinary package name to search for
606 @type suite_list: list of str
607 @param suite_list: list of suite_name items
609 @type arch_list: list of str
610 @param arch_list: optional list of arch_string items that defaults to []
612 @rtype: str or NoneType
613 @return: name of component or None
616 q = session.query(DBBinary).filter_by(package = package). \
617 join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
618 if len(arch_list) > 0:
619 q = q.join(DBBinary.architecture). \
620 filter(Architecture.arch_string.in_(arch_list))
621 binary = q.order_by(desc(DBBinary.version)).first()
625 return binary.poolfile.component.component_name
627 __all__.append('get_component_by_package_suite')
629 ################################################################################
631 class BuildQueue(object):
632 def __init__(self, *args, **kwargs):
636 return '<BuildQueue %s>' % self.queue_name
638 __all__.append('BuildQueue')
640 ################################################################################
class Component(ORMObject):
    """ORM class for the 'component' table (main, contrib, ...);
    supports comparison against plain component-name strings."""

    def __init__(self, component_name = None):
        self.component_name = component_name

    def __eq__(self, val):
        # Compare directly against a bare name like "main"; any other
        # type falls back to the default comparison machinery.
        return self.component_name == val if isinstance(val, str) else NotImplemented

    def __ne__(self, val):
        # Mirror of __eq__ so 'component != "main"' behaves consistently.
        return self.component_name != val if isinstance(val, str) else NotImplemented

    def properties(self):
        # 'component_name' first: ORMObject.__repr__ uses the head element.
        return ['component_name', 'component_id', 'description',
                'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        return ['component_name']
666 __all__.append('Component')
669 def get_component(component, session=None):
671 Returns database id for given C{component}.
673 @type component: string
674 @param component: The name of the override type
677 @return: the database id for the given component
680 component = component.lower()
682 q = session.query(Component).filter_by(component_name=component)
686 except NoResultFound:
689 __all__.append('get_component')
692 def get_mapped_component(component_name, session=None):
693 """get component after mappings
695 Evaluate component mappings from ComponentMappings in dak.conf for the
696 given component name.
698 @todo: ansgar wants to get rid of this. It's currently only used for
701 @type component_name: str
702 @param component_name: component name
704 @param session: database session
706 @rtype: L{daklib.dbconn.Component} or C{None}
707 @return: component after applying maps or C{None}
710 for m in cnf.value_list("ComponentMappings"):
711 (src, dst) = m.split()
712 if component_name == src:
714 component = session.query(Component).filter_by(component_name=component_name).first()
717 __all__.append('get_mapped_component')
720 def get_component_names(session=None):
722 Returns list of strings of component names.
725 @return: list of strings of component names
728 return [ x.component_name for x in session.query(Component).all() ]
730 __all__.append('get_component_names')
732 ################################################################################
734 class DBConfig(object):
735 def __init__(self, *args, **kwargs):
739 return '<DBConfig %s>' % self.name
741 __all__.append('DBConfig')
743 ################################################################################
746 def get_or_set_contents_file_id(filename, session=None):
748 Returns database id for given filename.
750 If no matching file is found, a row is inserted.
752 @type filename: string
753 @param filename: The filename
754 @type session: SQLAlchemy
755 @param session: Optional SQL session object (a temporary one will be
756 generated if not supplied). If not passed, a commit will be performed at
757 the end of the function, otherwise the caller is responsible for commiting.
760 @return: the database id for the given component
763 q = session.query(ContentFilename).filter_by(filename=filename)
766 ret = q.one().cafilename_id
767 except NoResultFound:
768 cf = ContentFilename()
769 cf.filename = filename
771 session.commit_or_flush()
772 ret = cf.cafilename_id
776 __all__.append('get_or_set_contents_file_id')
779 def get_contents(suite, overridetype, section=None, session=None):
781 Returns contents for a suite / overridetype combination, limiting
782 to a section if not None.
785 @param suite: Suite object
787 @type overridetype: OverrideType
788 @param overridetype: OverrideType object
790 @type section: Section
791 @param section: Optional section object to limit results to
793 @type session: SQLAlchemy
794 @param session: Optional SQL session object (a temporary one will be
795 generated if not supplied)
798 @return: ResultsProxy object set up to return tuples of (filename, section,
802 # find me all of the contents for a given suite
803 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
807 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
808 JOIN content_file_names n ON (c.filename=n.id)
809 JOIN binaries b ON (b.id=c.binary_pkg)
810 JOIN override o ON (o.package=b.package)
811 JOIN section s ON (s.id=o.section)
812 WHERE o.suite = :suiteid AND o.type = :overridetypeid
813 AND b.type=:overridetypename"""
815 vals = {'suiteid': suite.suite_id,
816 'overridetypeid': overridetype.overridetype_id,
817 'overridetypename': overridetype.overridetype}
819 if section is not None:
820 contents_q += " AND s.id = :sectionid"
821 vals['sectionid'] = section.section_id
823 contents_q += " ORDER BY fn"
825 return session.execute(contents_q, vals)
827 __all__.append('get_contents')
829 ################################################################################
831 class ContentFilepath(object):
832 def __init__(self, *args, **kwargs):
836 return '<ContentFilepath %s>' % self.filepath
838 __all__.append('ContentFilepath')
841 def get_or_set_contents_path_id(filepath, session=None):
843 Returns database id for given path.
845 If no matching file is found, a row is inserted.
847 @type filepath: string
848 @param filepath: The filepath
850 @type session: SQLAlchemy
851 @param session: Optional SQL session object (a temporary one will be
852 generated if not supplied). If not passed, a commit will be performed at
853 the end of the function, otherwise the caller is responsible for commiting.
856 @return: the database id for the given path
859 q = session.query(ContentFilepath).filter_by(filepath=filepath)
862 ret = q.one().cafilepath_id
863 except NoResultFound:
864 cf = ContentFilepath()
865 cf.filepath = filepath
867 session.commit_or_flush()
868 ret = cf.cafilepath_id
872 __all__.append('get_or_set_contents_path_id')
874 ################################################################################
876 class ContentAssociation(object):
877 def __init__(self, *args, **kwargs):
881 return '<ContentAssociation %s>' % self.ca_id
883 __all__.append('ContentAssociation')
885 def insert_content_paths(binary_id, fullpaths, session=None):
887 Make sure given path is associated with given binary id
890 @param binary_id: the id of the binary
891 @type fullpaths: list
892 @param fullpaths: the list of paths of the file being associated with the binary
893 @type session: SQLAlchemy session
894 @param session: Optional SQLAlchemy session. If this is passed, the caller
895 is responsible for ensuring a transaction has begun and committing the
896 results or rolling back based on the result code. If not passed, a commit
897 will be performed at the end of the function, otherwise the caller is
898 responsible for commiting.
900 @return: True upon success
905 session = DBConn().session()
910 def generate_path_dicts():
911 for fullpath in fullpaths:
912 if fullpath.startswith( './' ):
913 fullpath = fullpath[2:]
915 yield {'filename':fullpath, 'id': binary_id }
917 for d in generate_path_dicts():
918 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
927 traceback.print_exc()
929 # Only rollback if we set up the session ourself
936 __all__.append('insert_content_paths')
938 ################################################################################
940 class DSCFile(object):
941 def __init__(self, *args, **kwargs):
945 return '<DSCFile %s>' % self.dscfile_id
947 __all__.append('DSCFile')
950 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
952 Returns a list of DSCFiles which may be empty
954 @type dscfile_id: int (optional)
955 @param dscfile_id: the dscfile_id of the DSCFiles to find
957 @type source_id: int (optional)
958 @param source_id: the source id related to the DSCFiles to find
960 @type poolfile_id: int (optional)
961 @param poolfile_id: the poolfile id related to the DSCFiles to find
964 @return: Possibly empty list of DSCFiles
967 q = session.query(DSCFile)
969 if dscfile_id is not None:
970 q = q.filter_by(dscfile_id=dscfile_id)
972 if source_id is not None:
973 q = q.filter_by(source_id=source_id)
975 if poolfile_id is not None:
976 q = q.filter_by(poolfile_id=poolfile_id)
980 __all__.append('get_dscfiles')
982 ################################################################################
984 class ExternalOverride(ORMObject):
985 def __init__(self, *args, **kwargs):
989 return '<ExternalOverride %s = %s: %s>' % (self.package, self.key, self.value)
991 __all__.append('ExternalOverride')
993 ################################################################################
995 class PoolFile(ORMObject):
996 def __init__(self, filename = None, filesize = -1, \
998 self.filename = filename
999 self.filesize = filesize
1000 self.md5sum = md5sum
1004 session = DBConn().session().object_session(self)
1005 af = session.query(ArchiveFile).join(Archive) \
1006 .filter(ArchiveFile.file == self) \
1007 .order_by(Archive.tainted.desc()).first()
1011 def component(self):
1012 session = DBConn().session().object_session(self)
1013 component_id = session.query(ArchiveFile.component_id).filter(ArchiveFile.file == self) \
1014 .group_by(ArchiveFile.component_id).one()
1015 return session.query(Component).get(component_id)
1019 return os.path.basename(self.filename)
def is_valid(self, filesize = -1, md5sum = None):
    """
    Check whether the given size and checksum match this pool file.

    'filesize' may be an int or a numeric string; it is normalised with
    int(), which auto-promotes to arbitrary precision on Python 2 and is
    the only integer type on Python 3 (the previous 'long()' call does
    not exist there).

    @rtype: bool
    @return: True if both filesize and md5sum match, False otherwise.
    """
    return self.filesize == int(filesize) and self.md5sum == md5sum
def properties(self):
    """Attributes exposed via ORMObject's json()/repr(); 'filename'
    first because ORMObject.__repr__ uses the head element."""
    return [
        'filename', 'file_id', 'filesize', 'md5sum', 'sha1sum',
        'sha256sum', 'source', 'binary', 'last_used',
    ]
def not_null_constraints(self):
    """Columns that must never be NULL for a pool file record."""
    required = ['filename', 'md5sum']
    return required
1031 def identical_to(self, filename):
1033 compare size and hash with the given file
1036 @return: true if the given file has the same size and hash as this object; false otherwise
1038 st = os.stat(filename)
1039 if self.filesize != st.st_size:
1042 f = open(filename, "r")
1043 sha256sum = apt_pkg.sha256sum(f)
1044 if sha256sum != self.sha256sum:
1049 __all__.append('PoolFile')
1052 def get_poolfile_like_name(filename, session=None):
1054 Returns an array of PoolFile objects which are like the given name
1056 @type filename: string
1057 @param filename: the filename of the file to check against the DB
1060 @return: array of PoolFile objects
1063 # TODO: There must be a way of properly using bind parameters with %FOO%
1064 q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1068 __all__.append('get_poolfile_like_name')
1070 ################################################################################
1072 class Fingerprint(ORMObject):
1073 def __init__(self, fingerprint = None):
1074 self.fingerprint = fingerprint
1076 def properties(self):
1077 return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
def not_null_constraints(self):
    """The fingerprint itself is the only mandatory column."""
    return ['fingerprint']
1083 __all__.append('Fingerprint')
1086 def get_fingerprint(fpr, session=None):
1088 Returns Fingerprint object for given fpr.
1091 @param fpr: The fpr to find / add
1093 @type session: SQLAlchemy
1094 @param session: Optional SQL session object (a temporary one will be
1095 generated if not supplied).
1098 @return: the Fingerprint object for the given fpr or None
1101 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1105 except NoResultFound:
1110 __all__.append('get_fingerprint')
1113 def get_or_set_fingerprint(fpr, session=None):
1115 Returns Fingerprint object for given fpr.
1117 If no matching fpr is found, a row is inserted.
1120 @param fpr: The fpr to find / add
1122 @type session: SQLAlchemy
1123 @param session: Optional SQL session object (a temporary one will be
1124 generated if not supplied). If not passed, a commit will be performed at
1125 the end of the function, otherwise the caller is responsible for commiting.
1126 A flush will be performed either way.
1129 @return: the Fingerprint object for the given fpr
1132 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1136 except NoResultFound:
1137 fingerprint = Fingerprint()
1138 fingerprint.fingerprint = fpr
1139 session.add(fingerprint)
1140 session.commit_or_flush()
1145 __all__.append('get_or_set_fingerprint')
1147 ################################################################################
1149 # Helper routine for Keyring class
1150 def get_ldap_name(entry):
1152 for k in ["cn", "mn", "sn"]:
1154 if ret and ret[0] != "" and ret[0] != "-":
1156 return " ".join(name)
1158 ################################################################################
1160 class Keyring(object):
1161 gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
1162 " --with-colons --fingerprint --fingerprint"
1167 def __init__(self, *args, **kwargs):
1171 return '<Keyring %s>' % self.keyring_name
def de_escape_gpg_str(self, txt):
    r"""Decode GPG-style \xNN escape sequences in txt and return the
    resulting string; text outside escapes is passed through verbatim."""
    pieces = re.split(r'(\\x..)', txt)
    # The captured escape tokens occupy the odd positions after the
    # split; each carries two hex digits after the '\x' prefix.
    decoded = [
        "%c" % int(piece[2:], 16) if idx % 2 else piece
        for idx, piece in enumerate(pieces)
    ]
    return "".join(decoded)
1179 def parse_address(self, uid):
1180 """parses uid and returns a tuple of real name and email address"""
1182 (name, address) = email.Utils.parseaddr(uid)
1183 name = re.sub(r"\s*[(].*[)]", "", name)
1184 name = self.de_escape_gpg_str(name)
1187 return (name, address)
1189 def load_keys(self, keyring):
1190 if not self.keyring_id:
1191 raise Exception('Must be initialized with database information')
1193 k = os.popen(self.gpg_invocation % keyring, "r")
1195 need_fingerprint = False
1198 field = line.split(":")
1199 if field[0] == "pub":
1202 (name, addr) = self.parse_address(field[9])
1204 self.keys[key]["email"] = addr
1205 self.keys[key]["name"] = name
1206 need_fingerprint = True
1207 elif key and field[0] == "uid":
1208 (name, addr) = self.parse_address(field[9])
1209 if "email" not in self.keys[key] and "@" in addr:
1210 self.keys[key]["email"] = addr
1211 self.keys[key]["name"] = name
1212 elif need_fingerprint and field[0] == "fpr":
1213 self.keys[key]["fingerprints"] = [field[9]]
1214 self.fpr_lookup[field[9]] = key
1215 need_fingerprint = False
# Map the keyring's fingerprints to Debian account names via LDAP.
# Returns (byname, byuid) dictionaries keyed by uid string / uid_id.
# NOTE(review): loop headers and the byname/byuid initialisation are elided
# in this copy -- verify against VCS before editing.
1217 def import_users_from_ldap(self, session):
1221 LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
1222 LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
1223 ca_cert_file = cnf.get('Import-LDAP-Fingerprints::CACertFile')
1225 l = ldap.open(LDAPServer)
1228 # TODO: This should request a new context and use
1229 # connection-specific options (i.e. "l.set_option(...)")
1231 # Request a new TLS context. If there was already one, libldap
1232 # would not change the TLS options (like which CAs to trust).
1233 #l.set_option(ldap.OPT_X_TLS_NEWCTX, True)
1234 ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_HARD)
1235 #ldap.set_option(ldap.OPT_X_TLS_CACERTDIR, None)
1236 ldap.set_option(ldap.OPT_X_TLS_CACERTFILE, ca_cert_file)
# Anonymous bind; only public directory attributes are needed.
1239 l.simple_bind_s("","")
1240 Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
1241 "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
1242 ["uid", "keyfingerprint", "cn", "mn", "sn"])
1244 ldap_fin_uid_id = {}
1251 uid = entry["uid"][0]
1252 name = get_ldap_name(entry)
1253 fingerprints = entry["keyFingerPrint"]
1255 for f in fingerprints:
# Skip fingerprints that are not present in the loaded keyring.
1256 key = self.fpr_lookup.get(f, None)
1257 if key not in self.keys:
1259 self.keys[key]["uid"] = uid
1263 keyid = get_or_set_uid(uid, session).uid_id
1264 byuid[keyid] = (uid, name)
1265 byname[uid] = (keyid, name)
1267 return (byname, byuid)
# Derive uid entries directly from the keyring (format is a template such
# as "%s" applied to the key's email), creating Uid rows as needed.
# Returns (byname, byuid) dictionaries.
# NOTE(review): the else-branch structure and "any_invalid" bookkeeping are
# elided in this copy -- verify against VCS before editing.
1269 def generate_users_from_keyring(self, format, session):
1273 for x in self.keys.keys():
1274 if "email" not in self.keys[x]:
# Keys without an email address get a placeholder uid.
1276 self.keys[x]["uid"] = format % "invalid-uid"
1278 uid = format % self.keys[x]["email"]
1279 keyid = get_or_set_uid(uid, session).uid_id
1280 byuid[keyid] = (uid, self.keys[x]["name"])
1281 byname[uid] = (keyid, self.keys[x]["name"])
1282 self.keys[x]["uid"] = uid
# Register the shared placeholder uid once if any key lacked an email.
1285 uid = format % "invalid-uid"
1286 keyid = get_or_set_uid(uid, session).uid_id
1287 byuid[keyid] = (uid, "ungeneratable user id")
1288 byname[uid] = (keyid, "ungeneratable user id")
1290 return (byname, byuid)
1292 __all__.append('Keyring')
# Look up a Keyring row by name; returns None when no row exists.
1295 def get_keyring(keyring, session=None):
1297 If C{keyring} does not have an entry in the C{keyrings} table yet, return None
1298 If C{keyring} already has an entry, simply return the existing Keyring
1300 @type keyring: string
1301 @param keyring: the keyring name
1304 @return: the Keyring object for this keyring
1307 q = session.query(Keyring).filter_by(keyring_name=keyring)
1311 except NoResultFound:
1314 __all__.append('get_keyring')
# Return the keyring_name (path) of every active keyring, highest
# priority first.
1317 def get_active_keyring_paths(session=None):
1320 @return: list of active keyring paths
1322 return [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).order_by(desc(Keyring.priority)).all() ]
1324 __all__.append('get_active_keyring_paths')
# Convenience wrapper: first (highest-priority) entry of
# get_active_keyring_paths(), or None when none are configured.
1327 def get_primary_keyring_path(session=None):
1329 Get the full path to the highest priority active keyring
1332 @return: path to the active keyring with the highest priority or None if no
1333 keyring is configured
1335 keyrings = get_active_keyring_paths()
1337 if len(keyrings) > 0:
1342 __all__.append('get_primary_keyring_path')
1344 ################################################################################
# ORM class for a row of the "changes" table; attributes come from the
# SQLAlchemy mapper.
1346 class DBChange(object):
1347 def __init__(self, *args, **kwargs):
1351 return '<DBChange %s>' % self.changesname
1353 __all__.append('DBChange')
# Look up a DBChange row by its .changes filename; None when not present.
1356 def get_dbchange(filename, session=None):
1358 returns DBChange object for given C{filename}.
1360 @type filename: string
1361 @param filename: the name of the file
1363 @type session: Session
1364 @param session: Optional SQLA session object (a temporary one will be
1365 generated if not supplied)
1368 @return: DBChange object for the given filename (C{None} if not present)
1371 q = session.query(DBChange).filter_by(changesname=filename)
1375 except NoResultFound:
1378 __all__.append('get_dbchange')
1380 ################################################################################
class Maintainer(ORMObject):
    """A maintainer name/email string as stored in the maintainer table."""

    def __init__(self, name = None):
        # BUG FIX: 'name' was accepted but never assigned in the reviewed
        # copy, although properties(), not_null_constraints() and
        # get_split_maintainer() all rely on self.name.
        self.name = name

    def properties(self):
        # Attributes exposed through the ORMObject property machinery.
        return ['name', 'maintainer_id']

    def not_null_constraints(self):
        # NOTE(review): return value reconstructed (it was elided in the
        # reviewed copy); 'name' is the only candidate column -- verify.
        return ['name']

    def get_split_maintainer(self):
        """Return the 4-tuple produced by fix_maintainer() for self.name,
        or four empty strings when no name is set."""
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())

__all__.append('Maintainer')
# Fetch the Maintainer row for 'name', inserting it first if missing.
1401 def get_or_set_maintainer(name, session=None):
1403 Returns Maintainer object for given maintainer name.
1405 If no matching maintainer name is found, a row is inserted.
1408 @param name: The maintainer name to add
1410 @type session: SQLAlchemy
1411 @param session: Optional SQL session object (a temporary one will be
1412 generated if not supplied). If not passed, a commit will be performed at
1413 the end of the function, otherwise the caller is responsible for committing.
1414 A flush will be performed either way.
1417 @return: the Maintainer object for the given maintainer
1420 q = session.query(Maintainer).filter_by(name=name)
1423 except NoResultFound:
# Not found: insert a new row and commit/flush as documented above.
1424 maintainer = Maintainer()
1425 maintainer.name = name
1426 session.add(maintainer)
1427 session.commit_or_flush()
1432 __all__.append('get_or_set_maintainer')
# Primary-key lookup; Query.get returns None for an unknown id.
1435 def get_maintainer(maintainer_id, session=None):
1437 Return the name of the maintainer behind C{maintainer_id} or None if that
1438 maintainer_id is invalid.
1440 @type maintainer_id: int
1441 @param maintainer_id: the id of the maintainer
1444 @return: the Maintainer with this C{maintainer_id}
1447 return session.query(Maintainer).get(maintainer_id)
1449 __all__.append('get_maintainer')
1451 ################################################################################
# ORM class for a NEW-queue comment row; attributes come from the mapper.
1453 class NewComment(object):
1454 def __init__(self, *args, **kwargs):
1458 return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
1460 __all__.append('NewComment')
# True when at least one NewComment row matches (queue, package, version).
1463 def has_new_comment(policy_queue, package, version, session=None):
1465 Returns true if the given combination of C{package}, C{version} has a comment.
1467 @type package: string
1468 @param package: name of the package
1470 @type version: string
1471 @param version: package version
1473 @type session: Session
1474 @param session: Optional SQLA session object (a temporary one will be
1475 generated if not supplied)
1481 q = session.query(NewComment).filter_by(policy_queue=policy_queue)
1482 q = q.filter_by(package=package)
1483 q = q.filter_by(version=version)
# NOTE(review): bool() is redundant here; an EXISTS query would also avoid
# a full COUNT.
1485 return bool(q.count() > 0)
1487 __all__.append('has_new_comment')
# List NewComment rows for a queue, optionally narrowed by package,
# version and/or comment id.
1490 def get_new_comments(policy_queue, package=None, version=None, comment_id=None, session=None):
1492 Returns (possibly empty) list of NewComment objects for the given
1495 @type package: string (optional)
1496 @param package: name of the package
1498 @type version: string (optional)
1499 @param version: package version
1501 @type comment_id: int (optional)
1502 @param comment_id: An id of a comment
1504 @type session: Session
1505 @param session: Optional SQLA session object (a temporary one will be
1506 generated if not supplied)
1509 @return: A (possibly empty) list of NewComment objects will be returned
1512 q = session.query(NewComment).filter_by(policy_queue=policy_queue)
1513 if package is not None: q = q.filter_by(package=package)
1514 if version is not None: q = q.filter_by(version=version)
1515 if comment_id is not None: q = q.filter_by(comment_id=comment_id)
1519 __all__.append('get_new_comments')
1521 ################################################################################
class Override(ORMObject):
    """A single override entry: package / suite / component / type /
    section / priority."""

    def __init__(self, package = None, suite = None, component = None, overridetype = None, \
        section = None, priority = None):
        self.package = package
        # BUG FIX: 'suite' was accepted but never stored in the reviewed
        # copy, although properties() and not_null_constraints() both
        # list it.
        self.suite = suite
        self.component = component
        self.overridetype = overridetype
        self.section = section
        self.priority = priority

    def properties(self):
        # Attributes exposed through the ORMObject property machinery.
        # NOTE(review): the continuation line was elided in the reviewed
        # copy; 'priority' is the remaining constructor field -- verify.
        return ['package', 'suite', 'component', 'overridetype', 'section', \
            'priority']

    def not_null_constraints(self):
        return ['package', 'suite', 'component', 'overridetype', 'section']

__all__.append('Override')
# Query Override rows for a package, optionally restricted by suite(s),
# component(s) and override type(s); scalars are normalised to lists.
1543 def get_override(package, suite=None, component=None, overridetype=None, session=None):
1545 Returns Override object for the given parameters
1547 @type package: string
1548 @param package: The name of the package
1550 @type suite: string, list or None
1551 @param suite: The name of the suite (or suites if a list) to limit to. If
1552 None, don't limit. Defaults to None.
1554 @type component: string, list or None
1555 @param component: The name of the component (or components if a list) to
1556 limit to. If None, don't limit. Defaults to None.
1558 @type overridetype: string, list or None
1559 @param overridetype: The name of the overridetype (or overridetypes if a list) to
1560 limit to. If None, don't limit. Defaults to None.
1562 @type session: Session
1563 @param session: Optional SQLA session object (a temporary one will be
1564 generated if not supplied)
1567 @return: A (possibly empty) list of Override objects will be returned
1570 q = session.query(Override)
1571 q = q.filter_by(package=package)
1573 if suite is not None:
1574 if not isinstance(suite, list): suite = [suite]
1575 q = q.join(Suite).filter(Suite.suite_name.in_(suite))
1577 if component is not None:
1578 if not isinstance(component, list): component = [component]
1579 q = q.join(Component).filter(Component.component_name.in_(component))
1581 if overridetype is not None:
1582 if not isinstance(overridetype, list): overridetype = [overridetype]
1583 q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))
1587 __all__.append('get_override')
1590 ################################################################################
class OverrideType(ORMObject):
    """The type name of an override entry."""

    def __init__(self, overridetype = None):
        self.overridetype = overridetype

    def properties(self):
        # Attributes exposed through the ORMObject property machinery.
        return ['overridetype', 'overridetype_id', 'overrides_count']

    def not_null_constraints(self):
        # Only the type name itself is NOT NULL.
        return ['overridetype']

__all__.append('OverrideType')
# Look up an OverrideType row by name; None when not present.
1605 def get_override_type(override_type, session=None):
1607 Returns OverrideType object for given C{override type}.
1609 @type override_type: string
1610 @param override_type: The name of the override type
1612 @type session: Session
1613 @param session: Optional SQLA session object (a temporary one will be
1614 generated if not supplied)
1617 @return: the OverrideType object for the given override type
1620 q = session.query(OverrideType).filter_by(overridetype=override_type)
1624 except NoResultFound:
1627 __all__.append('get_override_type')
1629 ################################################################################
# ORM class for a policy queue row; attributes come from the mapper.
1631 class PolicyQueue(object):
1632 def __init__(self, *args, **kwargs):
1636 return '<PolicyQueue %s>' % self.queue_name
1638 __all__.append('PolicyQueue')
# Look up a PolicyQueue row by queue name; None when not present.
1641 def get_policy_queue(queuename, session=None):
1643 Returns PolicyQueue object for given C{queue name}
1645 @type queuename: string
1646 @param queuename: The name of the queue
1648 @type session: Session
1649 @param session: Optional SQLA session object (a temporary one will be
1650 generated if not supplied)
1653 @return: PolicyQueue object for the given queue
1656 q = session.query(PolicyQueue).filter_by(queue_name=queuename)
1660 except NoResultFound:
1663 __all__.append('get_policy_queue')
1665 ################################################################################
# Upload sitting in a policy queue; ordered by source name, then version,
# source-full before binary-only, then changes filename (Python 2 __cmp__).
# NOTE(review): the intermediate "if ret == 0" guards and the final return
# are elided in this copy -- verify against VCS before editing.
1667 class PolicyQueueUpload(object):
1668 def __cmp__(self, other):
1669 ret = cmp(self.changes.source, other.changes.source)
1671 ret = apt_pkg.version_compare(self.changes.version, other.changes.version)
1673 if self.source is not None and other.source is None:
1675 elif self.source is None and other.source is not None:
1678 ret = cmp(self.changes.changesname, other.changes.changesname)
1681 __all__.append('PolicyQueueUpload')
1683 ################################################################################
class PolicyQueueByhandFile(object):
    """ORM placeholder class; attributes are supplied by the SQLAlchemy
    mapper configured elsewhere in this module."""
    # BUG FIX: the class header had no body in the reviewed copy, which is
    # a syntax error; 'pass' restores validity without changing behavior.
    pass

__all__.append('PolicyQueueByhandFile')
1690 ################################################################################
class Priority(ORMObject):
    """A package priority name together with its numeric level."""

    def __init__(self, priority = None, level = None):
        self.priority = priority
        # BUG FIX: 'level' was accepted but never stored in the reviewed
        # copy, although properties() and not_null_constraints() both
        # list it.
        self.level = level

    def properties(self):
        # Attributes exposed through the ORMObject property machinery.
        return ['priority', 'priority_id', 'level', 'overrides_count']

    def not_null_constraints(self):
        return ['priority', 'level']

    def __eq__(self, val):
        # Allow direct comparison against the priority name string.
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented

__all__.append('Priority')
# Look up a Priority row by name; None when not present.
1718 def get_priority(priority, session=None):
1720 Returns Priority object for given C{priority name}.
1722 @type priority: string
1723 @param priority: The name of the priority
1725 @type session: Session
1726 @param session: Optional SQLA session object (a temporary one will be
1727 generated if not supplied)
1730 @return: Priority object for the given priority
1733 q = session.query(Priority).filter_by(priority=priority)
1737 except NoResultFound:
1740 __all__.append('get_priority')
# Build a {priority name: priority_id} mapping from the priority table.
# NOTE(review): the dict initialisation, loop header and return are elided
# in this copy -- verify against VCS before editing.
1743 def get_priorities(session=None):
1745 Returns dictionary of priority names -> id mappings
1747 @type session: Session
1748 @param session: Optional SQL session object (a temporary one will be
1749 generated if not supplied)
1752 @return: dictionary of priority names -> id mappings
1756 q = session.query(Priority)
1758 ret[x.priority] = x.priority_id
1762 __all__.append('get_priorities')
1764 ################################################################################
class Section(ORMObject):
    """A section name from the overrides system."""

    def __init__(self, section = None):
        self.section = section

    def properties(self):
        # Attributes exposed through the ORMObject property machinery.
        return ['section', 'section_id', 'overrides_count']

    def not_null_constraints(self):
        # BUG FIX: this method had no return statement in the reviewed
        # copy; 'section' mirrors OverrideType.not_null_constraints().
        return ['section']

    def __eq__(self, val):
        # Allow direct comparison against the section name string.
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented

__all__.append('Section')
# Look up a Section row by name; None when not present.
1791 def get_section(section, session=None):
1793 Returns Section object for given C{section name}.
1795 @type section: string
1796 @param section: The name of the section
1798 @type session: Session
1799 @param session: Optional SQLA session object (a temporary one will be
1800 generated if not supplied)
1803 @return: Section object for the given section name
1806 q = session.query(Section).filter_by(section=section)
1810 except NoResultFound:
1813 __all__.append('get_section')
# Build a {section name: section_id} mapping from the section table.
# NOTE(review): the dict initialisation, loop header and return are elided
# in this copy -- verify against VCS before editing.
1816 def get_sections(session=None):
1818 Returns dictionary of section names -> id mappings
1820 @type session: Session
1821 @param session: Optional SQL session object (a temporary one will be
1822 generated if not supplied)
1825 @return: dictionary of section names -> id mappings
1829 q = session.query(Section)
1831 ret[x.section] = x.section_id
1835 __all__.append('get_sections')
1837 ################################################################################
# Records one seen signature: primary fingerprint, timestamp and a SHA1 of
# the signed contents.
# NOTE(review): the @classmethod decorator, the "self = cls()" construction
# and the return are elided in this copy -- verify against VCS.
1839 class SignatureHistory(ORMObject):
1841 def from_signed_file(cls, signed_file):
1842 """signature history entry from signed file
1844 @type signed_file: L{daklib.gpg.SignedFile}
1845 @param signed_file: signed file
1847 @rtype: L{SignatureHistory}
1850 self.fingerprint = signed_file.primary_fingerprint
1851 self.signature_timestamp = signed_file.signature_timestamp
1852 self.contents_sha1 = signed_file.contents_sha1()
1855 __all__.append('SignatureHistory')
1857 ################################################################################
class SrcContents(ORMObject):
    """One (file, source) row of the source contents table."""

    def __init__(self, file = None, source = None):
        # BUG FIX: 'file' was accepted but never stored in the reviewed
        # copy, although properties() lists it. (The parameter name
        # shadows the builtin but is kept for interface compatibility.)
        self.file = file
        self.source = source

    def properties(self):
        # Attributes exposed through the ORMObject property machinery.
        return ['file', 'source']

__all__.append('SrcContents')
1869 ################################################################################
1871 from debian.debfile import Deb822
1873 # Temporary Deb822 subclass to fix bugs with : handling; see #597249
# NOTE(review): the RFC822-style state machine below has many elided lines
# (loop guards, continue statements, field skipping) in this copy --
# verify against VCS before editing.
1874 class Dak822(Deb822):
1875 def _internal_parser(self, sequence, fields=None):
1876 # The key is non-whitespace, non-colon characters before any colon.
1877 key_part = r"^(?P<key>[^: \t\n\r\f\v]+)\s*:\s*"
# Three line shapes: "Key: value", "Key:" (multiline start), " continuation".
1878 single = re.compile(key_part + r"(?P<data>\S.*?)\s*$")
1879 multi = re.compile(key_part + r"$")
1880 multidata = re.compile(r"^\s(?P<data>.+?)\s*$")
1882 wanted_field = lambda f: fields is None or f in fields
1884 if isinstance(sequence, basestring):
1885 sequence = sequence.splitlines()
1889 for line in self.gpg_stripped_paragraph(sequence):
1890 m = single.match(line)
# Flush the previously accumulated field before starting a new one.
1893 self[curkey] = content
1895 if not wanted_field(m.group('key')):
1899 curkey = m.group('key')
1900 content = m.group('data')
1903 m = multi.match(line)
1906 self[curkey] = content
1908 if not wanted_field(m.group('key')):
1912 curkey = m.group('key')
1916 m = multidata.match(line)
1918 content += '\n' + line # XXX not m.group('data')?
# Store whatever field was still being accumulated at end of paragraph.
1922 self[curkey] = content
# ORM class for a source package row; relations (poolfile, suites, ...)
# come from the SQLAlchemy mapper.
1925 class DBSource(ORMObject):
1926 def __init__(self, source = None, version = None, maintainer = None, \
1927 changedby = None, poolfile = None, install_date = None, fingerprint = None):
1928 self.source = source
1929 self.version = version
1930 self.maintainer = maintainer
1931 self.changedby = changedby
1932 self.poolfile = poolfile
1933 self.install_date = install_date
1934 self.fingerprint = fingerprint
# Primary-key accessor (decorator line elided in this copy).
1938 return self.source_id
1940 def properties(self):
1941 return ['source', 'source_id', 'maintainer', 'changedby', \
1942 'fingerprint', 'poolfile', 'version', 'suites_count', \
1943 'install_date', 'binaries_count', 'uploaders_count']
1945 def not_null_constraints(self):
1946 return ['source', 'version', 'install_date', 'maintainer', \
1947 'changedby', 'poolfile']
1949 def read_control_fields(self):
1951 Reads the control information from a dsc
1954 @return: fields is the dsc information in a dictionary form
# NOTE(review): the return statement and file close are elided in this copy.
1956 fullpath = self.poolfile.fullpath
1957 fields = Dak822(open(self.poolfile.fullpath, 'r'))
# Expose metadata key/value pairs as a dict-like proxy.
1960 metadata = association_proxy('key', 'value')
1962 def scan_contents(self):
1964 Returns a set of names for non directories. The path names are
1965 normalized after converting them from either utf-8 or iso8859-1
1968 fullpath = self.poolfile.fullpath
1969 from daklib.contents import UnpackedSource
1970 unpacked = UnpackedSource(fullpath)
1972 for name in unpacked.get_all_filenames():
1973 # enforce proper utf-8 encoding
1975 name.decode('utf-8')
1976 except UnicodeDecodeError:
# Fall back: assume latin-1 and re-encode as utf-8.
1977 name = name.decode('iso8859-1').encode('utf-8')
1981 __all__.append('DBSource')
# Check whether a binary upload's claimed source (exact version, or the
# version with a binNMU suffix stripped) exists in one of the given suites
# (or a suite that 'Enhances' it).
# NOTE(review): the "any"-suite branch, per-suite success check and final
# returns are elided in this copy -- verify against VCS before editing.
1984 def source_exists(source, source_version, suites = ["any"], session=None):
1986 Ensure that source exists somewhere in the archive for the binary
1987 upload being processed.
1988 1. exact match => 1.0-3
1989 2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
1991 @type source: string
1992 @param source: source name
1994 @type source_version: string
1995 @param source_version: expected source version
1998 @param suites: list of suites to check in, default I{any}
2000 @type session: Session
2001 @param session: Optional SQLA session object (a temporary one will be
2002 generated if not supplied)
2005 @return: returns 1 if a source with expected version is found, otherwise 0
# Strip a "+bN" binNMU suffix to get the underlying source version.
2012 from daklib.regexes import re_bin_only_nmu
2013 orig_source_version = re_bin_only_nmu.sub('', source_version)
2015 for suite in suites:
2016 q = session.query(DBSource).filter_by(source=source). \
2017 filter(DBSource.version.in_([source_version, orig_source_version]))
2019 # source must exist in 'suite' or a suite that is enhanced by 'suite'
2020 s = get_suite(suite, session)
2022 enhances_vcs = session.query(VersionCheck).filter(VersionCheck.suite==s).filter_by(check='Enhances')
2023 considered_suites = [ vc.reference for vc in enhances_vcs ]
2024 considered_suites.append(s)
2026 q = q.filter(DBSource.suites.any(Suite.suite_id.in_([s.suite_id for s in considered_suites])))
2031 # No source found so return not ok
2036 __all__.append('source_exists')
# All suites that currently contain the named source package.
2039 def get_suites_source_in(source, session=None):
2041 Returns list of Suite objects which given C{source} name is in
2044 @param source: DBSource package name to search for
2047 @return: list of Suite objects for the given source
2050 return session.query(Suite).filter(Suite.sources.any(source=source)).all()
2052 __all__.append('get_suites_source_in')
# DBSource rows matching a source name, optionally narrowed by version
# and the dm_upload_allowed flag.
2055 def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
2057 Returns list of DBSource objects for given C{source} name and other parameters
2060 @param source: DBSource package name to search for
2062 @type version: str or None
2063 @param version: DBSource version name to search for or None if not applicable
2065 @type dm_upload_allowed: bool
2066 @param dm_upload_allowed: If None, no effect. If True or False, only
2067 return packages with that dm_upload_allowed setting
2069 @type session: Session
2070 @param session: Optional SQL session object (a temporary one will be
2071 generated if not supplied)
2074 @return: list of DBSource objects for the given name (may be empty)
2077 q = session.query(DBSource).filter_by(source=source)
2079 if version is not None:
2080 q = q.filter_by(version=version)
2082 if dm_upload_allowed is not None:
2083 q = q.filter_by(dm_upload_allowed=dm_upload_allowed)
2087 __all__.append('get_sources_from_name')
2089 # FIXME: This function fails badly if it finds more than 1 source package and
2090 # its implementation is trivial enough to be inlined.
# Single DBSource for (source, suite); None when not present.
2092 def get_source_in_suite(source, suite, session=None):
2094 Returns a DBSource object for a combination of C{source} and C{suite}.
2096 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2097 - B{suite} - a suite name, eg. I{unstable}
2099 @type source: string
2100 @param source: source package name
2103 @param suite: the suite name
2106 @return: the version for I{source} in I{suite}
2110 q = get_suite(suite, session).get_sources(source)
2113 except NoResultFound:
2116 __all__.append('get_source_in_suite')
# Copy the control fields of a DBBinary/DBSource into the metadata tables,
# coercing each value to a byte string (UTF-8, then latin-1 fallback).
# NOTE(review): the nested try statements are partially elided in this
# copy -- verify against VCS before editing.
2119 def import_metadata_into_db(obj, session=None):
2121 This routine works on either DBBinary or DBSource objects and imports
2122 their metadata into the database
2124 fields = obj.read_control_fields()
2125 for k in fields.keys():
2128 val = str(fields[k])
2129 except UnicodeEncodeError:
2130 # Fall back to UTF-8
2132 val = fields[k].encode('utf-8')
2133 except UnicodeEncodeError:
2134 # Finally try iso8859-1
2135 val = fields[k].encode('iso8859-1')
2136 # Otherwise we allow the exception to percolate up and we cause
2137 # a reject as someone is playing silly buggers
2139 obj.metadata[get_or_set_metadatakey(k, session)] = val
2141 session.commit_or_flush()
2143 __all__.append('import_metadata_into_db')
2145 ################################################################################
# ORM class for a source format row (e.g. "3.0 (quilt)"); attributes come
# from the mapper.
2147 class SrcFormat(object):
2148 def __init__(self, *args, **kwargs):
2152 return '<SrcFormat %s>' % (self.format_name)
2154 __all__.append('SrcFormat')
2156 ################################################################################
# (display label, Suite attribute) pairs used by Suite's details dump.
# NOTE(review): some entries appear elided in this copy -- verify.
2158 SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
2159 ('SuiteID', 'suite_id'),
2160 ('Version', 'version'),
2161 ('Origin', 'origin'),
2163 ('Description', 'description'),
2164 ('Untouchable', 'untouchable'),
2165 ('Announce', 'announce'),
2166 ('Codename', 'codename'),
2167 ('OverrideCodename', 'overridecodename'),
2168 ('ValidTime', 'validtime'),
2169 ('Priority', 'priority'),
2170 ('NotAutomatic', 'notautomatic'),
2171 ('CopyChanges', 'copychanges'),
2172 ('OverrideSuite', 'overridesuite')]
2174 # Why the heck don't we have any UNIQUE constraints in table suite?
2175 # TODO: Add UNIQUE constraints for appropriate columns.
# ORM class for a suite row; several method headers/guards are elided in
# this copy -- verify against VCS before editing.
2176 class Suite(ORMObject):
2177 def __init__(self, suite_name = None, version = None):
2178 self.suite_name = suite_name
2179 self.version = version
2181 def properties(self):
2182 return ['suite_name', 'version', 'sources_count', 'binaries_count', \
2185 def not_null_constraints(self):
2186 return ['suite_name']
2188 def __eq__(self, val):
# Allow direct comparison against a suite name string.
2189 if isinstance(val, str):
2190 return (self.suite_name == val)
2191 # This signals to use the normal comparison operator
2192 return NotImplemented
2194 def __ne__(self, val):
2195 if isinstance(val, str):
2196 return (self.suite_name != val)
2197 # This signals to use the normal comparison operator
2198 return NotImplemented
# Human-readable details dump driven by SUITE_FIELDS (method header elided).
2202 for disp, field in SUITE_FIELDS:
2203 val = getattr(self, field, None)
2205 ret.append("%s: %s" % (disp, val))
2207 return "\n".join(ret)
2209 def get_architectures(self, skipsrc=False, skipall=False):
2211 Returns list of Architecture objects
2213 @type skipsrc: boolean
2214 @param skipsrc: Whether to skip returning the 'source' architecture entry
2217 @type skipall: boolean
2218 @param skipall: Whether to skip returning the 'all' architecture entry
2222 @return: list of Architecture objects for the given name (may be empty)
2225 q = object_session(self).query(Architecture).with_parent(self)
2227 q = q.filter(Architecture.arch_string != 'source')
2229 q = q.filter(Architecture.arch_string != 'all')
2230 return q.order_by(Architecture.arch_string).all()
2232 def get_sources(self, source):
2234 Returns a query object representing DBSource that is part of C{suite}.
2236 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2238 @type source: string
2239 @param source: source package name
2241 @rtype: sqlalchemy.orm.query.Query
2242 @return: a query of DBSource
2246 session = object_session(self)
2247 return session.query(DBSource).filter_by(source = source). \
# Resolve the override suite: self unless 'overridesuite' points elsewhere.
2250 def get_overridesuite(self):
2251 if self.overridesuite is None:
2254 return object_session(self).query(Suite).filter_by(suite_name=self.overridesuite).one()
# Path to this suite's dists/ directory (property decorator elided).
2258 return os.path.join(self.archive.path, 'dists', self.suite_name)
2260 __all__.append('Suite')
# Look up a Suite row by name; None when not present.
2263 def get_suite(suite, session=None):
2265 Returns Suite object for given C{suite name}.
2268 @param suite: The name of the suite
2270 @type session: Session
2271 @param session: Optional SQLA session object (a temporary one will be
2272 generated if not supplied)
2275 @return: Suite object for the requested suite name (None if not present)
2278 q = session.query(Suite).filter_by(suite_name=suite)
2282 except NoResultFound:
2285 __all__.append('get_suite')
2287 ################################################################################
# Delegates to Suite.get_architectures(); an unknown suite yields None from
# get_suite(), whose AttributeError is caught below.
2290 def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
2292 Returns list of Architecture objects for given C{suite} name. The list is
2293 empty if suite does not exist.
2296 @param suite: Suite name to search for
2298 @type skipsrc: boolean
2299 @param skipsrc: Whether to skip returning the 'source' architecture entry
2302 @type skipall: boolean
2303 @param skipall: Whether to skip returning the 'all' architecture entry
2306 @type session: Session
2307 @param session: Optional SQL session object (a temporary one will be
2308 generated if not supplied)
2311 @return: list of Architecture objects for the given name (may be empty)
2315 return get_suite(suite, session).get_architectures(skipsrc, skipall)
2316 except AttributeError:
2319 __all__.append('get_suite_architectures')
2321 ################################################################################
class Uid(ORMObject):
    """A Debian account name (uid) with its associated real name."""

    def __init__(self, uid = None, name = None):
        # BUG FIX: both parameters were accepted but never stored in the
        # reviewed copy, although __eq__/__ne__ read self.uid and
        # properties() lists both attributes.
        self.uid = uid
        self.name = name

    def __eq__(self, val):
        # Allow direct comparison against a uid string.
        if isinstance(val, str):
            return (self.uid == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.uid != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        return ['uid', 'name', 'fingerprint']

    def not_null_constraints(self):
        # NOTE(review): return value reconstructed (elided in the reviewed
        # copy); 'uid' matches upstream dak -- verify against VCS.
        return ['uid']

__all__.append('Uid')
# Fetch the Uid row for 'uidname', inserting it first if missing.
# NOTE(review): the try/except bodies (q.one(), insert, return) are
# partially elided in this copy -- verify against VCS before editing.
2349 def get_or_set_uid(uidname, session=None):
2351 Returns uid object for given uidname.
2353 If no matching uidname is found, a row is inserted.
2355 @type uidname: string
2356 @param uidname: The uid to add
2358 @type session: SQLAlchemy
2359 @param session: Optional SQL session object (a temporary one will be
2360 generated if not supplied). If not passed, a commit will be performed at
2361 the end of the function, otherwise the caller is responsible for commiting.
2364 @return: the uid object for the given uidname
2367 q = session.query(Uid).filter_by(uid=uidname)
2371 except NoResultFound:
2375 session.commit_or_flush()
2380 __all__.append('get_or_set_uid')
# Uid owning the given key fingerprint (joined via the fingerprint table);
# None when not present.
2383 def get_uid_from_fingerprint(fpr, session=None):
2384 q = session.query(Uid)
2385 q = q.join(Fingerprint).filter_by(fingerprint=fpr)
2389 except NoResultFound:
2392 __all__.append('get_uid_from_fingerprint')
2394 ################################################################################
class MetadataKey(ORMObject):
    """A key name in the source/binary metadata key/value store."""

    def __init__(self, key = None):
        # NOTE(review): all three method bodies were elided in the reviewed
        # copy; reconstructed minimally -- verify against VCS.
        self.key = key

    def properties(self):
        return ['key']

    def not_null_constraints(self):
        return ['key']

__all__.append('MetadataKey')
# Fetch the MetadataKey row for 'keyname', inserting it first if missing.
2409 def get_or_set_metadatakey(keyname, session=None):
2411 Returns MetadataKey object for given keyname.
2413 If no matching keyname is found, a row is inserted.
2415 @type keyname: string
2416 @param keyname: The keyname to add
2418 @type session: SQLAlchemy
2419 @param session: Optional SQL session object (a temporary one will be
2420 generated if not supplied). If not passed, a commit will be performed at
2421 the end of the function, otherwise the caller is responsible for commiting.
2424 @return: the metadatakey object for the given keyname
2427 q = session.query(MetadataKey).filter_by(key=keyname)
2431 except NoResultFound:
2432 ret = MetadataKey(keyname)
2434 session.commit_or_flush()
2438 __all__.append('get_or_set_metadatakey')
2440 ################################################################################
class BinaryMetadata(ORMObject):
    """A single key/value metadata entry attached to a binary package."""

    def __init__(self, key = None, value = None, binary = None):
        # BUG FIX: 'key' and 'value' were accepted but never stored in the
        # reviewed copy, although properties() exposes both.
        self.key = key
        self.value = value
        self.binary = binary

    def properties(self):
        return ['binary', 'key', 'value']

    def not_null_constraints(self):
        # NOTE(review): return value reconstructed (elided in the reviewed
        # copy); 'value' matches upstream dak -- verify against VCS.
        return ['value']

__all__.append('BinaryMetadata')
2456 ################################################################################
class SourceMetadata(ORMObject):
    """A single key/value metadata entry attached to a source package."""

    def __init__(self, key = None, value = None, source = None):
        # BUG FIX: 'key' and 'value' were accepted but never stored in the
        # reviewed copy, although properties() exposes both.
        self.key = key
        self.value = value
        self.source = source

    def properties(self):
        return ['source', 'key', 'value']

    def not_null_constraints(self):
        # NOTE(review): return value reconstructed (elided in the reviewed
        # copy); 'value' matches upstream dak -- verify against VCS.
        return ['value']

__all__.append('SourceMetadata')
2472 ################################################################################
# ORM class for a version-check rule between two suites (e.g. 'Enhances',
# 'MustBeNewerThan'); attributes come from the mapper.
2474 class VersionCheck(ORMObject):
2475 def __init__(self, *args, **kwargs):
2478 def properties(self):
2479 #return ['suite_id', 'check', 'reference_id']
# NOTE(review): the actual return statement is elided in this copy.
2482 def not_null_constraints(self):
2483 return ['suite', 'check', 'reference']
2485 __all__.append('VersionCheck')
# VersionCheck rows for a suite, optionally restricted to one check type.
# NOTE(review): the unknown-suite guard and final return are elided in
# this copy -- verify against VCS before editing.
2488 def get_version_checks(suite_name, check = None, session = None):
2489 suite = get_suite(suite_name, session)
2491 # Make sure that what we return is iterable so that list comprehensions
2492 # involving this don't cause a traceback
2494 q = session.query(VersionCheck).filter_by(suite=suite)
2496 q = q.filter_by(check=check)
2499 __all__.append('get_version_checks')
2501 ################################################################################
2503 class DBConn(object):
2505 database module init.
# Borg-style singleton: all instances share __shared_state, so the
# connection is initialised only once per process.
2509 def __init__(self, *args, **kwargs):
2510 self.__dict__ = self.__shared_state
2512 if not getattr(self, 'initialised', False):
2513 self.initialised = True
# NOTE(review): dict.has_key is Python-2-only; the connection setup call
# that follows is elided in this copy.
2514 self.debug = kwargs.has_key('debug')
# Reflect database tables and views into SQLAlchemy Table objects,
# exposing them as self.tbl_<name> / self.view_<name>.
# NOTE(review): both name listings below are heavily elided in this copy
# -- verify against VCS before editing.
2517 def __setuptables(self):
2520 'acl_architecture_map',
2521 'acl_fingerprint_map',
2528 'binaries_metadata',
2536 'external_overrides',
2537 'extra_src_references',
2539 'files_archive_map',
2545 # TODO: the maintainer column in table override should be removed.
2549 'policy_queue_upload',
2550 'policy_queue_upload_binaries_map',
2551 'policy_queue_byhand_file',
2554 'signature_history',
2563 'suite_architectures',
2564 'suite_build_queue_copy',
2565 'suite_src_formats',
2571 'almost_obsolete_all_associations',
2572 'almost_obsolete_src_associations',
2573 'any_associations_source',
2574 'bin_associations_binaries',
2575 'binaries_suite_arch',
2578 'newest_all_associations',
2579 'newest_any_associations',
2581 'newest_src_association',
2582 'obsolete_all_associations',
2583 'obsolete_any_associations',
2584 'obsolete_any_by_all_associations',
2585 'obsolete_src_associations',
2587 'src_associations_bin',
2588 'src_associations_src',
2589 'suite_arch_by_name',
# Schema is autoloaded (reflected) from the live database.
2592 for table_name in tables:
2593 table = Table(table_name, self.db_meta, \
2594 autoload=True, useexisting=True)
2595 setattr(self, 'tbl_%s' % table_name, table)
2597 for view_name in views:
2598 view = Table(view_name, self.db_meta, autoload=True)
2599 setattr(self, 'view_%s' % view_name, view)
2601 def __setupmappers(self):
2602 mapper(Architecture, self.tbl_architecture,
2603 properties = dict(arch_id = self.tbl_architecture.c.id,
2604 suites = relation(Suite, secondary=self.tbl_suite_architectures,
2605 order_by=self.tbl_suite.c.suite_name,
2606 backref=backref('architectures', order_by=self.tbl_architecture.c.arch_string))),
2607 extension = validator)
2609 mapper(ACL, self.tbl_acl,
2611 architectures = relation(Architecture, secondary=self.tbl_acl_architecture_map, collection_class=set),
2612 fingerprints = relation(Fingerprint, secondary=self.tbl_acl_fingerprint_map, collection_class=set),
2613 match_keyring = relation(Keyring, primaryjoin=(self.tbl_acl.c.match_keyring_id == self.tbl_keyrings.c.id)),
2614 per_source = relation(ACLPerSource, collection_class=set),
2617 mapper(ACLPerSource, self.tbl_acl_per_source,
2619 acl = relation(ACL),
2620 fingerprint = relation(Fingerprint, primaryjoin=(self.tbl_acl_per_source.c.fingerprint_id == self.tbl_fingerprint.c.id)),
2621 created_by = relation(Fingerprint, primaryjoin=(self.tbl_acl_per_source.c.created_by_id == self.tbl_fingerprint.c.id)),
2624 mapper(Archive, self.tbl_archive,
2625 properties = dict(archive_id = self.tbl_archive.c.id,
2626 archive_name = self.tbl_archive.c.name))
2628 mapper(ArchiveFile, self.tbl_files_archive_map,
2629 properties = dict(archive = relation(Archive, backref='files'),
2630 component = relation(Component),
2631 file = relation(PoolFile, backref='archives')))
2633 mapper(BuildQueue, self.tbl_build_queue,
2634 properties = dict(queue_id = self.tbl_build_queue.c.id,
2635 suite = relation(Suite, primaryjoin=(self.tbl_build_queue.c.suite_id==self.tbl_suite.c.id))))
2637 mapper(DBBinary, self.tbl_binaries,
2638 properties = dict(binary_id = self.tbl_binaries.c.id,
2639 package = self.tbl_binaries.c.package,
2640 version = self.tbl_binaries.c.version,
2641 maintainer_id = self.tbl_binaries.c.maintainer,
2642 maintainer = relation(Maintainer),
2643 source_id = self.tbl_binaries.c.source,
2644 source = relation(DBSource, backref='binaries'),
2645 arch_id = self.tbl_binaries.c.architecture,
2646 architecture = relation(Architecture),
2647 poolfile_id = self.tbl_binaries.c.file,
2648 poolfile = relation(PoolFile),
2649 binarytype = self.tbl_binaries.c.type,
2650 fingerprint_id = self.tbl_binaries.c.sig_fpr,
2651 fingerprint = relation(Fingerprint),
2652 install_date = self.tbl_binaries.c.install_date,
2653 suites = relation(Suite, secondary=self.tbl_bin_associations,
2654 backref=backref('binaries', lazy='dynamic')),
2655 extra_sources = relation(DBSource, secondary=self.tbl_extra_src_references,
2656 backref=backref('extra_binary_references', lazy='dynamic')),
2657 key = relation(BinaryMetadata, cascade='all',
2658 collection_class=attribute_mapped_collection('key'))),
2659 extension = validator)
2661 mapper(Component, self.tbl_component,
2662 properties = dict(component_id = self.tbl_component.c.id,
2663 component_name = self.tbl_component.c.name),
2664 extension = validator)
2666 mapper(DBConfig, self.tbl_config,
2667 properties = dict(config_id = self.tbl_config.c.id))
2669 mapper(DSCFile, self.tbl_dsc_files,
2670 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
2671 source_id = self.tbl_dsc_files.c.source,
2672 source = relation(DBSource),
2673 poolfile_id = self.tbl_dsc_files.c.file,
2674 poolfile = relation(PoolFile)))
2676 mapper(ExternalOverride, self.tbl_external_overrides,
2678 suite_id = self.tbl_external_overrides.c.suite,
2679 suite = relation(Suite),
2680 component_id = self.tbl_external_overrides.c.component,
2681 component = relation(Component)))
2683 mapper(PoolFile, self.tbl_files,
2684 properties = dict(file_id = self.tbl_files.c.id,
2685 filesize = self.tbl_files.c.size),
2686 extension = validator)
2688 mapper(Fingerprint, self.tbl_fingerprint,
2689 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
2690 uid_id = self.tbl_fingerprint.c.uid,
2691 uid = relation(Uid),
2692 keyring_id = self.tbl_fingerprint.c.keyring,
2693 keyring = relation(Keyring),
2694 acl = relation(ACL)),
2695 extension = validator)
2697 mapper(Keyring, self.tbl_keyrings,
2698 properties = dict(keyring_name = self.tbl_keyrings.c.name,
2699 keyring_id = self.tbl_keyrings.c.id,
2700 acl = relation(ACL, primaryjoin=(self.tbl_keyrings.c.acl_id == self.tbl_acl.c.id)))),
2702 mapper(DBChange, self.tbl_changes,
2703 properties = dict(change_id = self.tbl_changes.c.id,
2704 seen = self.tbl_changes.c.seen,
2705 source = self.tbl_changes.c.source,
2706 binaries = self.tbl_changes.c.binaries,
2707 architecture = self.tbl_changes.c.architecture,
2708 distribution = self.tbl_changes.c.distribution,
2709 urgency = self.tbl_changes.c.urgency,
2710 maintainer = self.tbl_changes.c.maintainer,
2711 changedby = self.tbl_changes.c.changedby,
2712 date = self.tbl_changes.c.date,
2713 version = self.tbl_changes.c.version))
2715 mapper(Maintainer, self.tbl_maintainer,
2716 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
2717 maintains_sources = relation(DBSource, backref='maintainer',
2718 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
2719 changed_sources = relation(DBSource, backref='changedby',
2720 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
2721 extension = validator)
2723 mapper(NewComment, self.tbl_new_comments,
2724 properties = dict(comment_id = self.tbl_new_comments.c.id,
2725 policy_queue = relation(PolicyQueue)))
2727 mapper(Override, self.tbl_override,
2728 properties = dict(suite_id = self.tbl_override.c.suite,
2729 suite = relation(Suite, \
2730 backref=backref('overrides', lazy='dynamic')),
2731 package = self.tbl_override.c.package,
2732 component_id = self.tbl_override.c.component,
2733 component = relation(Component, \
2734 backref=backref('overrides', lazy='dynamic')),
2735 priority_id = self.tbl_override.c.priority,
2736 priority = relation(Priority, \
2737 backref=backref('overrides', lazy='dynamic')),
2738 section_id = self.tbl_override.c.section,
2739 section = relation(Section, \
2740 backref=backref('overrides', lazy='dynamic')),
2741 overridetype_id = self.tbl_override.c.type,
2742 overridetype = relation(OverrideType, \
2743 backref=backref('overrides', lazy='dynamic'))))
2745 mapper(OverrideType, self.tbl_override_type,
2746 properties = dict(overridetype = self.tbl_override_type.c.type,
2747 overridetype_id = self.tbl_override_type.c.id))
2749 mapper(PolicyQueue, self.tbl_policy_queue,
2750 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id,
2751 suite = relation(Suite, primaryjoin=(self.tbl_policy_queue.c.suite_id == self.tbl_suite.c.id))))
2753 mapper(PolicyQueueUpload, self.tbl_policy_queue_upload,
2755 changes = relation(DBChange),
2756 policy_queue = relation(PolicyQueue, backref='uploads'),
2757 target_suite = relation(Suite),
2758 source = relation(DBSource),
2759 binaries = relation(DBBinary, secondary=self.tbl_policy_queue_upload_binaries_map),
2762 mapper(PolicyQueueByhandFile, self.tbl_policy_queue_byhand_file,
2764 upload = relation(PolicyQueueUpload, backref='byhand'),
2768 mapper(Priority, self.tbl_priority,
2769 properties = dict(priority_id = self.tbl_priority.c.id))
2771 mapper(Section, self.tbl_section,
2772 properties = dict(section_id = self.tbl_section.c.id,
2773 section=self.tbl_section.c.section))
2775 mapper(SignatureHistory, self.tbl_signature_history)
2777 mapper(DBSource, self.tbl_source,
2778 properties = dict(source_id = self.tbl_source.c.id,
2779 version = self.tbl_source.c.version,
2780 maintainer_id = self.tbl_source.c.maintainer,
2781 poolfile_id = self.tbl_source.c.file,
2782 poolfile = relation(PoolFile),
2783 fingerprint_id = self.tbl_source.c.sig_fpr,
2784 fingerprint = relation(Fingerprint),
2785 changedby_id = self.tbl_source.c.changedby,
2786 srcfiles = relation(DSCFile,
2787 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
2788 suites = relation(Suite, secondary=self.tbl_src_associations,
2789 backref=backref('sources', lazy='dynamic')),
2790 uploaders = relation(Maintainer,
2791 secondary=self.tbl_src_uploaders),
2792 key = relation(SourceMetadata, cascade='all',
2793 collection_class=attribute_mapped_collection('key'))),
2794 extension = validator)
2796 mapper(SrcFormat, self.tbl_src_format,
2797 properties = dict(src_format_id = self.tbl_src_format.c.id,
2798 format_name = self.tbl_src_format.c.format_name))
2800 mapper(Suite, self.tbl_suite,
2801 properties = dict(suite_id = self.tbl_suite.c.id,
2802 policy_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.policy_queue_id == self.tbl_policy_queue.c.id)),
2803 new_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.new_queue_id == self.tbl_policy_queue.c.id)),
2804 copy_queues = relation(BuildQueue,
2805 secondary=self.tbl_suite_build_queue_copy),
2806 srcformats = relation(SrcFormat, secondary=self.tbl_suite_src_formats,
2807 backref=backref('suites', lazy='dynamic')),
2808 archive = relation(Archive, backref='suites'),
2809 acls = relation(ACL, secondary=self.tbl_suite_acl_map, collection_class=set),
2810 components = relation(Component, secondary=self.tbl_component_suite,
2811 order_by=self.tbl_component.c.ordering,
2812 backref=backref('suites'))),
2813 extension = validator)
2815 mapper(Uid, self.tbl_uid,
2816 properties = dict(uid_id = self.tbl_uid.c.id,
2817 fingerprint = relation(Fingerprint)),
2818 extension = validator)
2820 mapper(BinContents, self.tbl_bin_contents,
2822 binary = relation(DBBinary,
2823 backref=backref('contents', lazy='dynamic', cascade='all')),
2824 file = self.tbl_bin_contents.c.file))
2826 mapper(SrcContents, self.tbl_src_contents,
2828 source = relation(DBSource,
2829 backref=backref('contents', lazy='dynamic', cascade='all')),
2830 file = self.tbl_src_contents.c.file))
2832 mapper(MetadataKey, self.tbl_metadata_keys,
2834 key_id = self.tbl_metadata_keys.c.key_id,
2835 key = self.tbl_metadata_keys.c.key))
2837 mapper(BinaryMetadata, self.tbl_binaries_metadata,
2839 binary_id = self.tbl_binaries_metadata.c.bin_id,
2840 binary = relation(DBBinary),
2841 key_id = self.tbl_binaries_metadata.c.key_id,
2842 key = relation(MetadataKey),
2843 value = self.tbl_binaries_metadata.c.value))
2845 mapper(SourceMetadata, self.tbl_source_metadata,
2847 source_id = self.tbl_source_metadata.c.src_id,
2848 source = relation(DBSource),
2849 key_id = self.tbl_source_metadata.c.key_id,
2850 key = relation(MetadataKey),
2851 value = self.tbl_source_metadata.c.value))
2853 mapper(VersionCheck, self.tbl_version_check,
2855 suite_id = self.tbl_version_check.c.suite,
2856 suite = relation(Suite, primaryjoin=self.tbl_version_check.c.suite==self.tbl_suite.c.id),
2857 reference_id = self.tbl_version_check.c.reference,
2858 reference = relation(Suite, primaryjoin=self.tbl_version_check.c.reference==self.tbl_suite.c.id, lazy='joined')))
2860 ## Connection functions
2861 def __createconn(self):
2862 from config import Config
2864 if cnf.has_key("DB::Service"):
2865 connstr = "postgresql://service=%s" % cnf["DB::Service"]
2866 elif cnf.has_key("DB::Host"):
2868 connstr = "postgresql://%s" % cnf["DB::Host"]
2869 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
2870 connstr += ":%s" % cnf["DB::Port"]
2871 connstr += "/%s" % cnf["DB::Name"]
2874 connstr = "postgresql:///%s" % cnf["DB::Name"]
2875 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
2876 connstr += "?port=%s" % cnf["DB::Port"]
2878 engine_args = { 'echo': self.debug }
2879 if cnf.has_key('DB::PoolSize'):
2880 engine_args['pool_size'] = int(cnf['DB::PoolSize'])
2881 if cnf.has_key('DB::MaxOverflow'):
2882 engine_args['max_overflow'] = int(cnf['DB::MaxOverflow'])
2883 if sa_major_version != '0.5' and cnf.has_key('DB::Unicode') and \
2884 cnf['DB::Unicode'] == 'false':
2885 engine_args['use_native_unicode'] = False
2887 # Monkey patch a new dialect in in order to support service= syntax
2888 import sqlalchemy.dialects.postgresql
2889 from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
2890 class PGDialect_psycopg2_dak(PGDialect_psycopg2):
2891 def create_connect_args(self, url):
2892 if str(url).startswith('postgresql://service='):
2894 servicename = str(url)[21:]
2895 return (['service=%s' % servicename], {})
2897 return PGDialect_psycopg2.create_connect_args(self, url)
2899 sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak
2902 self.db_pg = create_engine(connstr, **engine_args)
2903 self.db_meta = MetaData()
2904 self.db_meta.bind = self.db_pg
2905 self.db_smaker = sessionmaker(bind=self.db_pg,
2909 self.__setuptables()
2910 self.__setupmappers()
2912 except OperationalError as e:
2914 utils.fubar("Cannot connect to database (%s)" % str(e))
2916 self.pid = os.getpid()
2918 def session(self, work_mem = 0):
2920 Returns a new session object. If a work_mem parameter is provided a new
2921 transaction is started and the work_mem parameter is set for this
2922 transaction. The work_mem parameter is measured in MB. A default value
2923 will be used if the parameter is not set.
2925 # reinitialize DBConn in new processes
2926 if self.pid != os.getpid():
2929 session = self.db_smaker()
2931 session.execute("SET LOCAL work_mem TO '%d MB'" % work_mem)
2934 __all__.append('DBConn')