5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
37 import daklib.daksubprocess
39 from os.path import normpath
50 import simplejson as json
52 from datetime import datetime, timedelta
53 from errno import ENOENT
54 from tempfile import mkstemp, mkdtemp
55 from tarfile import TarFile
57 from inspect import getargspec
60 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
62 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
63 backref, MapperExtension, EXT_CONTINUE, object_mapper, clear_mappers
64 from sqlalchemy import types as sqltypes
65 from sqlalchemy.orm.collections import attribute_mapped_collection
66 from sqlalchemy.ext.associationproxy import association_proxy
68 # Don't remove this, we re-export the exceptions to scripts which import us
69 from sqlalchemy.exc import *
70 from sqlalchemy.orm.exc import NoResultFound
72 # Only import Config until Queue stuff is changed to store its config
74 from config import Config
75 from textutils import fix_maintainer
76 from dak_exceptions import DBUpdateError, NoSourceFieldError, FileExistsError
# Silence two known-noisy SQLAlchemy warnings (the postgres->postgresql
# dialect rename and partial-index reflection).
# NOTE(review): the closing arguments of both filterwarnings() calls are
# elided in this extract — confirm against the original file.
78 # suppress some deprecation warnings in squeeze related to sqlalchemy
80 warnings.filterwarnings('ignore', \
81 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
83 warnings.filterwarnings('ignore', \
84 "Predicate of partial index .* ignored during reflection", \
88 ################################################################################
# Pick a base class for the custom 'debversion' column type:
# sqltypes.UserDefinedType exists on SQLAlchemy 0.6+, TypeEngine is the
# 0.5 fallback.  NOTE(review): the try/except that selects between these
# two assignments is elided in this extract; as shown, the second
# assignment would always win — verify against the original file.
90 # Patch in support for the debversion field type so that it works during
94 # that is for sqlalchemy 0.6
95 UserDefinedType = sqltypes.UserDefinedType
97 # this one for sqlalchemy 0.5
98 UserDefinedType = sqltypes.TypeEngine
# Column type mapped to PostgreSQL's 'debversion'.  The method bodies
# (which presumably return the column spec / pass values through
# unchanged) are elided in this extract — TODO confirm upstream.
100 class DebVersion(UserDefinedType):
101     def get_col_spec(self):
104     def bind_processor(self, dialect):
107     # ' = None' is needed for sqlalchemy 0.5:
108     def result_processor(self, dialect, coltype = None):
# Register DebVersion so reflection recognises the 'debversion' column
# type on the SQLAlchemy versions dak supports (0.5-0.8); refuse to run
# on anything else.
111 sa_major_version = sqlalchemy.__version__[0:3]
112 if sa_major_version in ["0.5", "0.6", "0.7", "0.8"]:
113     from sqlalchemy.databases import postgres
114     postgres.ischema_names['debversion'] = DebVersion
# NOTE(review): the 'else:' introducing this raise is elided in this extract.
116     raise Exception("dak only ported to SQLA versions 0.5 to 0.8. See daklib/dbconn.py")
118 ################################################################################
120 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
122 ################################################################################
# Decorator: supplies a DBConn session when the caller did not pass one,
# and attaches session.commit_or_flush (commit for a private session,
# flush for a caller-owned one).  NOTE(review): several lines (docstring
# markers, the if/elif dispatch, the try/finally that closes a private
# session, and 'return wrapped') are elided in this extract — code below
# is kept verbatim.
124 def session_wrapper(fn):
126     Wrapper around common ".., session=None):" handling. If the wrapped
127     function is called without passing 'session', we create a local one
128     and destroy it when the function ends.
130     Also attaches a commit_or_flush method to the session; if we created a
131     local session, this is a synonym for session.commit(), otherwise it is a
132     synonym for session.flush().
135     def wrapped(*args, **kwargs):
136         private_transaction = False
138         # Find the session object
139         session = kwargs.get('session')
142         if len(args) <= len(getargspec(fn)[0]) - 1:
143             # No session specified as last argument or in kwargs
144             private_transaction = True
145             session = kwargs['session'] = DBConn().session()
147         # Session is last argument in args
151             session = args[-1] = DBConn().session()
152             private_transaction = True
154         if private_transaction:
155             session.commit_or_flush = session.commit
157             session.commit_or_flush = session.flush
160             return fn(*args, **kwargs)
162         if private_transaction:
163             # We created a session; close it.
166     wrapped.__doc__ = fn.__doc__
167     wrapped.func_name = fn.func_name
171 __all__.append('session_wrapper')
173 ################################################################################
# Base class for all mapped ORM classes: JSON/repr/str helpers driven by
# the subclass's properties() list, plus NOT NULL validation and clone().
# NOTE(review): many lines (docstring markers, else branches, several
# return statements) are elided in this extract — code kept verbatim.
175 class ORMObject(object):
177     ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
178     derived classes must implement the properties() method.
181     def properties(self):
183         This method should be implemented by all derived classes and returns a
184         list of the important properties. The properties 'created' and
185         'modified' will be added automatically. A suffix '_count' should be
186         added to properties that are lists or query objects. The most important
187         property name should be returned as the first element in the list
188         because it is used by repr().
194         Returns a JSON representation of the object based on the properties
195         returned from the properties() method.
198         # add created and modified
199         all_properties = self.properties() + ['created', 'modified']
200         for property in all_properties:
201             # check for list or query
202             if property[-6:] == '_count':
203                 real_property = property[:-6]
204                 if not hasattr(self, real_property):
206                 value = getattr(self, real_property)
207                 if hasattr(value, '__len__'):
210                 elif hasattr(value, 'count'):
211                     # query (but not during validation)
212                     if self.in_validation:
214                     value = value.count()
216                     raise KeyError('Do not understand property %s.' % property)
218                 if not hasattr(self, property):
221                 value = getattr(self, property)
225                 elif isinstance(value, ORMObject):
226                     # use repr() for ORMObject types
229                 # we want a string for all other types because json cannot
232             data[property] = value
233         return json.dumps(data)
237         Returns the name of the class.
239         return type(self).__name__
243         Returns a short string representation of the object using the first
244         element from the properties() method.
246         primary_property = self.properties()[0]
247         value = getattr(self, primary_property)
248         return '<%s %s>' % (self.classname(), str(value))
252         Returns a human readable form of the object using the properties()
255         return '<%s %s>' % (self.classname(), self.json())
257     def not_null_constraints(self):
259         Returns a list of properties that must be not NULL. Derived classes
260         should override this method if needed.
264     validation_message = \
265         "Validation failed because property '%s' must not be empty in object\n%s"
267     in_validation = False
271         This function validates the not NULL constraints as returned by
272         not_null_constraints(). It raises the DBUpdateError exception if
275         for property in self.not_null_constraints():
276             # TODO: It is a bit awkward that the mapper configuration allow
277             # directly setting the numeric _id columns. We should get rid of it
279             if hasattr(self, property + '_id') and \
280                 getattr(self, property + '_id') is not None:
282             if not hasattr(self, property) or getattr(self, property) is None:
283                 # str() might lead to races due to a 2nd flush
284                 self.in_validation = True
285                 message = self.validation_message % (property, str(self))
286                 self.in_validation = False
287                 raise DBUpdateError(message)
291     def get(cls, primary_key, session = None):
293         This is a support function that allows getting an object by its primary
296         Architecture.get(3[, session])
298         instead of the more verbose
300         session.query(Architecture).get(3)
302         return session.query(cls).get(primary_key)
304     def session(self, replace = False):
306         Returns the current session that is associated with the object. May
307         return None if object is in detached state.
310         return object_session(self)
312     def clone(self, session = None):
314         Clones the current object in a new session and returns the new clone. A
315         fresh session is created if the optional session parameter is not
316         provided. The function will fail if a session is provided and has
319         RATIONALE: SQLAlchemy's session is not thread safe. This method clones
320         an existing object to allow several threads to work with their own
321         instances of an ORMObject.
323         WARNING: Only persistent (committed) objects can be cloned. Changes
324         made to the original object that are not committed yet will get lost.
325         The session of the new object will always be rolled back to avoid
329         if self.session() is None:
330             raise RuntimeError( \
331                 'Method clone() failed for detached object:\n%s' % self)
332         self.session().flush()
333         mapper = object_mapper(self)
334         primary_key = mapper.primary_key_from_instance(self)
335         object_class = self.__class__
337             session = DBConn().session()
338         elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
339             raise RuntimeError( \
340                 'Method clone() failed due to unflushed changes in session.')
341         new_object = session.query(object_class).get(primary_key)
343         if new_object is None:
344             raise RuntimeError( \
345                 'Method clone() failed for non-persistent object:\n%s' % self)
348 __all__.append('ORMObject')
350 ################################################################################
# MapperExtension hooking instance.validate() into before_update /
# before_insert; the shared 'validator' instance below is wired into the
# individual mappers.  NOTE(review): the method bodies (presumably
# calling instance.validate() and returning EXT_CONTINUE) are elided
# in this extract.
352 class Validator(MapperExtension):
354     This class calls the validate() method for each instance for the
355     'before_update' and 'before_insert' events. A global object validator is
356     used for configuring the individual mappers.
359     def before_update(self, mapper, connection, instance):
363     def before_insert(self, mapper, connection, instance):
367 validator = Validator()
369 ################################################################################
# ORM class for upload ACLs; only the __repr__ body is visible in this
# extract (the 'def __repr__(self):' line is elided).
371 class ACL(ORMObject):
373         return "<ACL {0}>".format(self.name)
375 __all__.append('ACL')
# ORM class for per-source ACL entries; only the __repr__ body is
# visible in this extract (the 'def __repr__(self):' line is elided).
377 class ACLPerSource(ORMObject):
379         return "<ACLPerSource acl={0} fingerprint={1} source={2} reason={3}>".format(self.acl.name, self.fingerprint.fingerprint, self.source, self.reason)
381 __all__.append('ACLPerSource')
383 ################################################################################
class Architecture(ORMObject):
    """A Debian architecture (e.g. 'amd64') known to the archive."""

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Anything other than a plain string defers to the default
        # comparison machinery.
        if not isinstance(val, str):
            return NotImplemented
        # Allow comparing an Architecture directly against its name.
        return self.arch_string == val

    def __ne__(self, val):
        if not isinstance(val, str):
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # First element is the primary property used by ORMObject.__repr__.
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']
408 __all__.append('Architecture')
# Look up an Architecture row by name; returns None when absent.
# NOTE(review): the @session_wrapper decorator, the try/return around
# q.one() and the 'return None' in the except branch are elided in this
# extract — code kept verbatim.
411 def get_architecture(architecture, session=None):
413     Returns database id for given C{architecture}.
415     @type architecture: string
416     @param architecture: The name of the architecture
418     @type session: Session
419     @param session: Optional SQLA session object (a temporary one will be
420     generated if not supplied)
423     @return: Architecture object for the given arch (None if not present)
426     q = session.query(Architecture).filter_by(arch_string=architecture)
430     except NoResultFound:
433 __all__.append('get_architecture')
435 ################################################################################
# ORM class for archives; __init__ body and the 'def __repr__' line are
# elided in this extract.
437 class Archive(object):
438     def __init__(self, *args, **kwargs):
442         return '<Archive %s>' % self.archive_name
444 __all__.append('Archive')
# Case-insensitive lookup of an Archive row by name; returns None when
# absent.  NOTE(review): the @session_wrapper decorator and the
# try/except return lines are elided in this extract.
447 def get_archive(archive, session=None):
449     returns database id for given C{archive}.
451     @type archive: string
452     @param archive: the name of the archive
454     @type session: Session
455     @param session: Optional SQLA session object (a temporary one will be
456     generated if not supplied)
459     @return: Archive object for the given name (None if not present)
462     archive = archive.lower()
464     q = session.query(Archive).filter_by(archive_name=archive)
468     except NoResultFound:
471 __all__.append('get_archive')
473 ################################################################################
# Association of a PoolFile with an archive and component; the
# 'self.file = file' assignment and the fullpath property definition
# lines are elided in this extract.
475 class ArchiveFile(object):
476     def __init__(self, archive=None, component=None, file=None):
477         self.archive = archive
478         self.component = component
482         return os.path.join(self.archive.path, 'pool', self.component.component_name, self.file.filename)
484 __all__.append('ArchiveFile')
486 ################################################################################
# One path contained in a binary package; the __init__ assignments are
# elided in this extract.
488 class BinContents(ORMObject):
489     def __init__(self, file = None, binary = None):
493     def properties(self):
494         return ['file', 'binary']
496 __all__.append('BinContents')
498 ################################################################################
# ORM class for binary packages.  NOTE(review): several lines are elided
# in this extract ('self.source = source' in __init__, the def line for
# the binary_id getter, try/except around decoding in scan_contents, and
# cleanup/returns in read_control) — code kept verbatim.
500 class DBBinary(ORMObject):
501     def __init__(self, package = None, source = None, version = None, \
502         maintainer = None, architecture = None, poolfile = None, \
503         binarytype = 'deb', fingerprint=None):
504         self.package = package
506         self.version = version
507         self.maintainer = maintainer
508         self.architecture = architecture
509         self.poolfile = poolfile
510         self.binarytype = binarytype
511         self.fingerprint = fingerprint
515         return self.binary_id
517     def properties(self):
518         return ['package', 'version', 'maintainer', 'source', 'architecture', \
519             'poolfile', 'binarytype', 'fingerprint', 'install_date', \
520             'suites_count', 'binary_id', 'contents_count', 'extra_sources']
522     def not_null_constraints(self):
523         return ['package', 'version', 'maintainer', 'source', 'poolfile', \
526     metadata = association_proxy('key', 'value')
528     def scan_contents(self):
530         Yields the contents of the package. Only regular files are yielded and
531         the path names are normalized after converting them from either utf-8
532         or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the
533         package does not contain any regular file.
535         fullpath = self.poolfile.fullpath
536         dpkg_cmd = ('dpkg-deb', '--fsys-tarfile', fullpath)
537         dpkg = daklib.daksubprocess.Popen(dpkg_cmd, stdout=subprocess.PIPE)
538         tar = TarFile.open(fileobj = dpkg.stdout, mode = 'r|')
539         for member in tar.getmembers():
540             if not member.isdir():
541                 name = normpath(member.name)
542                 # enforce proper utf-8 encoding
545                 except UnicodeDecodeError:
546                     name = name.decode('iso8859-1').encode('utf-8')
552     def read_control(self):
554         Reads the control information from a binary.
557         @return: stanza text of the control section.
560         fullpath = self.poolfile.fullpath
561         deb_file = open(fullpath, 'r')
562         stanza = utils.deb_extract_control(deb_file)
567     def read_control_fields(self):
569         Reads the control information from a binary and return
573         @return: fields of the control section as a dictionary.
576         stanza = self.read_control()
577         return apt_pkg.TagSection(stanza)
579 __all__.append('DBBinary')
# All suites that contain a binary package with the given name.
# NOTE(review): the @session_wrapper decorator and docstring markers are
# elided in this extract.
582 def get_suites_binary_in(package, session=None):
584     Returns list of Suite objects which given C{package} name is in
587     @param package: DBBinary package name to search for
590     @return: list of Suite objects for the given package
593     return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()
595 __all__.append('get_suites_binary_in')
# Component of the newest matching binary in the given suites (optionally
# filtered by architecture), or None.  NOTE(review): the @session_wrapper
# decorator and the 'if binary is None: return None' lines are elided in
# this extract.
598 def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
600     Returns the component name of the newest binary package in suite_list or
601     None if no package is found. The result can be optionally filtered by a list
602     of architecture names.
605     @param package: DBBinary package name to search for
607     @type suite_list: list of str
608     @param suite_list: list of suite_name items
610     @type arch_list: list of str
611     @param arch_list: optional list of arch_string items that defaults to []
613     @rtype: str or NoneType
614     @return: name of component or None
617     q = session.query(DBBinary).filter_by(package = package). \
618         join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
619     if len(arch_list) > 0:
620         q = q.join(DBBinary.architecture). \
621             filter(Architecture.arch_string.in_(arch_list))
622     binary = q.order_by(desc(DBBinary.version)).first()
626         return binary.poolfile.component.component_name
628 __all__.append('get_component_by_package_suite')
630 ################################################################################
# ORM class for build queues; __init__ body and the 'def __repr__' line
# are elided in this extract.
632 class BuildQueue(object):
633     def __init__(self, *args, **kwargs):
637         return '<BuildQueue %s>' % self.queue_name
639 __all__.append('BuildQueue')
641 ################################################################################
class Component(ORMObject):
    """An archive component (e.g. 'main', 'contrib', 'non-free')."""

    def __init__(self, component_name = None):
        self.component_name = component_name

    def __eq__(self, val):
        # Anything other than a plain string defers to the default
        # comparison machinery.
        if not isinstance(val, str):
            return NotImplemented
        # Allow comparing a Component directly against its name.
        return self.component_name == val

    def __ne__(self, val):
        if not isinstance(val, str):
            return NotImplemented
        return self.component_name != val

    def properties(self):
        # First element is the primary property used by ORMObject.__repr__.
        return ['component_name', 'component_id', 'description', \
            'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        return ['component_name']
667 __all__.append('Component')
# Case-insensitive lookup of a Component row by name; returns None when
# absent.  NOTE(review): the @session_wrapper decorator and try/except
# return lines are elided in this extract.
670 def get_component(component, session=None):
672     Returns database id for given C{component}.
674     @type component: string
675     @param component: The name of the override type
678     @return: the database id for the given component
681     component = component.lower()
683     q = session.query(Component).filter_by(component_name=component)
687     except NoResultFound:
690 __all__.append('get_component')
# Apply dak.conf ComponentMappings to a component name and return the
# mapped Component row (or None).  NOTE(review): the cnf = Config() line
# and the 'component_name = dst' rebinding inside the loop are elided in
# this extract.
693 def get_mapped_component(component_name, session=None):
694     """get component after mappings
696     Evaluate component mappings from ComponentMappings in dak.conf for the
697     given component name.
699     @todo: ansgar wants to get rid of this. It's currently only used for
702     @type component_name: str
703     @param component_name: component name
705     @param session: database session
707     @rtype: L{daklib.dbconn.Component} or C{None}
708     @return: component after applying maps or C{None}
711     for m in cnf.value_list("ComponentMappings"):
712         (src, dst) = m.split()
713         if component_name == src:
715     component = session.query(Component).filter_by(component_name=component_name).first()
718 __all__.append('get_mapped_component')
# Names of all components in the database.  NOTE(review): the
# @session_wrapper decorator is elided in this extract.
721 def get_component_names(session=None):
723     Returns list of strings of component names.
726     @return: list of strings of component names
729     return [ x.component_name for x in session.query(Component).all() ]
731 __all__.append('get_component_names')
733 ################################################################################
# ORM class for config table rows; __init__ body and the 'def __repr__'
# line are elided in this extract.
735 class DBConfig(object):
736     def __init__(self, *args, **kwargs):
740         return '<DBConfig %s>' % self.name
742 __all__.append('DBConfig')
744 ################################################################################
# Get-or-create a content filename row and return its id.
# NOTE(review): the @session_wrapper decorator, 'try:', 'session.add(cf)'
# and the trailing 'return ret' are elided in this extract.
747 def get_or_set_contents_file_id(filename, session=None):
749     Returns database id for given filename.
751     If no matching file is found, a row is inserted.
753     @type filename: string
754     @param filename: The filename
755     @type session: SQLAlchemy
756     @param session: Optional SQL session object (a temporary one will be
757     generated if not supplied). If not passed, a commit will be performed at
758     the end of the function, otherwise the caller is responsible for committing.
761     @return: the database id for the given component
764     q = session.query(ContentFilename).filter_by(filename=filename)
767         ret = q.one().cafilename_id
768     except NoResultFound:
769         cf = ContentFilename()
770         cf.filename = filename
772         session.commit_or_flush()
773         ret = cf.cafilename_id
777 __all__.append('get_or_set_contents_file_id')
# Raw-SQL query for the Contents listing of a suite/overridetype pair,
# optionally narrowed to one section.  NOTE(review): the @session_wrapper
# decorator, docstring markers and part of the SELECT column list are
# elided in this extract — the SQL text is runtime data and is kept
# byte-for-byte.
780 def get_contents(suite, overridetype, section=None, session=None):
782     Returns contents for a suite / overridetype combination, limiting
783     to a section if not None.
786     @param suite: Suite object
788     @type overridetype: OverrideType
789     @param overridetype: OverrideType object
791     @type section: Section
792     @param section: Optional section object to limit results to
794     @type session: SQLAlchemy
795     @param session: Optional SQL session object (a temporary one will be
796     generated if not supplied)
799     @return: ResultsProxy object set up to return tuples of (filename, section,
803     # find me all of the contents for a given suite
804     contents_q = """SELECT (p.path||'/'||n.file) AS fn,
808     FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
809     JOIN content_file_names n ON (c.filename=n.id)
810     JOIN binaries b ON (b.id=c.binary_pkg)
811     JOIN override o ON (o.package=b.package)
812     JOIN section s ON (s.id=o.section)
813     WHERE o.suite = :suiteid AND o.type = :overridetypeid
814     AND b.type=:overridetypename"""
816     vals = {'suiteid': suite.suite_id,
817         'overridetypeid': overridetype.overridetype_id,
818         'overridetypename': overridetype.overridetype}
820     if section is not None:
821         contents_q += " AND s.id = :sectionid"
822         vals['sectionid'] = section.section_id
824     contents_q += " ORDER BY fn"
826     return session.execute(contents_q, vals)
828 __all__.append('get_contents')
830 ################################################################################
# ORM class for content file paths; __init__ body and the 'def __repr__'
# line are elided in this extract.
832 class ContentFilepath(object):
833     def __init__(self, *args, **kwargs):
837         return '<ContentFilepath %s>' % self.filepath
839 __all__.append('ContentFilepath')
# Get-or-create a content filepath row and return its id (mirrors
# get_or_set_contents_file_id).  NOTE(review): the @session_wrapper
# decorator, 'try:', 'session.add(cf)' and 'return ret' are elided in
# this extract.
842 def get_or_set_contents_path_id(filepath, session=None):
844     Returns database id for given path.
846     If no matching file is found, a row is inserted.
848     @type filepath: string
849     @param filepath: The filepath
851     @type session: SQLAlchemy
852     @param session: Optional SQL session object (a temporary one will be
853     generated if not supplied). If not passed, a commit will be performed at
854     the end of the function, otherwise the caller is responsible for committing.
857     @return: the database id for the given path
860     q = session.query(ContentFilepath).filter_by(filepath=filepath)
863         ret = q.one().cafilepath_id
864     except NoResultFound:
865         cf = ContentFilepath()
866         cf.filepath = filepath
868         session.commit_or_flush()
869         ret = cf.cafilepath_id
873 __all__.append('get_or_set_contents_path_id')
875 ################################################################################
# ORM class for content associations; __init__ body and the
# 'def __repr__' line are elided in this extract.
877 class ContentAssociation(object):
878     def __init__(self, *args, **kwargs):
882         return '<ContentAssociation %s>' % self.ca_id
884 __all__.append('ContentAssociation')
# Bulk-insert bin_contents rows for one binary, stripping a leading './'
# from each path.  NOTE(review): the surrounding try/except, the commit
# and 'return True/False' paths, and the rollback after the traceback
# are elided in this extract — code kept verbatim.
886 def insert_content_paths(binary_id, fullpaths, session=None):
888     Make sure given path is associated with given binary id
891     @param binary_id: the id of the binary
892     @type fullpaths: list
893     @param fullpaths: the list of paths of the file being associated with the binary
894     @type session: SQLAlchemy session
895     @param session: Optional SQLAlchemy session. If this is passed, the caller
896     is responsible for ensuring a transaction has begun and committing the
897     results or rolling back based on the result code. If not passed, a commit
898     will be performed at the end of the function, otherwise the caller is
899     responsible for committing.
901     @return: True upon success
906         session = DBConn().session()
911         def generate_path_dicts():
912             for fullpath in fullpaths:
913                 if fullpath.startswith( './' ):
914                     fullpath = fullpath[2:]
916                 yield {'filename':fullpath, 'id': binary_id }
918         for d in generate_path_dicts():
919             session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
928         traceback.print_exc()
930         # Only rollback if we set up the session ourself
937 __all__.append('insert_content_paths')
939 ################################################################################
# ORM class for .dsc file rows; __init__ body and the 'def __repr__'
# line are elided in this extract.
941 class DSCFile(object):
942     def __init__(self, *args, **kwargs):
946         return '<DSCFile %s>' % self.dscfile_id
948 __all__.append('DSCFile')
# Query DSCFile rows by any combination of ids; all filters optional.
# NOTE(review): the @session_wrapper decorator and the trailing
# 'return q.all()' are elided in this extract.
951 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
953     Returns a list of DSCFiles which may be empty
955     @type dscfile_id: int (optional)
956     @param dscfile_id: the dscfile_id of the DSCFiles to find
958     @type source_id: int (optional)
959     @param source_id: the source id related to the DSCFiles to find
961     @type poolfile_id: int (optional)
962     @param poolfile_id: the poolfile id related to the DSCFiles to find
965     @return: Possibly empty list of DSCFiles
968     q = session.query(DSCFile)
970     if dscfile_id is not None:
971         q = q.filter_by(dscfile_id=dscfile_id)
973     if source_id is not None:
974         q = q.filter_by(source_id=source_id)
976     if poolfile_id is not None:
977         q = q.filter_by(poolfile_id=poolfile_id)
981 __all__.append('get_dscfiles')
983 ################################################################################
# ORM class for external overrides; __init__ body and the 'def __repr__'
# line are elided in this extract.
985 class ExternalOverride(ORMObject):
986     def __init__(self, *args, **kwargs):
990         return '<ExternalOverride %s = %s: %s>' % (self.package, self.key, self.value)
992 __all__.append('ExternalOverride')
994 ################################################################################
# ORM class for pool files.  NOTE(review): many lines are elided in this
# extract (the md5sum parameter on __init__'s continuation, the property
# decorators and def lines for fullpath/component/basename, and several
# return statements) — code kept verbatim.
996 class PoolFile(ORMObject):
997     def __init__(self, filename = None, filesize = -1, \
999         self.filename = filename
1000         self.filesize = filesize
1001         self.md5sum = md5sum
1005         session = DBConn().session().object_session(self)
1006         af = session.query(ArchiveFile).join(Archive) \
1007             .filter(ArchiveFile.file == self) \
1008             .order_by(Archive.tainted.desc()).first()
1012     def component(self):
1013         session = DBConn().session().object_session(self)
1014         component_id = session.query(ArchiveFile.component_id).filter(ArchiveFile.file == self) \
1015             .group_by(ArchiveFile.component_id).one()
1016         return session.query(Component).get(component_id)
1020         return os.path.basename(self.filename)
1022     def is_valid(self, filesize = -1, md5sum = None):
1023         return self.filesize == long(filesize) and self.md5sum == md5sum
1025     def properties(self):
1026         return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
1027             'sha256sum', 'source', 'binary', 'last_used']
1029     def not_null_constraints(self):
1030         return ['filename', 'md5sum']
1032     def identical_to(self, filename):
1034         compare size and hash with the given file
1037         @return: true if the given file has the same size and hash as this object; false otherwise
1039         st = os.stat(filename)
1040         if self.filesize != st.st_size:
1043         f = open(filename, "r")
1044         sha256sum = apt_pkg.sha256sum(f)
1045         if sha256sum != self.sha256sum:
1050 __all__.append('PoolFile')
# PoolFiles whose filename ends with the given name (LIKE '%/<name>').
# NOTE(review): the @session_wrapper decorator and 'return q.all()' are
# elided in this extract.
1053 def get_poolfile_like_name(filename, session=None):
1055     Returns an array of PoolFile objects which are like the given name
1057     @type filename: string
1058     @param filename: the filename of the file to check against the DB
1061     @return: array of PoolFile objects
1064     # TODO: There must be a way of properly using bind parameters with %FOO%
1065     q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1069 __all__.append('get_poolfile_like_name')
1071 ################################################################################
# ORM class for key fingerprints; the continuation line of properties()
# is elided in this extract.
1073 class Fingerprint(ORMObject):
1074     def __init__(self, fingerprint = None):
1075         self.fingerprint = fingerprint
1077     def properties(self):
1078         return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
1081     def not_null_constraints(self):
1082         return ['fingerprint']
1084 __all__.append('Fingerprint')
# Look up a Fingerprint row; returns None when absent.  NOTE(review):
# the @session_wrapper decorator and try/except return lines are elided
# in this extract.
1087 def get_fingerprint(fpr, session=None):
1089     Returns Fingerprint object for given fpr.
1092     @param fpr: The fpr to find / add
1094     @type session: SQLAlchemy
1095     @param session: Optional SQL session object (a temporary one will be
1096     generated if not supplied).
1099     @return: the Fingerprint object for the given fpr or None
1102     q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1106     except NoResultFound:
1111 __all__.append('get_fingerprint')
# Get-or-create a Fingerprint row.  NOTE(review): the @session_wrapper
# decorator, 'try:', the success-path assignment and the trailing
# 'return fingerprint' are elided in this extract.
1114 def get_or_set_fingerprint(fpr, session=None):
1116     Returns Fingerprint object for given fpr.
1118     If no matching fpr is found, a row is inserted.
1121     @param fpr: The fpr to find / add
1123     @type session: SQLAlchemy
1124     @param session: Optional SQL session object (a temporary one will be
1125     generated if not supplied). If not passed, a commit will be performed at
1126     the end of the function, otherwise the caller is responsible for commiting.
1127     A flush will be performed either way.
1130     @return: the Fingerprint object for the given fpr
1133     q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1137     except NoResultFound:
1138         fingerprint = Fingerprint()
1139         fingerprint.fingerprint = fpr
1140         session.add(fingerprint)
1141         session.commit_or_flush()
1146 __all__.append('get_or_set_fingerprint')
1148 ################################################################################
# Build a display name from an LDAP entry's cn/mn/sn attributes, skipping
# empty and '-' placeholders.  NOTE(review): the 'name = []' initializer,
# the ret = entry.get(...) lookup and 'name.append(...)' lines are elided
# in this extract.
1150 # Helper routine for Keyring class
1151 def get_ldap_name(entry):
1153     for k in ["cn", "mn", "sn"]:
1155         if ret and ret[0] != "" and ret[0] != "-":
1157     return " ".join(name)
1159 ################################################################################
1161 class Keyring(object):
1162 gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
1163 " --with-colons --fingerprint --fingerprint"
1168 def __init__(self, *args, **kwargs):
1172 return '<Keyring %s>' % self.keyring_name
1174 def de_escape_gpg_str(self, txt):
1175 esclist = re.split(r'(\\x..)', txt)
1176 for x in range(1,len(esclist),2):
1177 esclist[x] = "%c" % (int(esclist[x][2:],16))
1178 return "".join(esclist)
1180 def parse_address(self, uid):
1181 """parses uid and returns a tuple of real name and email address"""
1183 (name, address) = email.Utils.parseaddr(uid)
1184 name = re.sub(r"\s*[(].*[)]", "", name)
1185 name = self.de_escape_gpg_str(name)
1188 return (name, address)
1190 def load_keys(self, keyring):
1191 if not self.keyring_id:
1192 raise Exception('Must be initialized with database information')
1194 k = os.popen(self.gpg_invocation % keyring, "r")
1196 need_fingerprint = False
1199 field = line.split(":")
1200 if field[0] == "pub":
1203 (name, addr) = self.parse_address(field[9])
1205 self.keys[key]["email"] = addr
1206 self.keys[key]["name"] = name
1207 need_fingerprint = True
1208 elif key and field[0] == "uid":
1209 (name, addr) = self.parse_address(field[9])
1210 if "email" not in self.keys[key] and "@" in addr:
1211 self.keys[key]["email"] = addr
1212 self.keys[key]["name"] = name
1213 elif need_fingerprint and field[0] == "fpr":
1214 self.keys[key]["fingerprints"] = [field[9]]
1215 self.fpr_lookup[field[9]] = key
1216 need_fingerprint = False
def import_users_from_ldap(self, session):
    """Look up the Debian LDAP uid for every fingerprint in this keyring
    and return ({uid: (keyid, name)}, {keyid: (uid, name)}) mappings."""
    LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
    LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
    ca_cert_file = cnf.get('Import-LDAP-Fingerprints::CACertFile')

    l = ldap.open(LDAPServer)

    # TODO: This should request a new context and use
    # connection-specific options (i.e. "l.set_option(...)")

    # Request a new TLS context. If there was already one, libldap
    # would not change the TLS options (like which CAs to trust).
    #l.set_option(ldap.OPT_X_TLS_NEWCTX, True)
    ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_HARD)
    #ldap.set_option(ldap.OPT_X_TLS_CACERTDIR, None)
    ldap.set_option(ldap.OPT_X_TLS_CACERTFILE, ca_cert_file)

    # Anonymous bind; the fingerprint/uid data is public.
    l.simple_bind_s("","")
    Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
           "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
           ["uid", "keyfingerprint", "cn", "mn", "sn"])

    ldap_fin_uid_id = {}

        uid = entry["uid"][0]
        name = get_ldap_name(entry)
        fingerprints = entry["keyFingerPrint"]
        for f in fingerprints:
            # Only fingerprints present in this keyring are of interest.
            key = self.fpr_lookup.get(f, None)
            if key not in self.keys:
            self.keys[key]["uid"] = uid

                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, name)
                byname[uid] = (keyid, name)

    return (byname, byuid)
def generate_users_from_keyring(self, format, session):
    """Create database uids from key emails using C{format} (a %-template)
    and return ({uid: (keyid, name)}, {keyid: (uid, name)}) mappings."""
    for x in self.keys.keys():
        if "email" not in self.keys[x]:
            # Key has no usable uid; mark it with the sentinel uid.
            self.keys[x]["uid"] = format % "invalid-uid"
            uid = format % self.keys[x]["email"]
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, self.keys[x]["name"])
            byname[uid] = (keyid, self.keys[x]["name"])
            self.keys[x]["uid"] = uid

        # Register the shared placeholder uid for keys without email.
        uid = format % "invalid-uid"
        keyid = get_or_set_uid(uid, session).uid_id
        byuid[keyid] = (uid, "ungeneratable user id")
        byname[uid] = (keyid, "ungeneratable user id")

    return (byname, byuid)

__all__.append('Keyring')
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @rtype: Keyring or None
    @return: the Keyring object for this keyring
    """
    q = session.query(Keyring).filter_by(keyring_name=keyring)
    except NoResultFound:

__all__.append('get_keyring')
def get_active_keyring_paths(session=None):
    """
    @rtype: list of strings
    @return: list of active keyring paths
    """
    # Highest-priority keyrings first; keyring_name holds the path on disk.
    return [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).order_by(desc(Keyring.priority)).all() ]

__all__.append('get_active_keyring_paths')
def get_primary_keyring_path(session=None):
    """
    Get the full path to the highest priority active keyring

    @rtype: str or None
    @return: path to the active keyring with the highest priority or None if no
             keyring is configured
    """
    keyrings = get_active_keyring_paths()

    # List is sorted by descending priority, so the first entry wins.
    if len(keyrings) > 0:

__all__.append('get_primary_keyring_path')
1345 ################################################################################
class DBChange(object):
    # Row from the changes table: one processed .changes upload,
    # identified by its changesname.
    def __init__(self, *args, **kwargs):

        return '<DBChange %s>' % self.changesname

__all__.append('DBChange')
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return: DBChange object for the given filename (C{None} if not present)
    """
    q = session.query(DBChange).filter_by(changesname=filename)

    except NoResultFound:

__all__.append('get_dbchange')
1381 ################################################################################
class Maintainer(ORMObject):
    # One row of the maintainer table; `name` is the maintainer string
    # as it appeared in the upload (presumably "Name <email>" — fed to
    # fix_maintainer() below).
    def __init__(self, name = None):

    def properties(self):
        return ['name', 'maintainer_id']

    def not_null_constraints(self):

    def get_split_maintainer(self):
        # Empty tuple parts when no name has been set yet.
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())

__all__.append('Maintainer')
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """
    q = session.query(Maintainer).filter_by(name=name)
    except NoResultFound:
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        # Persist now so maintainer_id is populated for the caller.
        session.commit_or_flush()

__all__.append('get_or_set_maintainer')
def get_maintainer(maintainer_id, session=None):
    """
    Return the name of the maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer
    @return: the Maintainer with this C{maintainer_id}
    """
    # Primary-key lookup; yields None for an unknown id.
    return session.query(Maintainer).get(maintainer_id)

__all__.append('get_maintainer')
1452 ################################################################################
class NewComment(object):
    # Reviewer comment attached to a (policy_queue, package, version)
    # combination — see has_new_comment()/get_new_comments() below.
    def __init__(self, *args, **kwargs):

        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)

__all__.append('NewComment')
def has_new_comment(policy_queue, package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true if at least one matching comment exists
    """
    q = session.query(NewComment).filter_by(policy_queue=policy_queue)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    return bool(q.count() > 0)

__all__.append('has_new_comment')
def get_new_comments(policy_queue, package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """
    # Each filter below is applied only when the caller supplied it.
    q = session.query(NewComment).filter_by(policy_queue=policy_queue)
    if package is not None: q = q.filter_by(package=package)
    if version is not None: q = q.filter_by(version=version)
    if comment_id is not None: q = q.filter_by(comment_id=comment_id)

__all__.append('get_new_comments')
1522 ################################################################################
class Override(ORMObject):
    # One override entry: pins a package's section and priority within a
    # suite/component for a given override type.
    def __init__(self, package = None, suite = None, component = None, overridetype = None, \
        section = None, priority = None):
        self.package = package
        self.component = component
        self.overridetype = overridetype
        self.section = section
        self.priority = priority

    def properties(self):
        return ['package', 'suite', 'component', 'overridetype', 'section', \

    def not_null_constraints(self):
        return ['package', 'suite', 'component', 'overridetype', 'section']

__all__.append('Override')
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to.  If
                  None, don't limit.  Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
                      limit to.  If None, don't limit.  Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
                         limit to.  If None, don't limit.  Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """
    q = session.query(Override)
    q = q.filter_by(package=package)

    # Scalar arguments are normalised to one-element lists so a single
    # in_() filter handles both forms.
    if suite is not None:
        if not isinstance(suite, list): suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list): component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list): overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

__all__.append('get_override')
1591 ################################################################################
class OverrideType(ORMObject):
    """Kind of an override entry; the name is stored in C{overridetype}."""

    def __init__(self, overridetype=None):
        self.overridetype = overridetype

    def properties(self):
        # Attributes exposed through ORMObject's generic helpers.
        return ['overridetype', 'overridetype_id', 'overrides_count']

    def not_null_constraints(self):
        # Columns that must always carry a value.
        return ['overridetype']

__all__.append('OverrideType')
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: OverrideType
    @return: the database id for the given override type
    """
    q = session.query(OverrideType).filter_by(overridetype=override_type)
    except NoResultFound:

__all__.append('get_override_type')
1630 ################################################################################
class PolicyQueue(object):
    # Row from the policy_queue table; identified by queue_name.
    def __init__(self, *args, **kwargs):

        return '<PolicyQueue %s>' % self.queue_name

__all__.append('PolicyQueue')
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(queue_name=queuename)
    except NoResultFound:

__all__.append('get_policy_queue')
1666 ################################################################################
class PolicyQueueUpload(object):
    # Ordering for queue listings: by source name, then version, then
    # whether a source package is included, finally by .changes filename.
    def __cmp__(self, other):
        ret = cmp(self.changes.source, other.changes.source)
            ret = apt_pkg.version_compare(self.changes.version, other.changes.version)
        # Uploads that include source sort relative to binary-only ones.
        if self.source is not None and other.source is None:
        elif self.source is None and other.source is not None:
            ret = cmp(self.changes.changesname, other.changes.changesname)

__all__.append('PolicyQueueUpload')
1684 ################################################################################
# A single byhand file belonging to an upload in a policy queue; all
# attributes are mapped from the policy_queue_byhand_file table.
class PolicyQueueByhandFile(object):

__all__.append('PolicyQueueByhandFile')
1691 ################################################################################
class Priority(ORMObject):
    # A package priority (row of the priority table); compares equal to
    # its name string via __eq__/__ne__ below.
    def __init__(self, priority = None, level = None):
        self.priority = priority

    def properties(self):
        return ['priority', 'priority_id', 'level', 'overrides_count']

    def not_null_constraints(self):
        return ['priority', 'level']

    def __eq__(self, val):
        # Allow comparing a Priority directly against a priority name.
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented

__all__.append('Priority')
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority
    """
    q = session.query(Priority).filter_by(priority=priority)
    except NoResultFound:

__all__.append('get_priority')
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """
    q = session.query(Priority)
        ret[x.priority] = x.priority_id

__all__.append('get_priorities')
1765 ################################################################################
class Section(ORMObject):
    # An archive section (row of the section table); compares equal to
    # its name string via __eq__/__ne__ below.
    def __init__(self, section = None):
        self.section = section

    def properties(self):
        return ['section', 'section_id', 'overrides_count']

    def not_null_constraints(self):

    def __eq__(self, val):
        # Allow comparing a Section directly against a section name.
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented

__all__.append('Section')
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name
    """
    q = session.query(Section).filter_by(section=section)
    except NoResultFound:

__all__.append('get_section')
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """
    q = session.query(Section)
        ret[x.section] = x.section_id

__all__.append('get_sections')
1838 ################################################################################
class SignatureHistory(ORMObject):
    # Records who signed what and when, to detect replayed signatures.
    def from_signed_file(cls, signed_file):
        """signature history entry from signed file

        @type signed_file: L{daklib.gpg.SignedFile}
        @param signed_file: signed file

        @rtype: L{SignatureHistory}
        """
        # Store the signer's primary fingerprint, the signature time and
        # a hash of the signed contents.
        self.fingerprint = signed_file.primary_fingerprint
        self.signature_timestamp = signed_file.signature_timestamp
        self.contents_sha1 = signed_file.contents_sha1()

__all__.append('SignatureHistory')
1858 ################################################################################
class SrcContents(ORMObject):
    # One (file path, source package) pair from the src_contents table.
    def __init__(self, file = None, source = None):
        self.source = source

    def properties(self):
        return ['file', 'source']

__all__.append('SrcContents')
1870 ################################################################################
1872 from debian.debfile import Deb822
1874 # Temporary Deb822 subclass to fix bugs with : handling; see #597249
class Dak822(Deb822):
    def _internal_parser(self, sequence, fields=None):
        """Parse an RFC822-style paragraph into self, optionally keeping
        only the field names listed in C{fields}."""
        # The key is non-whitespace, non-colon characters before any colon.
        key_part = r"^(?P<key>[^: \t\n\r\f\v]+)\s*:\s*"
        single = re.compile(key_part + r"(?P<data>\S.*?)\s*$")
        multi = re.compile(key_part + r"$")
        multidata = re.compile(r"^\s(?P<data>.+?)\s*$")

        # True when no filter was given or the field was requested.
        wanted_field = lambda f: fields is None or f in fields

        if isinstance(sequence, basestring):
            sequence = sequence.splitlines()

        for line in self.gpg_stripped_paragraph(sequence):
            # Field with value on the same line ("Key: value").
            m = single.match(line)
                    self[curkey] = content
                if not wanted_field(m.group('key')):
                curkey = m.group('key')
                content = m.group('data')

            # Field header alone on its line; value follows on
            # continuation lines.
            m = multi.match(line)
                    self[curkey] = content
                if not wanted_field(m.group('key')):
                curkey = m.group('key')

            # Continuation line of a multi-line field.
            m = multidata.match(line)
                content += '\n' + line # XXX not m.group('data')?

            self[curkey] = content
class DBSource(ORMObject):
    # A source package row (table `source`), linked to its pool file,
    # maintainer, fingerprint and suites.
    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None, fingerprint = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date
        self.fingerprint = fingerprint

        return self.source_id

    def properties(self):
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \
            'install_date', 'binaries_count', 'uploaders_count']

    def not_null_constraints(self):
        return ['source', 'version', 'install_date', 'maintainer', \
            'changedby', 'poolfile']

    def read_control_fields(self):
        """
        Reads the control information from a dsc

        @rtype: dict-like (Dak822)
        @return: fields is the dsc information in a dictionary form
        """
        # NOTE(review): the file handle passed to Dak822 is not closed
        # explicitly — presumably relies on GC; confirm.
        fullpath = self.poolfile.fullpath
        fields = Dak822(open(self.poolfile.fullpath, 'r'))

    metadata = association_proxy('key', 'value')

    def scan_contents(self):
        """
        Returns a set of names for non directories. The path names are
        normalized after converting them from either utf-8 or iso8859-1
        encoding.
        """
        fullpath = self.poolfile.fullpath
        from daklib.contents import UnpackedSource
        unpacked = UnpackedSource(fullpath)
        for name in unpacked.get_all_filenames():
            # enforce proper utf-8 encoding
                name.decode('utf-8')
            except UnicodeDecodeError:
                # Re-encode latin-1 names so everything stored is utf-8.
                name = name.decode('iso8859-1').encode('utf-8')

__all__.append('DBSource')
# NOTE(review): mutable default argument `suites = ["any"]` — safe only
# while the list is never mutated inside the function; confirm.
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
    1. exact match => 1.0-3
    2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: list
    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: returns 1 if a source with expected version is found, otherwise 0
    """
    # Strip a binNMU suffix (e.g. +b1) to recover the original source version.
    from daklib.regexes import re_bin_only_nmu
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))

        # source must exist in 'suite' or a suite that is enhanced by 'suite'
        s = get_suite(suite, session)
            enhances_vcs = session.query(VersionCheck).filter(VersionCheck.suite==s).filter_by(check='Enhances')
            considered_suites = [ vc.reference for vc in enhances_vcs ]
            considered_suites.append(s)

            q = q.filter(DBSource.suites.any(Suite.suite_id.in_([s.suite_id for s in considered_suites])))

    # No source found so return not ok

__all__.append('source_exists')
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """
    return session.query(Suite).filter(Suite.sources.any(source=source)).all()

__all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @type source: str
    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect.  If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBSource objects for the given name (may be empty)
    """
    # Optional filters are applied only when supplied.
    q = session.query(DBSource).filter_by(source=source)

    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)

__all__.append('get_sources_from_name')
2090 # FIXME: This function fails badly if it finds more than 1 source package and
2091 # its implementation is trivial enough to be inlined.
def get_source_in_suite(source, suite, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite: string
    @param suite: the suite name

    @rtype: DBSource
    @return: the version for I{source} in I{suite}
    """
    q = get_suite(suite, session).get_sources(source)
    except NoResultFound:

__all__.append('get_source_in_suite')
def import_metadata_into_db(obj, session=None):
    """
    This routine works on either DBBinary or DBSource objects and imports
    their metadata into the database
    """
    fields = obj.read_control_fields()
    for k in fields.keys():
            # Most values are plain ASCII; try the cheap conversion first.
            val = str(fields[k])
        except UnicodeEncodeError:
            # Fall back to UTF-8
                val = fields[k].encode('utf-8')
            except UnicodeEncodeError:
                # Finally try iso8859-1
                val = fields[k].encode('iso8859-1')
                # Otherwise we allow the exception to percolate up and we cause
                # a reject as someone is playing silly buggers

        obj.metadata[get_or_set_metadatakey(k, session)] = val

    session.commit_or_flush()

__all__.append('import_metadata_into_db')
2146 ################################################################################
class SrcFormat(object):
    # A source package format (row of the src_format table), e.g. the
    # value stored in format_name.
    def __init__(self, *args, **kwargs):

        return '<SrcFormat %s>' % (self.format_name)

__all__.append('SrcFormat')
2157 ################################################################################
# (display name, Suite attribute) pairs used when rendering a suite's
# details as "Name: value" lines (see the loop in class Suite below).
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
2175 # Why the heck don't we have any UNIQUE constraints in table suite?
2176 # TODO: Add UNIQUE constraints for appropriate columns.
class Suite(ORMObject):
    # A suite (row of the suite table); compares equal to its name string
    # via __eq__/__ne__ below.
    def __init__(self, suite_name = None, version = None):
        self.suite_name = suite_name
        self.version = version

    def properties(self):
        return ['suite_name', 'version', 'sources_count', 'binaries_count', \

    def not_null_constraints(self):
        return ['suite_name']

    def __eq__(self, val):
        # Allow comparing a Suite directly against a suite name.
        if isinstance(val, str):
            return (self.suite_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.suite_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

        # Render "DisplayName: value" lines for every set SUITE_FIELDS entry.
        for disp, field in SUITE_FIELDS:
            val = getattr(self, field, None)
                ret.append("%s: %s" % (disp, val))

        return "\n".join(ret)

    def get_architectures(self, skipsrc=False, skipall=False):
        """
        Returns list of Architecture objects

        @type skipsrc: boolean
        @param skipsrc: Whether to skip returning the 'source' architecture entry

        @type skipall: boolean
        @param skipall: Whether to skip returning the 'all' architecture entry

        @rtype: list
        @return: list of Architecture objects for the given name (may be empty)
        """
        q = object_session(self).query(Architecture).with_parent(self)
            q = q.filter(Architecture.arch_string != 'source')
            q = q.filter(Architecture.arch_string != 'all')
        return q.order_by(Architecture.arch_string).all()

    def get_sources(self, source):
        """
        Returns a query object representing DBSource that is part of C{suite}.

          - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}

        @type source: string
        @param source: source package name

        @rtype: sqlalchemy.orm.query.Query
        @return: a query of DBSource
        """
        session = object_session(self)
        return session.query(DBSource).filter_by(source = source). \

    def get_overridesuite(self):
        # Use this suite itself unless overridesuite points elsewhere.
        if self.overridesuite is None:
            return object_session(self).query(Suite).filter_by(suite_name=self.overridesuite).one()

        return os.path.join(self.archive.path, 'dists', self.suite_name)

__all__.append('Suite')
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Suite
    @return: Suite object for the requested suite name (None if not present)
    """
    q = session.query(Suite).filter_by(suite_name=suite)
    except NoResultFound:

__all__.append('get_suite')
2288 ################################################################################
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name. The list is
    empty if suite does not exist.

    @type suite: str
    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Architecture objects for the given name (may be empty)
    """
        # get_suite() returns None for an unknown suite; the resulting
        # AttributeError is caught below.
        return get_suite(suite, session).get_architectures(skipsrc, skipall)
    except AttributeError:

__all__.append('get_suite_architectures')
2322 ################################################################################
class Uid(ORMObject):
    # A database uid (row of the uid table); compares equal to its uid
    # string via __eq__/__ne__ below.
    def __init__(self, uid = None, name = None):

    def __eq__(self, val):
        # Allow comparing a Uid directly against a uid string.
        if isinstance(val, str):
            return (self.uid == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.uid != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        return ['uid', 'name', 'fingerprint']

    def not_null_constraints(self):

__all__.append('Uid')
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: Uid
    @return: the uid object for the given uidname
    """
    q = session.query(Uid).filter_by(uid=uidname)
    except NoResultFound:
        # Persist now so uid_id is populated for the caller.
        session.commit_or_flush()

__all__.append('get_or_set_uid')
def get_uid_from_fingerprint(fpr, session=None):
    """Return the Uid owning fingerprint C{fpr}, or None when unknown."""
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)
    except NoResultFound:

__all__.append('get_uid_from_fingerprint')
2395 ################################################################################
class MetadataKey(ORMObject):
    # A control-field name (e.g. "Depends") shared by binary and source
    # metadata tables.
    def __init__(self, key = None):

    def properties(self):

    def not_null_constraints(self):

__all__.append('MetadataKey')
def get_or_set_metadatakey(keyname, session=None):
    """
    Returns MetadataKey object for given keyname.

    If no matching keyname is found, a row is inserted.

    @type keyname: string
    @param keyname: The keyname to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: MetadataKey
    @return: the metadatakey object for the given keyname
    """
    q = session.query(MetadataKey).filter_by(key=keyname)
    except NoResultFound:
        ret = MetadataKey(keyname)
        # Persist now so key_id is populated for the caller.
        session.commit_or_flush()

__all__.append('get_or_set_metadatakey')
2441 ################################################################################
class BinaryMetadata(ORMObject):
    # One (key, value) control field attached to a binary package.
    def __init__(self, key = None, value = None, binary = None):
        self.binary = binary

    def properties(self):
        return ['binary', 'key', 'value']

    def not_null_constraints(self):

__all__.append('BinaryMetadata')
2457 ################################################################################
class SourceMetadata(ORMObject):
    # One (key, value) control field attached to a source package.
    def __init__(self, key = None, value = None, source = None):
        self.source = source

    def properties(self):
        return ['source', 'key', 'value']

    def not_null_constraints(self):

__all__.append('SourceMetadata')
2473 ################################################################################
class VersionCheck(ORMObject):
    # A version constraint between two suites (e.g. an 'Enhances' link
    # consulted by source_exists()).
    def __init__(self, *args, **kwargs):

    def properties(self):
        #return ['suite_id', 'check', 'reference_id']

    def not_null_constraints(self):
        return ['suite', 'check', 'reference']

__all__.append('VersionCheck')
def get_version_checks(suite_name, check = None, session = None):
    """Return VersionCheck rows for C{suite_name}, optionally limited to a
    single C{check} type (e.g. 'Enhances')."""
    suite = get_suite(suite_name, session)

    # Make sure that what we return is iterable so that list comprehensions
    # involving this don't cause a traceback
    q = session.query(VersionCheck).filter_by(suite=suite)
        q = q.filter_by(check=check)

__all__.append('get_version_checks')
2502 ################################################################################
2504 class DBConn(object):
2506 database module init.
2510 def __init__(self, *args, **kwargs):
2511 self.__dict__ = self.__shared_state
2513 if not getattr(self, 'initialised', False):
2514 self.initialised = True
2515 self.debug = kwargs.has_key('debug')
def __setuptables(self):
    """Reflect table and view definitions from the running database and
    bind them onto self as self.tbl_<name> / self.view_<name>.

    NOTE(review): the `tables = [` / `views = [` list openers, the list
    terminators and many entries are elided in this excerpt — confirm
    against the full file.
    """
        # Entries reflected as tables (autoload from the live schema).
        'acl_architecture_map',
        'acl_fingerprint_map',
        'binaries_metadata',
        'external_overrides',
        'extra_src_references',
        'files_archive_map',
        # TODO: the maintainer column in table override should be removed.
        'policy_queue_upload',
        'policy_queue_upload_binaries_map',
        'policy_queue_byhand_file',
        'signature_history',
        'suite_architectures',
        'suite_build_queue_copy',
        'suite_src_formats',
        # Entries reflected in the view loop below (read-only views).
        'almost_obsolete_all_associations',
        'almost_obsolete_src_associations',
        'any_associations_source',
        'bin_associations_binaries',
        'binaries_suite_arch',
        'newest_all_associations',
        'newest_any_associations',
        'newest_src_association',
        'obsolete_all_associations',
        'obsolete_any_associations',
        'obsolete_any_by_all_associations',
        'obsolete_src_associations',
        'src_associations_bin',
        'src_associations_src',
        'suite_arch_by_name',

    # Reflect each table; useexisting avoids redefinition errors when
    # the metadata already holds a table of the same name.
    for table_name in tables:
        table = Table(table_name, self.db_meta, \
            autoload=True, useexisting=True)
        setattr(self, 'tbl_%s' % table_name, table)

    # Views are reflected the same way, bound under a view_ prefix.
    for view_name in views:
        view = Table(view_name, self.db_meta, autoload=True)
        setattr(self, 'view_%s' % view_name, view)
def __setupmappers(self):
    """Wire the ORM classes onto the reflected tables using SQLAlchemy
    classical mapping (mapper()/relation()/backref).

    NOTE(review): several `properties = dict(` openers and closing
    parentheses are elided in this excerpt — confirm against the full
    file before editing any individual mapper() call.
    """
    mapper(Architecture, self.tbl_architecture,
        properties = dict(arch_id = self.tbl_architecture.c.id,
            suites = relation(Suite, secondary=self.tbl_suite_architectures,
                order_by=self.tbl_suite.c.suite_name,
                backref=backref('architectures', order_by=self.tbl_architecture.c.arch_string))),
        extension = validator)

    mapper(ACL, self.tbl_acl,
            architectures = relation(Architecture, secondary=self.tbl_acl_architecture_map, collection_class=set),
            fingerprints = relation(Fingerprint, secondary=self.tbl_acl_fingerprint_map, collection_class=set),
            match_keyring = relation(Keyring, primaryjoin=(self.tbl_acl.c.match_keyring_id == self.tbl_keyrings.c.id)),
            per_source = relation(ACLPerSource, collection_class=set),

    mapper(ACLPerSource, self.tbl_acl_per_source,
            acl = relation(ACL),
            fingerprint = relation(Fingerprint, primaryjoin=(self.tbl_acl_per_source.c.fingerprint_id == self.tbl_fingerprint.c.id)),
            created_by = relation(Fingerprint, primaryjoin=(self.tbl_acl_per_source.c.created_by_id == self.tbl_fingerprint.c.id)),

    mapper(Archive, self.tbl_archive,
        properties = dict(archive_id = self.tbl_archive.c.id,
            archive_name = self.tbl_archive.c.name))

    mapper(ArchiveFile, self.tbl_files_archive_map,
        properties = dict(archive = relation(Archive, backref='files'),
            component = relation(Component),
            file = relation(PoolFile, backref='archives')))

    mapper(BuildQueue, self.tbl_build_queue,
        properties = dict(queue_id = self.tbl_build_queue.c.id,
            suite = relation(Suite, primaryjoin=(self.tbl_build_queue.c.suite_id==self.tbl_suite.c.id))))

    # DBBinary carries both plain column aliases (binary_id, package, ...)
    # and relations to its source, architecture, pool file and metadata.
    mapper(DBBinary, self.tbl_binaries,
        properties = dict(binary_id = self.tbl_binaries.c.id,
            package = self.tbl_binaries.c.package,
            version = self.tbl_binaries.c.version,
            maintainer_id = self.tbl_binaries.c.maintainer,
            maintainer = relation(Maintainer),
            source_id = self.tbl_binaries.c.source,
            source = relation(DBSource, backref='binaries'),
            arch_id = self.tbl_binaries.c.architecture,
            architecture = relation(Architecture),
            poolfile_id = self.tbl_binaries.c.file,
            poolfile = relation(PoolFile),
            binarytype = self.tbl_binaries.c.type,
            fingerprint_id = self.tbl_binaries.c.sig_fpr,
            fingerprint = relation(Fingerprint),
            install_date = self.tbl_binaries.c.install_date,
            suites = relation(Suite, secondary=self.tbl_bin_associations,
                backref=backref('binaries', lazy='dynamic')),
            extra_sources = relation(DBSource, secondary=self.tbl_extra_src_references,
                backref=backref('extra_binary_references', lazy='dynamic')),
            key = relation(BinaryMetadata, cascade='all',
                collection_class=attribute_mapped_collection('key'))),
        extension = validator)

    mapper(Component, self.tbl_component,
        properties = dict(component_id = self.tbl_component.c.id,
            component_name = self.tbl_component.c.name),
        extension = validator)

    mapper(DBConfig, self.tbl_config,
        properties = dict(config_id = self.tbl_config.c.id))

    mapper(DSCFile, self.tbl_dsc_files,
        properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
            source_id = self.tbl_dsc_files.c.source,
            source = relation(DBSource),
            poolfile_id = self.tbl_dsc_files.c.file,
            poolfile = relation(PoolFile)))

    mapper(ExternalOverride, self.tbl_external_overrides,
            suite_id = self.tbl_external_overrides.c.suite,
            suite = relation(Suite),
            component_id = self.tbl_external_overrides.c.component,
            component = relation(Component)))

    mapper(PoolFile, self.tbl_files,
        properties = dict(file_id = self.tbl_files.c.id,
            filesize = self.tbl_files.c.size),
        extension = validator)

    mapper(Fingerprint, self.tbl_fingerprint,
        properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
            uid_id = self.tbl_fingerprint.c.uid,
            uid = relation(Uid),
            keyring_id = self.tbl_fingerprint.c.keyring,
            keyring = relation(Keyring),
            acl = relation(ACL)),
        extension = validator)

    mapper(Keyring, self.tbl_keyrings,
        properties = dict(keyring_name = self.tbl_keyrings.c.name,
            keyring_id = self.tbl_keyrings.c.id,
            acl = relation(ACL, primaryjoin=(self.tbl_keyrings.c.acl_id == self.tbl_acl.c.id)))),

    # DBChange exposes the .changes metadata purely as column aliases.
    mapper(DBChange, self.tbl_changes,
        properties = dict(change_id = self.tbl_changes.c.id,
            seen = self.tbl_changes.c.seen,
            source = self.tbl_changes.c.source,
            binaries = self.tbl_changes.c.binaries,
            architecture = self.tbl_changes.c.architecture,
            distribution = self.tbl_changes.c.distribution,
            urgency = self.tbl_changes.c.urgency,
            maintainer = self.tbl_changes.c.maintainer,
            changedby = self.tbl_changes.c.changedby,
            date = self.tbl_changes.c.date,
            version = self.tbl_changes.c.version))

    # A maintainer relates to sources twice: as Maintainer: and as
    # Changed-By:, distinguished by the two primaryjoins below.
    mapper(Maintainer, self.tbl_maintainer,
        properties = dict(maintainer_id = self.tbl_maintainer.c.id,
            maintains_sources = relation(DBSource, backref='maintainer',
                primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
            changed_sources = relation(DBSource, backref='changedby',
                primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
        extension = validator)

    mapper(NewComment, self.tbl_new_comments,
        properties = dict(comment_id = self.tbl_new_comments.c.id,
            policy_queue = relation(PolicyQueue)))

    # Override links suite/component/priority/section/type, each with a
    # lazy 'overrides' backref so the reverse lookups stay query objects.
    mapper(Override, self.tbl_override,
        properties = dict(suite_id = self.tbl_override.c.suite,
            suite = relation(Suite, \
                backref=backref('overrides', lazy='dynamic')),
            package = self.tbl_override.c.package,
            component_id = self.tbl_override.c.component,
            component = relation(Component, \
                backref=backref('overrides', lazy='dynamic')),
            priority_id = self.tbl_override.c.priority,
            priority = relation(Priority, \
                backref=backref('overrides', lazy='dynamic')),
            section_id = self.tbl_override.c.section,
            section = relation(Section, \
                backref=backref('overrides', lazy='dynamic')),
            overridetype_id = self.tbl_override.c.type,
            overridetype = relation(OverrideType, \
                backref=backref('overrides', lazy='dynamic'))))

    mapper(OverrideType, self.tbl_override_type,
        properties = dict(overridetype = self.tbl_override_type.c.type,
            overridetype_id = self.tbl_override_type.c.id))

    mapper(PolicyQueue, self.tbl_policy_queue,
        properties = dict(policy_queue_id = self.tbl_policy_queue.c.id,
            suite = relation(Suite, primaryjoin=(self.tbl_policy_queue.c.suite_id == self.tbl_suite.c.id))))

    mapper(PolicyQueueUpload, self.tbl_policy_queue_upload,
            changes = relation(DBChange),
            policy_queue = relation(PolicyQueue, backref='uploads'),
            target_suite = relation(Suite),
            source = relation(DBSource),
            binaries = relation(DBBinary, secondary=self.tbl_policy_queue_upload_binaries_map),

    mapper(PolicyQueueByhandFile, self.tbl_policy_queue_byhand_file,
            upload = relation(PolicyQueueUpload, backref='byhand'),

    mapper(Priority, self.tbl_priority,
        properties = dict(priority_id = self.tbl_priority.c.id))

    mapper(Section, self.tbl_section,
        properties = dict(section_id = self.tbl_section.c.id,
            section=self.tbl_section.c.section))

    mapper(SignatureHistory, self.tbl_signature_history)

    mapper(DBSource, self.tbl_source,
        properties = dict(source_id = self.tbl_source.c.id,
            version = self.tbl_source.c.version,
            maintainer_id = self.tbl_source.c.maintainer,
            poolfile_id = self.tbl_source.c.file,
            poolfile = relation(PoolFile),
            fingerprint_id = self.tbl_source.c.sig_fpr,
            fingerprint = relation(Fingerprint),
            changedby_id = self.tbl_source.c.changedby,
            srcfiles = relation(DSCFile,
                primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
            suites = relation(Suite, secondary=self.tbl_src_associations,
                backref=backref('sources', lazy='dynamic')),
            uploaders = relation(Maintainer,
                secondary=self.tbl_src_uploaders),
            key = relation(SourceMetadata, cascade='all',
                collection_class=attribute_mapped_collection('key'))),
        extension = validator)

    mapper(SrcFormat, self.tbl_src_format,
        properties = dict(src_format_id = self.tbl_src_format.c.id,
            format_name = self.tbl_src_format.c.format_name))

    # Suite relates to two distinct PolicyQueues (policy and NEW),
    # distinguished by explicit primaryjoins on the two FK columns.
    mapper(Suite, self.tbl_suite,
        properties = dict(suite_id = self.tbl_suite.c.id,
            policy_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.policy_queue_id == self.tbl_policy_queue.c.id)),
            new_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.new_queue_id == self.tbl_policy_queue.c.id)),
            copy_queues = relation(BuildQueue,
                secondary=self.tbl_suite_build_queue_copy),
            srcformats = relation(SrcFormat, secondary=self.tbl_suite_src_formats,
                backref=backref('suites', lazy='dynamic')),
            archive = relation(Archive, backref='suites'),
            acls = relation(ACL, secondary=self.tbl_suite_acl_map, collection_class=set),
            components = relation(Component, secondary=self.tbl_component_suite,
                order_by=self.tbl_component.c.ordering,
                backref=backref('suites'))),
        extension = validator)

    mapper(Uid, self.tbl_uid,
        properties = dict(uid_id = self.tbl_uid.c.id,
            fingerprint = relation(Fingerprint)),
        extension = validator)

    mapper(BinContents, self.tbl_bin_contents,
            binary = relation(DBBinary,
                backref=backref('contents', lazy='dynamic', cascade='all')),
            file = self.tbl_bin_contents.c.file))

    mapper(SrcContents, self.tbl_src_contents,
            source = relation(DBSource,
                backref=backref('contents', lazy='dynamic', cascade='all')),
            file = self.tbl_src_contents.c.file))

    mapper(MetadataKey, self.tbl_metadata_keys,
            key_id = self.tbl_metadata_keys.c.key_id,
            key = self.tbl_metadata_keys.c.key))

    mapper(BinaryMetadata, self.tbl_binaries_metadata,
            binary_id = self.tbl_binaries_metadata.c.bin_id,
            binary = relation(DBBinary),
            key_id = self.tbl_binaries_metadata.c.key_id,
            key = relation(MetadataKey),
            value = self.tbl_binaries_metadata.c.value))

    mapper(SourceMetadata, self.tbl_source_metadata,
            source_id = self.tbl_source_metadata.c.src_id,
            source = relation(DBSource),
            key_id = self.tbl_source_metadata.c.key_id,
            key = relation(MetadataKey),
            value = self.tbl_source_metadata.c.value))

    # reference is loaded eagerly (lazy='joined') since version checks
    # are always consumed together with their reference suite.
    mapper(VersionCheck, self.tbl_version_check,
            suite_id = self.tbl_version_check.c.suite,
            suite = relation(Suite, primaryjoin=self.tbl_version_check.c.suite==self.tbl_suite.c.id),
            reference_id = self.tbl_version_check.c.reference,
            reference = relation(Suite, primaryjoin=self.tbl_version_check.c.reference==self.tbl_suite.c.id, lazy='joined')))
2861 ## Connection functions
def __createconn(self):
    """Build the PostgreSQL connection string from dak's configuration,
    create the SQLAlchemy engine/session factory, then reflect tables
    and set up the ORM mappers.

    NOTE(review): several lines are elided in this excerpt — the `cnf`
    initialisation, the `else:` branch for local-socket connections,
    the `try:` opener and the remaining sessionmaker arguments. Confirm
    against the full file.
    """
    from config import Config
    if cnf.has_key("DB::Service"):
        # Connect through a libpq service definition (pg_service.conf).
        connstr = "postgresql://service=%s" % cnf["DB::Service"]
    elif cnf.has_key("DB::Host"):
        # TCP/IP connection to an explicitly configured host.
        connstr = "postgresql://%s" % cnf["DB::Host"]
        if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
            connstr += ":%s" % cnf["DB::Port"]
        connstr += "/%s" % cnf["DB::Name"]
        # NOTE(review): the two statements below presumably live in an
        # elided else: branch (local Unix-socket connection) — confirm.
        connstr = "postgresql:///%s" % cnf["DB::Name"]
        if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
            connstr += "?port=%s" % cnf["DB::Port"]

    # Optional engine tuning knobs from the dak configuration.
    engine_args = { 'echo': self.debug }
    if cnf.has_key('DB::PoolSize'):
        engine_args['pool_size'] = int(cnf['DB::PoolSize'])
    if cnf.has_key('DB::MaxOverflow'):
        engine_args['max_overflow'] = int(cnf['DB::MaxOverflow'])
    if sa_major_version != '0.5' and cnf.has_key('DB::Unicode') and \
        cnf['DB::Unicode'] == 'false':
        engine_args['use_native_unicode'] = False

    # Monkey-patch a new dialect in, in order to support the service=
    # connection-string syntax.
    import sqlalchemy.dialects.postgresql
    from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
    class PGDialect_psycopg2_dak(PGDialect_psycopg2):
        def create_connect_args(self, url):
            if str(url).startswith('postgresql://service='):
                # Drop the 21-char 'postgresql://service=' prefix,
                # keeping only the service name for libpq.
                servicename = str(url)[21:]
                return (['service=%s' % servicename], {})
            return PGDialect_psycopg2.create_connect_args(self, url)

    sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak

    self.db_pg = create_engine(connstr, **engine_args)
    self.db_meta = MetaData()
    self.db_meta.bind = self.db_pg
    self.db_smaker = sessionmaker(bind=self.db_pg,
    self.__setuptables()
    self.__setupmappers()
    except OperationalError as e:
        # dak cannot do anything without its database: abort hard.
        utils.fubar("Cannot connect to database (%s)" % str(e))
    # Remember the creating PID so session() can detect fork()ed children.
    self.pid = os.getpid()
def session(self, work_mem = 0):
    """
    Returns a new session object. If a work_mem parameter is provided a new
    transaction is started and the work_mem parameter is set for this
    transaction. The work_mem parameter is measured in MB. A default value
    will be used if the parameter is not set.
    """
    # reinitialize DBConn in new processes
    if self.pid != os.getpid():
        # NOTE(review): the reinitialisation body is elided in this
        # excerpt — confirm against the full file.
    session = self.db_smaker()
    # NOTE(review): presumably guarded by `if work_mem > 0:` in the full
    # file, so the default of 0 keeps the server-side setting — confirm.
    session.execute("SET LOCAL work_mem TO '%d MB'" % work_mem)
2935 __all__.append('DBConn')