5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
37 import daklib.daksubprocess
39 from os.path import normpath
50 import simplejson as json
52 from datetime import datetime, timedelta
53 from errno import ENOENT
54 from tempfile import mkstemp, mkdtemp
55 from tarfile import TarFile
57 from inspect import getargspec
60 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
62 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
63 backref, MapperExtension, EXT_CONTINUE, object_mapper, clear_mappers
64 from sqlalchemy import types as sqltypes
65 from sqlalchemy.orm.collections import attribute_mapped_collection
66 from sqlalchemy.ext.associationproxy import association_proxy
68 # Don't remove this, we re-export the exceptions to scripts which import us
69 from sqlalchemy.exc import *
70 from sqlalchemy.orm.exc import NoResultFound
72 # Only import Config until Queue stuff is changed to store its config
74 from config import Config
75 from textutils import fix_maintainer
76 from dak_exceptions import DBUpdateError, NoSourceFieldError, FileExistsError
78 # suppress some deprecation warnings in squeeze related to sqlalchemy
80 warnings.filterwarnings('ignore', \
81 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
83 warnings.filterwarnings('ignore', \
84 "Predicate of partial index .* ignored during reflection", \
88 ################################################################################
90 # Patch in support for the debversion field type so that it works during
94 # that is for sqlalchemy 0.6
95 UserDefinedType = sqltypes.UserDefinedType
97 # this one for sqlalchemy 0.5
98 UserDefinedType = sqltypes.TypeEngine
100 class DebVersion(UserDefinedType):
101 def get_col_spec(self):
104 def bind_processor(self, dialect):
107 # ' = None' is needed for sqlalchemy 0.5:
108 def result_processor(self, dialect, coltype = None):
111 sa_major_version = sqlalchemy.__version__[0:3]
112 if sa_major_version in ["0.5", "0.6", "0.7", "0.8", "0.9"]:
113 from sqlalchemy.databases import postgres
114 postgres.ischema_names['debversion'] = DebVersion
116 raise Exception("dak only ported to SQLA versions 0.5 to 0.9. See daklib/dbconn.py")
118 ################################################################################
120 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
122 ################################################################################
124 def session_wrapper(fn):
126 Wrapper around common ".., session=None):" handling. If the wrapped
127 function is called without passing 'session', we create a local one
128 and destroy it when the function ends.
130 Also attaches a commit_or_flush method to the session; if we created a
131 local session, this is a synonym for session.commit(), otherwise it is a
132 synonym for session.flush().
135 def wrapped(*args, **kwargs):
136 private_transaction = False
138 # Find the session object
139 session = kwargs.get('session')
142 if len(args) <= len(getargspec(fn)[0]) - 1:
143 # No session specified as last argument or in kwargs
144 private_transaction = True
145 session = kwargs['session'] = DBConn().session()
147 # Session is last argument in args
151 session = args[-1] = DBConn().session()
152 private_transaction = True
154 if private_transaction:
155 session.commit_or_flush = session.commit
157 session.commit_or_flush = session.flush
160 return fn(*args, **kwargs)
162 if private_transaction:
163 # We created a session; close it.
166 wrapped.__doc__ = fn.__doc__
167 wrapped.func_name = fn.func_name
171 __all__.append('session_wrapper')
173 ################################################################################
175 class ORMObject(object):
177 ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
178 derived classes must implement the properties() method.
181 def properties(self):
183 This method should be implemented by all derived classes and returns a
184 list of the important properties. The properties 'created' and
185 'modified' will be added automatically. A suffix '_count' should be
186 added to properties that are lists or query objects. The most important
187 property name should be returned as the first element in the list
188 because it is used by repr().
194 Returns a JSON representation of the object based on the properties
195 returned from the properties() method.
198 # add created and modified
199 all_properties = self.properties() + ['created', 'modified']
200 for property in all_properties:
201 # check for list or query
202 if property[-6:] == '_count':
203 real_property = property[:-6]
204 if not hasattr(self, real_property):
206 value = getattr(self, real_property)
207 if hasattr(value, '__len__'):
210 elif hasattr(value, 'count'):
211 # query (but not during validation)
212 if self.in_validation:
214 value = value.count()
216 raise KeyError('Do not understand property %s.' % property)
218 if not hasattr(self, property):
221 value = getattr(self, property)
225 elif isinstance(value, ORMObject):
226 # use repr() for ORMObject types
229 # we want a string for all other types because json cannot
232 data[property] = value
233 return json.dumps(data)
237 Returns the name of the class.
239 return type(self).__name__
243 Returns a short string representation of the object using the first
244 element from the properties() method.
246 primary_property = self.properties()[0]
247 value = getattr(self, primary_property)
248 return '<%s %s>' % (self.classname(), str(value))
252 Returns a human readable form of the object using the properties()
255 return '<%s %s>' % (self.classname(), self.json())
257 def not_null_constraints(self):
259 Returns a list of properties that must be not NULL. Derived classes
260 should override this method if needed.
264 validation_message = \
265 "Validation failed because property '%s' must not be empty in object\n%s"
267 in_validation = False
271 This function validates the not NULL constraints as returned by
272 not_null_constraints(). It raises the DBUpdateError exception if
275 for property in self.not_null_constraints():
276 # TODO: It is a bit awkward that the mapper configuration allow
277 # directly setting the numeric _id columns. We should get rid of it
279 if hasattr(self, property + '_id') and \
280 getattr(self, property + '_id') is not None:
282 if not hasattr(self, property) or getattr(self, property) is None:
283 # str() might lead to races due to a 2nd flush
284 self.in_validation = True
285 message = self.validation_message % (property, str(self))
286 self.in_validation = False
287 raise DBUpdateError(message)
291 def get(cls, primary_key, session = None):
293 This is a support function that allows getting an object by its primary
296 Architecture.get(3[, session])
298 instead of the more verbose
300 session.query(Architecture).get(3)
302 return session.query(cls).get(primary_key)
304 def session(self, replace = False):
306 Returns the current session that is associated with the object. May
307 return None is object is in detached state.
310 return object_session(self)
312 def clone(self, session = None):
314 Clones the current object in a new session and returns the new clone. A
315 fresh session is created if the optional session parameter is not
316 provided. The function will fail if a session is provided and has
319 RATIONALE: SQLAlchemy's session is not thread safe. This method clones
320 an existing object to allow several threads to work with their own
321 instances of an ORMObject.
323 WARNING: Only persistent (committed) objects can be cloned. Changes
324 made to the original object that are not committed yet will get lost.
325 The session of the new object will always be rolled back to avoid
329 if self.session() is None:
330 raise RuntimeError( \
331 'Method clone() failed for detached object:\n%s' % self)
332 self.session().flush()
333 mapper = object_mapper(self)
334 primary_key = mapper.primary_key_from_instance(self)
335 object_class = self.__class__
337 session = DBConn().session()
338 elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
339 raise RuntimeError( \
340 'Method clone() failed due to unflushed changes in session.')
341 new_object = session.query(object_class).get(primary_key)
343 if new_object is None:
344 raise RuntimeError( \
345 'Method clone() failed for non-persistent object:\n%s' % self)
348 __all__.append('ORMObject')
350 ################################################################################
352 class Validator(MapperExtension):
354 This class calls the validate() method for each instance for the
355 'before_update' and 'before_insert' events. A global object validator is
356 used for configuring the individual mappers.
359 def before_update(self, mapper, connection, instance):
363 def before_insert(self, mapper, connection, instance):
367 validator = Validator()
369 ################################################################################
371 class ACL(ORMObject):
373 return "<ACL {0}>".format(self.name)
375 __all__.append('ACL')
377 class ACLPerSource(ORMObject):
379 return "<ACLPerSource acl={0} fingerprint={1} source={2} reason={3}>".format(self.acl.name, self.fingerprint.fingerprint, self.source, self.reason)
381 __all__.append('ACLPerSource')
383 ################################################################################
class Architecture(ORMObject):
    """An architecture known to the archive (e.g. 'amd64' or 'source')."""

    def __init__(self, arch_string=None, description=None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Allow comparing an Architecture directly against a plain
        # architecture-name string.
        if isinstance(val, str):
            return self.arch_string == val
        # Defer to the normal comparison machinery for anything else.
        return NotImplemented

    def __ne__(self, val):
        # Mirror __eq__ so string inequality tests stay consistent.
        if isinstance(val, str):
            return self.arch_string != val
        return NotImplemented

    def properties(self):
        # 'arch_string' comes first because repr() shows the leading property.
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']
408 __all__.append('Architecture')
411 def get_architecture(architecture, session=None):
413 Returns database id for given C{architecture}.
415 @type architecture: string
416 @param architecture: The name of the architecture
418 @type session: Session
419 @param session: Optional SQLA session object (a temporary one will be
420 generated if not supplied)
423 @return: Architecture object for the given arch (None if not present)
426 q = session.query(Architecture).filter_by(arch_string=architecture)
430 except NoResultFound:
433 __all__.append('get_architecture')
435 ################################################################################
437 class Archive(object):
438 def __init__(self, *args, **kwargs):
442 return '<Archive %s>' % self.archive_name
444 __all__.append('Archive')
447 def get_archive(archive, session=None):
449 returns database id for given C{archive}.
451 @type archive: string
452 @param archive: the name of the arhive
454 @type session: Session
455 @param session: Optional SQLA session object (a temporary one will be
456 generated if not supplied)
459 @return: Archive object for the given name (None if not present)
462 archive = archive.lower()
464 q = session.query(Archive).filter_by(archive_name=archive)
468 except NoResultFound:
471 __all__.append('get_archive')
473 ################################################################################
475 class ArchiveFile(object):
476 def __init__(self, archive=None, component=None, file=None):
477 self.archive = archive
478 self.component = component
482 return os.path.join(self.archive.path, 'pool', self.component.component_name, self.file.filename)
484 __all__.append('ArchiveFile')
486 ################################################################################
488 class BinContents(ORMObject):
489 def __init__(self, file = None, binary = None):
493 def properties(self):
494 return ['file', 'binary']
496 __all__.append('BinContents')
498 ################################################################################
500 class DBBinary(ORMObject):
501 def __init__(self, package = None, source = None, version = None, \
502 maintainer = None, architecture = None, poolfile = None, \
503 binarytype = 'deb', fingerprint=None):
504 self.package = package
506 self.version = version
507 self.maintainer = maintainer
508 self.architecture = architecture
509 self.poolfile = poolfile
510 self.binarytype = binarytype
511 self.fingerprint = fingerprint
515 return self.binary_id
517 def properties(self):
518 return ['package', 'version', 'maintainer', 'source', 'architecture', \
519 'poolfile', 'binarytype', 'fingerprint', 'install_date', \
520 'suites_count', 'binary_id', 'contents_count', 'extra_sources']
522 def not_null_constraints(self):
523 return ['package', 'version', 'maintainer', 'source', 'poolfile', \
526 metadata = association_proxy('key', 'value')
528 def scan_contents(self):
530 Yields the contents of the package. Only regular files are yielded and
531 the path names are normalized after converting them from either utf-8
532 or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the
533 package does not contain any regular file.
535 fullpath = self.poolfile.fullpath
536 dpkg_cmd = ('dpkg-deb', '--fsys-tarfile', fullpath)
537 dpkg = daklib.daksubprocess.Popen(dpkg_cmd, stdout=subprocess.PIPE)
538 tar = TarFile.open(fileobj = dpkg.stdout, mode = 'r|')
539 for member in tar.getmembers():
540 if not member.isdir():
541 name = normpath(member.name)
542 # enforce proper utf-8 encoding
545 except UnicodeDecodeError:
546 name = name.decode('iso8859-1').encode('utf-8')
552 def read_control(self):
554 Reads the control information from a binary.
557 @return: stanza text of the control section.
560 fullpath = self.poolfile.fullpath
561 with open(fullpath, 'r') as deb_file:
562 return utils.deb_extract_control(deb_file)
564 def read_control_fields(self):
566 Reads the control information from a binary and return
570 @return: fields of the control section as a dictionary.
572 stanza = self.read_control()
573 return apt_pkg.TagSection(stanza)
577 session = object_session(self)
578 query = session.query(BinaryMetadata).filter_by(binary=self)
579 return MetadataProxy(session, query)
581 __all__.append('DBBinary')
def get_suites_binary_in(package, session=None):
    """
    Returns list of Suite objects which given C{package} name is in

    @type package: str
    @param package: DBBinary package name to search for

    @rtype: list
    @return: list of Suite objects for the given package
    """
    # EXISTS-style subquery: any suite holding a binary with this name.
    has_package = Suite.binaries.any(DBBinary.package == package)
    return session.query(Suite).filter(has_package).all()
597 __all__.append('get_suites_binary_in')
600 def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
602 Returns the component name of the newest binary package in suite_list or
603 None if no package is found. The result can be optionally filtered by a list
604 of architecture names.
607 @param package: DBBinary package name to search for
609 @type suite_list: list of str
610 @param suite_list: list of suite_name items
612 @type arch_list: list of str
613 @param arch_list: optional list of arch_string items that defaults to []
615 @rtype: str or NoneType
616 @return: name of component or None
619 q = session.query(DBBinary).filter_by(package = package). \
620 join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
621 if len(arch_list) > 0:
622 q = q.join(DBBinary.architecture). \
623 filter(Architecture.arch_string.in_(arch_list))
624 binary = q.order_by(desc(DBBinary.version)).first()
628 return binary.poolfile.component.component_name
630 __all__.append('get_component_by_package_suite')
632 ################################################################################
634 class BuildQueue(object):
635 def __init__(self, *args, **kwargs):
639 return '<BuildQueue %s>' % self.queue_name
641 __all__.append('BuildQueue')
643 ################################################################################
class Component(ORMObject):
    """An archive component such as 'main' or 'contrib'."""

    def __init__(self, component_name=None):
        self.component_name = component_name

    def __eq__(self, val):
        # Support comparing a Component directly against its name string.
        if isinstance(val, str):
            return self.component_name == val
        # Anything else falls back to the default comparison machinery.
        return NotImplemented

    def __ne__(self, val):
        # Keep inequality consistent with __eq__ for plain strings.
        if isinstance(val, str):
            return self.component_name != val
        return NotImplemented

    def properties(self):
        # 'component_name' first: repr() displays the leading property.
        return ['component_name', 'component_id', 'description',
                'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        return ['component_name']
669 __all__.append('Component')
672 def get_component(component, session=None):
674 Returns database id for given C{component}.
676 @type component: string
677 @param component: The name of the override type
680 @return: the database id for the given component
683 component = component.lower()
685 q = session.query(Component).filter_by(component_name=component)
689 except NoResultFound:
692 __all__.append('get_component')
695 def get_mapped_component(component_name, session=None):
696 """get component after mappings
698 Evaluate component mappings from ComponentMappings in dak.conf for the
699 given component name.
701 @todo: ansgar wants to get rid of this. It's currently only used for
704 @type component_name: str
705 @param component_name: component name
707 @param session: database session
709 @rtype: L{daklib.dbconn.Component} or C{None}
710 @return: component after applying maps or C{None}
713 for m in cnf.value_list("ComponentMappings"):
714 (src, dst) = m.split()
715 if component_name == src:
717 component = session.query(Component).filter_by(component_name=component_name).first()
720 __all__.append('get_mapped_component')
def get_component_names(session=None):
    """
    Returns list of strings of component names.

    @rtype: list
    @return: list of strings of component names
    """
    names = []
    for component in session.query(Component).all():
        names.append(component.component_name)
    return names
733 __all__.append('get_component_names')
735 ################################################################################
737 class DBConfig(object):
738 def __init__(self, *args, **kwargs):
742 return '<DBConfig %s>' % self.name
744 __all__.append('DBConfig')
746 ################################################################################
749 def get_or_set_contents_file_id(filename, session=None):
751 Returns database id for given filename.
753 If no matching file is found, a row is inserted.
755 @type filename: string
756 @param filename: The filename
757 @type session: SQLAlchemy
758 @param session: Optional SQL session object (a temporary one will be
759 generated if not supplied). If not passed, a commit will be performed at
760 the end of the function, otherwise the caller is responsible for commiting.
763 @return: the database id for the given component
766 q = session.query(ContentFilename).filter_by(filename=filename)
769 ret = q.one().cafilename_id
770 except NoResultFound:
771 cf = ContentFilename()
772 cf.filename = filename
774 session.commit_or_flush()
775 ret = cf.cafilename_id
779 __all__.append('get_or_set_contents_file_id')
782 def get_contents(suite, overridetype, section=None, session=None):
784 Returns contents for a suite / overridetype combination, limiting
785 to a section if not None.
788 @param suite: Suite object
790 @type overridetype: OverrideType
791 @param overridetype: OverrideType object
793 @type section: Section
794 @param section: Optional section object to limit results to
796 @type session: SQLAlchemy
797 @param session: Optional SQL session object (a temporary one will be
798 generated if not supplied)
801 @return: ResultsProxy object set up to return tuples of (filename, section,
805 # find me all of the contents for a given suite
806 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
810 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
811 JOIN content_file_names n ON (c.filename=n.id)
812 JOIN binaries b ON (b.id=c.binary_pkg)
813 JOIN override o ON (o.package=b.package)
814 JOIN section s ON (s.id=o.section)
815 WHERE o.suite = :suiteid AND o.type = :overridetypeid
816 AND b.type=:overridetypename"""
818 vals = {'suiteid': suite.suite_id,
819 'overridetypeid': overridetype.overridetype_id,
820 'overridetypename': overridetype.overridetype}
822 if section is not None:
823 contents_q += " AND s.id = :sectionid"
824 vals['sectionid'] = section.section_id
826 contents_q += " ORDER BY fn"
828 return session.execute(contents_q, vals)
830 __all__.append('get_contents')
832 ################################################################################
834 class ContentFilepath(object):
835 def __init__(self, *args, **kwargs):
839 return '<ContentFilepath %s>' % self.filepath
841 __all__.append('ContentFilepath')
844 def get_or_set_contents_path_id(filepath, session=None):
846 Returns database id for given path.
848 If no matching file is found, a row is inserted.
850 @type filepath: string
851 @param filepath: The filepath
853 @type session: SQLAlchemy
854 @param session: Optional SQL session object (a temporary one will be
855 generated if not supplied). If not passed, a commit will be performed at
856 the end of the function, otherwise the caller is responsible for commiting.
859 @return: the database id for the given path
862 q = session.query(ContentFilepath).filter_by(filepath=filepath)
865 ret = q.one().cafilepath_id
866 except NoResultFound:
867 cf = ContentFilepath()
868 cf.filepath = filepath
870 session.commit_or_flush()
871 ret = cf.cafilepath_id
875 __all__.append('get_or_set_contents_path_id')
877 ################################################################################
879 class ContentAssociation(object):
880 def __init__(self, *args, **kwargs):
884 return '<ContentAssociation %s>' % self.ca_id
886 __all__.append('ContentAssociation')
888 def insert_content_paths(binary_id, fullpaths, session=None):
890 Make sure given path is associated with given binary id
893 @param binary_id: the id of the binary
894 @type fullpaths: list
895 @param fullpaths: the list of paths of the file being associated with the binary
896 @type session: SQLAlchemy session
897 @param session: Optional SQLAlchemy session. If this is passed, the caller
898 is responsible for ensuring a transaction has begun and committing the
899 results or rolling back based on the result code. If not passed, a commit
900 will be performed at the end of the function, otherwise the caller is
901 responsible for commiting.
903 @return: True upon success
908 session = DBConn().session()
913 def generate_path_dicts():
914 for fullpath in fullpaths:
915 if fullpath.startswith( './' ):
916 fullpath = fullpath[2:]
918 yield {'filename':fullpath, 'id': binary_id }
920 for d in generate_path_dicts():
921 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
930 traceback.print_exc()
932 # Only rollback if we set up the session ourself
939 __all__.append('insert_content_paths')
941 ################################################################################
943 class DSCFile(object):
944 def __init__(self, *args, **kwargs):
948 return '<DSCFile %s>' % self.dscfile_id
950 __all__.append('DSCFile')
953 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
955 Returns a list of DSCFiles which may be empty
957 @type dscfile_id: int (optional)
958 @param dscfile_id: the dscfile_id of the DSCFiles to find
960 @type source_id: int (optional)
961 @param source_id: the source id related to the DSCFiles to find
963 @type poolfile_id: int (optional)
964 @param poolfile_id: the poolfile id related to the DSCFiles to find
967 @return: Possibly empty list of DSCFiles
970 q = session.query(DSCFile)
972 if dscfile_id is not None:
973 q = q.filter_by(dscfile_id=dscfile_id)
975 if source_id is not None:
976 q = q.filter_by(source_id=source_id)
978 if poolfile_id is not None:
979 q = q.filter_by(poolfile_id=poolfile_id)
983 __all__.append('get_dscfiles')
985 ################################################################################
987 class ExternalOverride(ORMObject):
988 def __init__(self, *args, **kwargs):
992 return '<ExternalOverride %s = %s: %s>' % (self.package, self.key, self.value)
994 __all__.append('ExternalOverride')
996 ################################################################################
998 class PoolFile(ORMObject):
999 def __init__(self, filename = None, filesize = -1, \
1001 self.filename = filename
1002 self.filesize = filesize
1003 self.md5sum = md5sum
1007 session = DBConn().session().object_session(self)
1008 af = session.query(ArchiveFile).join(Archive) \
1009 .filter(ArchiveFile.file == self) \
1010 .order_by(Archive.tainted.desc()).first()
1014 def component(self):
1015 session = DBConn().session().object_session(self)
1016 component_id = session.query(ArchiveFile.component_id).filter(ArchiveFile.file == self) \
1017 .group_by(ArchiveFile.component_id).one()
1018 return session.query(Component).get(component_id)
1022 return os.path.basename(self.filename)
1024 def is_valid(self, filesize = -1, md5sum = None):
1025 return self.filesize == long(filesize) and self.md5sum == md5sum
1027 def properties(self):
1028 return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
1029 'sha256sum', 'source', 'binary', 'last_used']
1031 def not_null_constraints(self):
1032 return ['filename', 'md5sum']
1034 def identical_to(self, filename):
1036 compare size and hash with the given file
1039 @return: true if the given file has the same size and hash as this object; false otherwise
1041 st = os.stat(filename)
1042 if self.filesize != st.st_size:
1045 f = open(filename, "r")
1046 sha256sum = apt_pkg.sha256sum(f)
1047 if sha256sum != self.sha256sum:
1052 __all__.append('PoolFile')
1055 def get_poolfile_like_name(filename, session=None):
1057 Returns an array of PoolFile objects which are like the given name
1059 @type filename: string
1060 @param filename: the filename of the file to check against the DB
1063 @return: array of PoolFile objects
1066 # TODO: There must be a way of properly using bind parameters with %FOO%
1067 q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1071 __all__.append('get_poolfile_like_name')
1073 ################################################################################
1075 class Fingerprint(ORMObject):
1076 def __init__(self, fingerprint = None):
1077 self.fingerprint = fingerprint
1079 def properties(self):
1080 return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
1083 def not_null_constraints(self):
1084 return ['fingerprint']
1086 __all__.append('Fingerprint')
1089 def get_fingerprint(fpr, session=None):
1091 Returns Fingerprint object for given fpr.
1094 @param fpr: The fpr to find / add
1096 @type session: SQLAlchemy
1097 @param session: Optional SQL session object (a temporary one will be
1098 generated if not supplied).
1101 @return: the Fingerprint object for the given fpr or None
1104 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1108 except NoResultFound:
1113 __all__.append('get_fingerprint')
1116 def get_or_set_fingerprint(fpr, session=None):
1118 Returns Fingerprint object for given fpr.
1120 If no matching fpr is found, a row is inserted.
1123 @param fpr: The fpr to find / add
1125 @type session: SQLAlchemy
1126 @param session: Optional SQL session object (a temporary one will be
1127 generated if not supplied). If not passed, a commit will be performed at
1128 the end of the function, otherwise the caller is responsible for commiting.
1129 A flush will be performed either way.
1132 @return: the Fingerprint object for the given fpr
1135 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1139 except NoResultFound:
1140 fingerprint = Fingerprint()
1141 fingerprint.fingerprint = fpr
1142 session.add(fingerprint)
1143 session.commit_or_flush()
1148 __all__.append('get_or_set_fingerprint')
1150 ################################################################################
1152 # Helper routine for Keyring class
1153 def get_ldap_name(entry):
1155 for k in ["cn", "mn", "sn"]:
1157 if ret and ret[0] != "" and ret[0] != "-":
1159 return " ".join(name)
1161 ################################################################################
1163 class Keyring(object):
1167 def __init__(self, *args, **kwargs):
1171 return '<Keyring %s>' % self.keyring_name
1173 def de_escape_gpg_str(self, txt):
1174 esclist = re.split(r'(\\x..)', txt)
1175 for x in range(1,len(esclist),2):
1176 esclist[x] = "%c" % (int(esclist[x][2:],16))
1177 return "".join(esclist)
1179 def parse_address(self, uid):
1180 """parses uid and returns a tuple of real name and email address"""
1182 (name, address) = email.Utils.parseaddr(uid)
1183 name = re.sub(r"\s*[(].*[)]", "", name)
1184 name = self.de_escape_gpg_str(name)
1187 return (name, address)
1189 def load_keys(self, keyring):
1190 if not self.keyring_id:
1191 raise Exception('Must be initialized with database information')
1193 cmd = ["gpg", "--no-default-keyring", "--keyring", keyring,
1194 "--with-colons", "--fingerprint", "--fingerprint"]
1195 p = daklib.daksubprocess.Popen(cmd, stdout=subprocess.PIPE)
1198 need_fingerprint = False
1200 for line in p.stdout:
1201 field = line.split(":")
1202 if field[0] == "pub":
1205 (name, addr) = self.parse_address(field[9])
1207 self.keys[key]["email"] = addr
1208 self.keys[key]["name"] = name
1209 need_fingerprint = True
1210 elif key and field[0] == "uid":
1211 (name, addr) = self.parse_address(field[9])
1212 if "email" not in self.keys[key] and "@" in addr:
1213 self.keys[key]["email"] = addr
1214 self.keys[key]["name"] = name
1215 elif need_fingerprint and field[0] == "fpr":
1216 self.keys[key]["fingerprints"] = [field[9]]
1217 self.fpr_lookup[field[9]] = key
1218 need_fingerprint = False
1222 raise subprocess.CalledProcessError(r, cmd)
1224 def import_users_from_ldap(self, session):
1228 LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
1229 LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
1230 ca_cert_file = cnf.get('Import-LDAP-Fingerprints::CACertFile')
1232 l = ldap.open(LDAPServer)
1235 # TODO: This should request a new context and use
1236 # connection-specific options (i.e. "l.set_option(...)")
1238 # Request a new TLS context. If there was already one, libldap
1239 # would not change the TLS options (like which CAs to trust).
1240 #l.set_option(ldap.OPT_X_TLS_NEWCTX, True)
1241 ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_HARD)
1242 #ldap.set_option(ldap.OPT_X_TLS_CACERTDIR, None)
1243 ldap.set_option(ldap.OPT_X_TLS_CACERTFILE, ca_cert_file)
1246 l.simple_bind_s("","")
1247 Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
1248 "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
1249 ["uid", "keyfingerprint", "cn", "mn", "sn"])
1251 ldap_fin_uid_id = {}
1258 uid = entry["uid"][0]
1259 name = get_ldap_name(entry)
1260 fingerprints = entry["keyFingerPrint"]
1262 for f in fingerprints:
1263 key = self.fpr_lookup.get(f, None)
1264 if key not in self.keys:
1266 self.keys[key]["uid"] = uid
1270 keyid = get_or_set_uid(uid, session).uid_id
1271 byuid[keyid] = (uid, name)
1272 byname[uid] = (keyid, name)
1274 return (byname, byuid)
1276 def generate_users_from_keyring(self, format, session):
1280 for x in self.keys.keys():
1281 if "email" not in self.keys[x]:
1283 self.keys[x]["uid"] = format % "invalid-uid"
1285 uid = format % self.keys[x]["email"]
1286 keyid = get_or_set_uid(uid, session).uid_id
1287 byuid[keyid] = (uid, self.keys[x]["name"])
1288 byname[uid] = (keyid, self.keys[x]["name"])
1289 self.keys[x]["uid"] = uid
1292 uid = format % "invalid-uid"
1293 keyid = get_or_set_uid(uid, session).uid_id
1294 byuid[keyid] = (uid, "ungeneratable user id")
1295 byname[uid] = (keyid, "ungeneratable user id")
1297 return (byname, byuid)
1299 __all__.append('Keyring')
@session_wrapper
def get_keyring(keyring, session=None):
    """Look up the Keyring row named C{keyring}.

    @type keyring: string
    @param keyring: the keyring name

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
        generated if not supplied)

    @rtype: Keyring or None
    @return: the existing Keyring object, or None if the keyrings table
        has no entry for this name
    """
    try:
        return session.query(Keyring).filter_by(keyring_name=keyring).one()
    except NoResultFound:
        return None
1321 __all__.append('get_keyring')
@session_wrapper
def get_active_keyring_paths(session=None):
    """List the paths of all active keyrings, highest priority first.

    @rtype: list of strings
    @return: list of active keyring paths
    """
    active = session.query(Keyring) \
        .filter(Keyring.active == True) \
        .order_by(desc(Keyring.priority))
    return [k.keyring_name for k in active]
1331 __all__.append('get_active_keyring_paths')
def get_primary_keyring_path(session=None):
    """
    Get the full path to the highest priority active keyring

    @rtype: str or None
    @return: path to the active keyring with the highest priority or None if no
             keyring is configured
    """
    keyrings = get_active_keyring_paths()
    # get_active_keyring_paths() sorts by descending priority, so the
    # first entry (if any) is the primary keyring.
    return keyrings[0] if keyrings else None
1349 __all__.append('get_primary_keyring_path')
1351 ################################################################################
class DBChange(object):
    """ORM class for a row of the C{changes} table (an uploaded .changes file)."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DBChange %s>' % self.changesname
1360 __all__.append('DBChange')
@session_wrapper
def get_dbchange(filename, session=None):
    """Return the DBChange row recorded for the given .changes file name.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
        generated if not supplied)

    @rtype: DBChange or None
    @return: DBChange object for the given filename (C{None} if not present)
    """
    try:
        return session.query(DBChange).filter_by(changesname=filename).one()
    except NoResultFound:
        return None
1385 __all__.append('get_dbchange')
1387 ################################################################################
class Maintainer(ORMObject):
    """ORM class for a row of the C{maintainer} table."""

    def __init__(self, name = None):
        self.name = name

    def properties(self):
        return ['name', 'maintainer_id']

    def not_null_constraints(self):
        return ['name']

    def get_split_maintainer(self):
        """Split the name into (name, rfc822, rfc2047, email) via fix_maintainer.

        Returns four empty strings when no name is set.
        """
        if getattr(self, 'name', None) is None:
            return ('', '', '', '')
        return fix_maintainer(self.name.strip())
1405 __all__.append('Maintainer')
@session_wrapper
def get_or_set_maintainer(name, session=None):
    """Return the Maintainer row for C{name}, inserting it if missing.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
        generated if not supplied). If not passed, a commit will be performed at
        the end of the function, otherwise the caller is responsible for commiting.
        A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """
    try:
        ret = session.query(Maintainer).filter_by(name=name).one()
    except NoResultFound:
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()
        ret = maintainer

    return ret
1439 __all__.append('get_or_set_maintainer')
@session_wrapper
def get_maintainer(maintainer_id, session=None):
    """Fetch a Maintainer by primary key.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer or None
    @return: the Maintainer with this C{maintainer_id}, or None if the id
        is unknown
    """
    return session.query(Maintainer).get(maintainer_id)
1456 __all__.append('get_maintainer')
1458 ################################################################################
class NewComment(object):
    """ORM class for a row of the C{new_comments} table (NEW queue comments)."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
1467 __all__.append('NewComment')
@session_wrapper
def has_new_comment(policy_queue, package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
        generated if not supplied)

    @rtype: boolean
    @return: true/false
    """
    q = session.query(NewComment).filter_by(policy_queue=policy_queue)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    # Only existence matters here: first() lets the database stop at the
    # first matching row instead of COUNTing them all.
    return q.first() is not None
1494 __all__.append('has_new_comment')
@session_wrapper
def get_new_comments(policy_queue, package=None, version=None, comment_id=None, session=None):
    """Return NewComment rows for a policy queue, optionally filtered.

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
        generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """
    q = session.query(NewComment).filter_by(policy_queue=policy_queue)
    # Apply only the filters the caller actually supplied.
    if package is not None:
        q = q.filter_by(package=package)
    if version is not None:
        q = q.filter_by(version=version)
    if comment_id is not None:
        q = q.filter_by(comment_id=comment_id)

    return q.all()
1526 __all__.append('get_new_comments')
1528 ################################################################################
class Override(ORMObject):
    """ORM class for a row of the C{override} table."""

    def __init__(self, package = None, suite = None, component = None,
                 overridetype = None, section = None, priority = None):
        self.package = package
        self.suite = suite
        self.component = component
        self.overridetype = overridetype
        self.section = section
        self.priority = priority

    def properties(self):
        return ['package', 'suite', 'component', 'overridetype', 'section',
                'priority']

    def not_null_constraints(self):
        return ['package', 'suite', 'component', 'overridetype', 'section']
1547 __all__.append('Override')
@session_wrapper
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """Return Override rows for C{package}, optionally narrowed further.

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to. If
        None, don't limit. Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
        limit to. If None, don't limit. Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a
        list) to limit to. If None, don't limit. Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
        generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """
    q = session.query(Override).filter_by(package=package)

    # Each optional filter accepts a scalar or a list; normalise to lists
    # so a single IN clause handles both.
    if suite is not None:
        suites = suite if isinstance(suite, list) else [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suites))

    if component is not None:
        components = component if isinstance(component, list) else [component]
        q = q.join(Component).filter(Component.component_name.in_(components))

    if overridetype is not None:
        overridetypes = overridetype if isinstance(overridetype, list) else [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetypes))

    return q.all()
1594 __all__.append('get_override')
1597 ################################################################################
class OverrideType(ORMObject):
    """ORM class for a row of the C{override_type} table (dsc/deb/udeb)."""

    def __init__(self, overridetype = None):
        self.overridetype = overridetype

    def properties(self):
        return ['overridetype', 'overridetype_id', 'overrides_count']

    def not_null_constraints(self):
        return ['overridetype']
1609 __all__.append('OverrideType')
@session_wrapper
def get_override_type(override_type, session=None):
    """Look up an OverrideType row by name.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
        generated if not supplied)

    @rtype: OverrideType or None
    @return: the OverrideType object for the given name (None if unknown)
    """
    try:
        return session.query(OverrideType).filter_by(overridetype=override_type).one()
    except NoResultFound:
        return None
1634 __all__.append('get_override_type')
1636 ################################################################################
class PolicyQueue(object):
    """ORM class for a row of the C{policy_queue} table (e.g. NEW, byhand)."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<PolicyQueue %s>' % self.queue_name
1645 __all__.append('PolicyQueue')
@session_wrapper
def get_policy_queue(queuename, session=None):
    """Look up a PolicyQueue row by queue name.

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
        generated if not supplied)

    @rtype: PolicyQueue or None
    @return: PolicyQueue object for the given queue (None if unknown)
    """
    try:
        return session.query(PolicyQueue).filter_by(queue_name=queuename).one()
    except NoResultFound:
        return None
1670 __all__.append('get_policy_queue')
1672 ################################################################################
class PolicyQueueUpload(object):
    """ORM class for a row of the C{policy_queue_upload} table."""

    def __cmp__(self, other):
        # Order by source name, then version, then source-vs-binary
        # (source uploads sort first), then by .changes file name.
        ret = cmp(self.changes.source, other.changes.source)
        if ret == 0:
            ret = apt_pkg.version_compare(self.changes.version, other.changes.version)
        if ret == 0:
            if self.source is not None and other.source is None:
                ret = -1
            elif self.source is None and other.source is not None:
                ret = 1
        if ret == 0:
            ret = cmp(self.changes.changesname, other.changes.changesname)
        return ret
1688 __all__.append('PolicyQueueUpload')
1690 ################################################################################
class PolicyQueueByhandFile(object):
    """ORM class for a row of the C{policy_queue_byhand_file} table."""
    pass
1695 __all__.append('PolicyQueueByhandFile')
1697 ################################################################################
class Priority(ORMObject):
    """ORM class for a row of the C{priority} table."""

    def __init__(self, priority = None, level = None):
        self.priority = priority
        self.level = level

    def properties(self):
        return ['priority', 'priority_id', 'level', 'overrides_count']

    def not_null_constraints(self):
        return ['priority', 'level']

    def __eq__(self, val):
        # Allow direct comparison against a priority name string.
        if isinstance(val, str):
            return self.priority == val
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return self.priority != val
        # This signals to use the normal comparison operator
        return NotImplemented
1722 __all__.append('Priority')
@session_wrapper
def get_priority(priority, session=None):
    """Look up a Priority row by name.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
        generated if not supplied)

    @rtype: Priority or None
    @return: Priority object for the given name (None if unknown)
    """
    try:
        return session.query(Priority).filter_by(priority=priority).one()
    except NoResultFound:
        return None
1747 __all__.append('get_priority')
@session_wrapper
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
        generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """
    return dict((p.priority, p.priority_id)
                for p in session.query(Priority).all())
1769 __all__.append('get_priorities')
1771 ################################################################################
class Section(ORMObject):
    """ORM class for a row of the C{section} table."""

    def __init__(self, section = None):
        self.section = section

    def properties(self):
        return ['section', 'section_id', 'overrides_count']

    def not_null_constraints(self):
        return ['section']

    def __eq__(self, val):
        # Allow direct comparison against a section name string.
        if isinstance(val, str):
            return self.section == val
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return self.section != val
        # This signals to use the normal comparison operator
        return NotImplemented
1795 __all__.append('Section')
@session_wrapper
def get_section(section, session=None):
    """Look up a Section row by name.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
        generated if not supplied)

    @rtype: Section or None
    @return: Section object for the given section name (None if unknown)
    """
    try:
        return session.query(Section).filter_by(section=section).one()
    except NoResultFound:
        return None
1820 __all__.append('get_section')
@session_wrapper
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
        generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """
    return dict((s.section, s.section_id)
                for s in session.query(Section).all())
1842 __all__.append('get_sections')
1844 ################################################################################
class SignatureHistory(ORMObject):
    """ORM class recording seen signatures (replay protection)."""

    @classmethod
    def from_signed_file(cls, signed_file):
        """signature history entry from signed file

        @type signed_file: L{daklib.gpg.SignedFile}
        @param signed_file: signed file

        @rtype: L{SignatureHistory}
        """
        self = cls()
        self.fingerprint = signed_file.primary_fingerprint
        self.signature_timestamp = signed_file.signature_timestamp
        self.contents_sha1 = signed_file.contents_sha1()
        return self
1862 __all__.append('SignatureHistory')
1864 ################################################################################
class SrcContents(ORMObject):
    """ORM class for a row of the C{src_contents} table (file in a source pkg)."""

    def __init__(self, file = None, source = None):
        self.file = file
        self.source = source

    def properties(self):
        return ['file', 'source']
1874 __all__.append('SrcContents')
1876 ################################################################################
1878 from debian.debfile import Deb822
1880 # Temporary Deb822 subclass to fix bugs with : handling; see #597249
class Dak822(Deb822):
    """Deb822 with a stricter field parser; works around Debian bug #597249."""

    def _internal_parser(self, sequence, fields=None):
        # The key is non-whitespace, non-colon characters before any colon.
        key_part = r"^(?P<key>[^: \t\n\r\f\v]+)\s*:\s*"
        single = re.compile(key_part + r"(?P<data>\S.*?)\s*$")
        multi = re.compile(key_part + r"$")
        multidata = re.compile(r"^\s(?P<data>.+?)\s*$")

        wanted_field = lambda f: fields is None or f in fields

        if isinstance(sequence, basestring):
            sequence = sequence.splitlines()

        curkey = None
        content = ""
        for line in self.gpg_stripped_paragraph(sequence):
            # "Key: value" on one line.
            m = single.match(line)
            if m:
                if curkey:
                    self[curkey] = content
                if not wanted_field(m.group('key')):
                    curkey = None
                    continue
                curkey = m.group('key')
                content = m.group('data')
                continue

            # "Key:" opening a multi-line field.
            m = multi.match(line)
            if m:
                if curkey:
                    self[curkey] = content
                if not wanted_field(m.group('key')):
                    curkey = None
                    continue
                curkey = m.group('key')
                content = ""
                continue

            # Continuation line of the current multi-line field.
            m = multidata.match(line)
            if m:
                content += '\n' + line # XXX not m.group('data')?
                continue

        # Flush the final field of the paragraph.
        if curkey:
            self[curkey] = content
class DBSource(ORMObject):
    """ORM class for a row of the C{source} table."""

    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None, fingerprint = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date
        self.fingerprint = fingerprint

    @property
    def pkid(self):
        # Common primary-key alias shared with DBBinary so generic code
        # can handle both. NOTE(review): reconstructed from a gapped view;
        # confirm the decorator/name against upstream.
        return self.source_id

    def properties(self):
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \
            'install_date', 'binaries_count', 'uploaders_count']

    def not_null_constraints(self):
        return ['source', 'version', 'install_date', 'maintainer', \
            'changedby', 'poolfile']

    def read_control_fields(self):
        """
        Reads the control information from a dsc

        @rtype: Dak822
        @return: the dsc information in a dictionary form
        """
        # Close the file handle once parsed; the previous revision leaked
        # the descriptor (and kept an unused local for the path).
        fd = open(self.poolfile.fullpath, 'r')
        try:
            return Dak822(fd)
        finally:
            fd.close()

    metadata = association_proxy('key', 'value')

    def scan_contents(self):
        """
        Returns a set of names for non directories. The path names are
        normalized after converting them from either utf-8 or iso8859-1
        encoding.
        """
        fullpath = self.poolfile.fullpath
        from daklib.contents import UnpackedSource
        unpacked = UnpackedSource(fullpath)
        fileset = set()
        for name in unpacked.get_all_filenames():
            # enforce proper utf-8 encoding
            try:
                name.decode('utf-8')
            except UnicodeDecodeError:
                name = name.decode('iso8859-1').encode('utf-8')
            fileset.add(name)
        return fileset

    @property
    def proxy(self):
        # Dict-like access to this source package's metadata rows.
        session = object_session(self)
        query = session.query(SourceMetadata).filter_by(source=self)
        return MetadataProxy(session, query)
1994 __all__.append('DBSource')
@session_wrapper
def source_exists(source, source_version, suites = None, session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
      1. exact match     => 1.0-3
      2. bin-only NMU    => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: list
    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
        generated if not supplied)

    @rtype: boolean
    @return: True if a source with the expected version is found in every
        requested suite, otherwise False
    """
    # None stands in for the old mutable default of ["any"]; never use a
    # list literal as a default argument.
    if suites is None:
        suites = ["any"]

    ret = True

    from daklib.regexes import re_bin_only_nmu
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))
        if suite != "any":
            # source must exist in 'suite' or a suite that is enhanced by 'suite'
            s = get_suite(suite, session)
            if s:
                enhances_vcs = session.query(VersionCheck).filter(VersionCheck.suite==s).filter_by(check='Enhances')
                considered_suites = [ vc.reference for vc in enhances_vcs ]
                considered_suites.append(s)

                q = q.filter(DBSource.suites.any(Suite.suite_id.in_([s.suite_id for s in considered_suites])))

        if q.count() > 0:
            continue

        # No source found so return not ok
        ret = False

    return ret
2049 __all__.append('source_exists')
@session_wrapper
def get_suites_source_in(source, session=None):
    """List the suites that contain a source package named C{source}.

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """
    return session.query(Suite).filter(Suite.sources.any(source=source)).all()
2065 __all__.append('get_suites_source_in')
@session_wrapper
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """Return DBSource rows matching C{source} and the optional filters.

    @type source: str
    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect. If True or False, only
        return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
        generated if not supplied)

    @rtype: list
    @return: list of DBSource objects for the given name (may be empty)
    """
    q = session.query(DBSource).filter_by(source=source)

    if version is not None:
        q = q.filter_by(version=version)
    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)

    return q.all()
2100 __all__.append('get_sources_from_name')
2102 # FIXME: This function fails badly if it finds more than 1 source package and
2103 # its implementation is trivial enough to be inlined.
@session_wrapper
def get_source_in_suite(source, suite_name, session=None):
    """Return the DBSource for C{source} in suite C{suite_name}, or None.

    - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
    - B{suite_name} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite_name: string
    @param suite_name: the suite name

    @rtype: DBSource or None
    @return: the DBSource for I{source} in I{suite} (None if either the
        suite is unknown or the source is not in it)
    """
    suite = get_suite(suite_name, session)
    if suite is None:
        return None
    try:
        return suite.get_sources(source).one()
    except NoResultFound:
        return None
2130 __all__.append('get_source_in_suite')
@session_wrapper
def import_metadata_into_db(obj, session=None):
    """
    This routine works on either DBBinary or DBSource objects and imports
    their metadata into the database
    """
    fields = obj.read_control_fields()
    for k in fields.keys():
        try:
            # Try raw ASCII
            val = str(fields[k])
        except UnicodeEncodeError:
            # Fall back to UTF-8
            try:
                val = fields[k].encode('utf-8')
            except UnicodeEncodeError:
                # Finally try iso8859-1
                val = fields[k].encode('iso8859-1')
                # Otherwise we allow the exception to percolate up and we cause
                # a reject as someone is playing silly buggers

        obj.metadata[get_or_set_metadatakey(k, session)] = val

    session.commit_or_flush()
2157 __all__.append('import_metadata_into_db')
2159 ################################################################################
class SrcFormat(object):
    """ORM class for a row of the C{src_format} table (e.g. "3.0 (quilt)")."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SrcFormat %s>' % (self.format_name)
2168 __all__.append('SrcFormat')
2170 ################################################################################
# (display name, Suite attribute) pairs used by Suite.details().
SUITE_FIELDS = [('SuiteName', 'suite_name'),
                ('SuiteID', 'suite_id'),
                ('Version', 'version'),
                ('Origin', 'origin'),
                ('Label', 'label'),
                ('Description', 'description'),
                ('Untouchable', 'untouchable'),
                ('Announce', 'announce'),
                ('Codename', 'codename'),
                ('OverrideCodename', 'overridecodename'),
                ('ValidTime', 'validtime'),
                ('Priority', 'priority'),
                ('NotAutomatic', 'notautomatic'),
                ('CopyChanges', 'copychanges'),
                ('OverrideSuite', 'overridesuite')]
2188 # Why the heck don't we have any UNIQUE constraints in table suite?
2189 # TODO: Add UNIQUE constraints for appropriate columns.
class Suite(ORMObject):
    """ORM class for a row of the C{suite} table."""

    def __init__(self, suite_name = None, version = None):
        self.suite_name = suite_name
        self.version = version

    def properties(self):
        return ['suite_name', 'version', 'sources_count', 'binaries_count',
                'overrides_count']

    def not_null_constraints(self):
        return ['suite_name']

    def __eq__(self, val):
        # Allow direct comparison against a suite name string.
        if isinstance(val, str):
            return self.suite_name == val
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return self.suite_name != val
        # This signals to use the normal comparison operator
        return NotImplemented

    def details(self):
        """Render the SUITE_FIELDS that are set as "Name: value" lines."""
        ret = []
        for disp, field in SUITE_FIELDS:
            val = getattr(self, field, None)
            if val is not None:
                ret.append("%s: %s" % (disp, val))
        return "\n".join(ret)

    def get_architectures(self, skipsrc=False, skipall=False):
        """
        Returns list of Architecture objects

        @type skipsrc: boolean
        @param skipsrc: Whether to skip returning the 'source' architecture entry

        @type skipall: boolean
        @param skipall: Whether to skip returning the 'all' architecture entry

        @rtype: list
        @return: list of Architecture objects for the given name (may be empty)
        """
        q = object_session(self).query(Architecture).with_parent(self)
        if skipsrc:
            q = q.filter(Architecture.arch_string != 'source')
        if skipall:
            q = q.filter(Architecture.arch_string != 'all')
        return q.order_by(Architecture.arch_string).all()

    def get_sources(self, source):
        """
        Returns a query object representing DBSource that is part of C{suite}.

        - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}

        @type source: string
        @param source: source package name

        @rtype: sqlalchemy.orm.query.Query
        @return: a query of DBSource
        """
        session = object_session(self)
        return session.query(DBSource).filter_by(source = source). \
            with_parent(self)

    def get_overridesuite(self):
        # Follow the overridesuite indirection if configured.
        if self.overridesuite is None:
            return self
        else:
            return object_session(self).query(Suite).filter_by(suite_name=self.overridesuite).one()

    @property
    def path(self):
        return os.path.join(self.archive.path, 'dists', self.suite_name)
2274 __all__.append('Suite')
@session_wrapper
def get_suite(suite, session=None):
    """Look up a Suite row by name.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
        generated if not supplied)

    @rtype: Suite or None
    @return: Suite object for the requested suite name (None if not present)
    """
    try:
        return session.query(Suite).filter_by(suite_name=suite).one()
    except NoResultFound:
        return None
2299 __all__.append('get_suite')
2301 ################################################################################
@session_wrapper
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """Return the Architecture objects of C{suite}, or None if it is unknown.

    @type suite: str
    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
        generated if not supplied)

    @rtype: list or None
    @return: list of Architecture objects for the given suite name
    """
    try:
        return get_suite(suite, session).get_architectures(skipsrc, skipall)
    except AttributeError:
        # get_suite() returned None -> unknown suite.
        return None
2333 __all__.append('get_suite_architectures')
2335 ################################################################################
class Uid(ORMObject):
    """ORM class for a row of the C{uid} table."""

    def __init__(self, uid = None, name = None):
        self.uid = uid
        self.name = name

    def __eq__(self, val):
        # Allow direct comparison against a uid string.
        if isinstance(val, str):
            return self.uid == val
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return self.uid != val
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        return ['uid', 'name', 'fingerprint']

    def not_null_constraints(self):
        return ['uid']
2360 __all__.append('Uid')
@session_wrapper
def get_or_set_uid(uidname, session=None):
    """Return the Uid row for C{uidname}, inserting it if missing.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
        generated if not supplied). If not passed, a commit will be performed at
        the end of the function, otherwise the caller is responsible for commiting.

    @rtype: Uid
    @return: the uid object for the given uidname
    """
    try:
        ret = session.query(Uid).filter_by(uid=uidname).one()
    except NoResultFound:
        uid = Uid()
        uid.uid = uidname
        session.add(uid)
        session.commit_or_flush()
        ret = uid

    return ret
2394 __all__.append('get_or_set_uid')
@session_wrapper
def get_uid_from_fingerprint(fpr, session=None):
    """Return the Uid owning fingerprint C{fpr}, or None if unknown."""
    q = session.query(Uid).join(Fingerprint).filter_by(fingerprint=fpr)
    try:
        return q.one()
    except NoResultFound:
        return None
2406 __all__.append('get_uid_from_fingerprint')
2408 ################################################################################
class MetadataKey(ORMObject):
    """ORM class for a row of the C{metadata_keys} table (control field names)."""

    def __init__(self, key = None):
        self.key = key

    def properties(self):
        return ['key']

    def not_null_constraints(self):
        return ['key']
2420 __all__.append('MetadataKey')
@session_wrapper
def get_or_set_metadatakey(keyname, session=None):
    """
    Returns MetadataKey object for given C{keyname}.

    If no matching keyname is found, a row is inserted.

    @type keyname: string
    @param keyname: The keyname to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
        generated if not supplied). If not passed, a commit will be performed at
        the end of the function, otherwise the caller is responsible for commiting.

    @rtype: MetadataKey
    @return: the metadatakey object for the given keyname
    """
    # NOTE: the docstring previously documented the parameter as "uidname"
    # (copy-paste from get_or_set_uid); the parameter is keyname.
    q = session.query(MetadataKey).filter_by(key=keyname)

    try:
        ret = q.one()
    except NoResultFound:
        ret = MetadataKey(keyname)
        session.add(ret)
        session.commit_or_flush()

    return ret
2452 __all__.append('get_or_set_metadatakey')
2454 ################################################################################
class BinaryMetadata(ORMObject):
    """ORM class for a row of the C{binaries_metadata} table."""

    def __init__(self, key = None, value = None, binary = None):
        self.key = key
        self.value = value
        self.binary = binary

    def properties(self):
        return ['binary', 'key', 'value']

    def not_null_constraints(self):
        return ['value']
2468 __all__.append('BinaryMetadata')
2470 ################################################################################
class SourceMetadata(ORMObject):
    """ORM class for a row of the C{source_metadata} table."""

    def __init__(self, key = None, value = None, source = None):
        self.key = key
        self.value = value
        self.source = source

    def properties(self):
        return ['source', 'key', 'value']

    def not_null_constraints(self):
        return ['value']
2484 __all__.append('SourceMetadata')
2486 ################################################################################
class MetadataProxy(object):
    """Dict-like read access to the metadata rows of one binary/source package."""

    def __init__(self, session, query):
        self.session = session
        self.query = query

    def _get(self, key):
        # Resolve the key name to its MetadataKey row first; an unknown
        # key name simply yields no metadata.
        metadata_key = self.session.query(MetadataKey).filter_by(key=key).first()
        if metadata_key is None:
            return None
        return self.query.filter_by(key=metadata_key).first()

    def __contains__(self, key):
        return self._get(key) is not None

    def __getitem__(self, key):
        metadata = self._get(key)
        if metadata is None:
            raise KeyError
        return metadata.value

    def get(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            return default
2517 ################################################################################
class VersionCheck(ORMObject):
    """ORM class for a row of the C{version_check} table."""

    def __init__(self, *args, **kwargs):
        pass

    def properties(self):
        #return ['suite_id', 'check', 'reference_id']
        return ['check']

    def not_null_constraints(self):
        return ['suite', 'check', 'reference']
2530 __all__.append('VersionCheck')
@session_wrapper
def get_version_checks(suite_name, check = None, session = None):
    """Return VersionCheck rows for C{suite_name}, optionally one check type.

    @rtype: list
    @return: (possibly empty) list of VersionCheck objects; empty when the
        suite name is unknown
    """
    suite = get_suite(suite_name, session)
    if suite is None:
        # Make sure that what we return is iterable so that list comprehensions
        # involving this don't cause a traceback
        return []

    q = session.query(VersionCheck).filter_by(suite=suite)
    if check:
        q = q.filter_by(check=check)

    return q.all()
2544 __all__.append('get_version_checks')
2546 ################################################################################
class DBConn(object):
    """
    database module init.
    """

    def __init__(self, *args, **kwargs):
        # Borg pattern: every instance shares __shared_state, so DBConn()
        # behaves as a process-wide singleton connection holder.
        # NOTE(review): the __shared_state class attribute and the call
        # that creates the connection are not visible in this view.
        self.__dict__ = self.__shared_state

        if not getattr(self, 'initialised', False):
            self.initialised = True
            # A 'debug' keyword turns on SQLAlchemy statement echoing
            # (fed to engine_args['echo'] in __createconn).
            self.debug = kwargs.has_key('debug')

    def __setuptables(self):
        # Reflect the schema from the live database.  Table names below are
        # turned into self.tbl_<name>, view names into self.view_<name>.
        # NOTE(review): the `tables = (` / `views = (` tuple headers and a
        # number of entries are not visible in this view.
            'acl_architecture_map',
            'acl_fingerprint_map',
            'binaries_metadata',
            'external_overrides',
            'extra_src_references',
            'files_archive_map',
            # TODO: the maintainer column in table override should be removed.
            'policy_queue_upload',
            'policy_queue_upload_binaries_map',
            'policy_queue_byhand_file',
            'signature_history',
            'suite_architectures',
            'suite_build_queue_copy',
            'suite_src_formats',
        # Database views (read-only):
            'almost_obsolete_all_associations',
            'almost_obsolete_src_associations',
            'any_associations_source',
            'bin_associations_binaries',
            'binaries_suite_arch',
            'newest_all_associations',
            'newest_any_associations',
            'newest_src_association',
            'obsolete_all_associations',
            'obsolete_any_associations',
            'obsolete_any_by_all_associations',
            'obsolete_src_associations',
            'src_associations_bin',
            'src_associations_src',
            'suite_arch_by_name',

        # autoload=True reflects columns from the database at run time.
        for table_name in tables:
            table = Table(table_name, self.db_meta, \
                autoload=True, useexisting=True)
            setattr(self, 'tbl_%s' % table_name, table)

        for view_name in views:
            view = Table(view_name, self.db_meta, autoload=True)
            setattr(self, 'view_%s' % view_name, view)

    def __setupmappers(self):
        # Classical SQLAlchemy mappings: ORM class <-> reflected table.
        # `validator` is a MapperExtension defined elsewhere in this module.
        # NOTE(review): several mappers below are missing their
        # `properties = dict(` line and/or closing parentheses in this view.
        mapper(Architecture, self.tbl_architecture,
            properties = dict(arch_id = self.tbl_architecture.c.id,
               suites = relation(Suite, secondary=self.tbl_suite_architectures,
                   order_by=self.tbl_suite.c.suite_name,
                   backref=backref('architectures', order_by=self.tbl_architecture.c.arch_string))),
            extension = validator)

        mapper(ACL, self.tbl_acl,
            architectures = relation(Architecture, secondary=self.tbl_acl_architecture_map, collection_class=set),
            fingerprints = relation(Fingerprint, secondary=self.tbl_acl_fingerprint_map, collection_class=set),
            match_keyring = relation(Keyring, primaryjoin=(self.tbl_acl.c.match_keyring_id == self.tbl_keyrings.c.id)),
            per_source = relation(ACLPerSource, collection_class=set),

        mapper(ACLPerSource, self.tbl_acl_per_source,
            acl = relation(ACL),
            fingerprint = relation(Fingerprint, primaryjoin=(self.tbl_acl_per_source.c.fingerprint_id == self.tbl_fingerprint.c.id)),
            created_by = relation(Fingerprint, primaryjoin=(self.tbl_acl_per_source.c.created_by_id == self.tbl_fingerprint.c.id)),

        mapper(Archive, self.tbl_archive,
            properties = dict(archive_id = self.tbl_archive.c.id,
                              archive_name = self.tbl_archive.c.name))

        mapper(ArchiveFile, self.tbl_files_archive_map,
            properties = dict(archive = relation(Archive, backref='files'),
                              component = relation(Component),
                              file = relation(PoolFile, backref='archives')))

        mapper(BuildQueue, self.tbl_build_queue,
            properties = dict(queue_id = self.tbl_build_queue.c.id,
                              suite = relation(Suite, primaryjoin=(self.tbl_build_queue.c.suite_id==self.tbl_suite.c.id))))

        # DBBinary: a .deb; note `key` maps the metadata rows as a dict
        # keyed on MetadataKey, which MetadataProxy wraps elsewhere.
        mapper(DBBinary, self.tbl_binaries,
            properties = dict(binary_id = self.tbl_binaries.c.id,
                              package = self.tbl_binaries.c.package,
                              version = self.tbl_binaries.c.version,
                              maintainer_id = self.tbl_binaries.c.maintainer,
                              maintainer = relation(Maintainer),
                              source_id = self.tbl_binaries.c.source,
                              source = relation(DBSource, backref='binaries'),
                              arch_id = self.tbl_binaries.c.architecture,
                              architecture = relation(Architecture),
                              poolfile_id = self.tbl_binaries.c.file,
                              poolfile = relation(PoolFile),
                              binarytype = self.tbl_binaries.c.type,
                              fingerprint_id = self.tbl_binaries.c.sig_fpr,
                              fingerprint = relation(Fingerprint),
                              install_date = self.tbl_binaries.c.install_date,
                              suites = relation(Suite, secondary=self.tbl_bin_associations,
                                  backref=backref('binaries', lazy='dynamic')),
                              extra_sources = relation(DBSource, secondary=self.tbl_extra_src_references,
                                  backref=backref('extra_binary_references', lazy='dynamic')),
                              key = relation(BinaryMetadata, cascade='all',
                                  collection_class=attribute_mapped_collection('key'))),
            extension = validator)

        mapper(Component, self.tbl_component,
            properties = dict(component_id = self.tbl_component.c.id,
                              component_name = self.tbl_component.c.name),
            extension = validator)

        mapper(DBConfig, self.tbl_config,
            properties = dict(config_id = self.tbl_config.c.id))

        mapper(DSCFile, self.tbl_dsc_files,
            properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
                              source_id = self.tbl_dsc_files.c.source,
                              source = relation(DBSource),
                              poolfile_id = self.tbl_dsc_files.c.file,
                              poolfile = relation(PoolFile)))

        mapper(ExternalOverride, self.tbl_external_overrides,
            suite_id = self.tbl_external_overrides.c.suite,
            suite = relation(Suite),
            component_id = self.tbl_external_overrides.c.component,
            component = relation(Component)))

        mapper(PoolFile, self.tbl_files,
            properties = dict(file_id = self.tbl_files.c.id,
                              filesize = self.tbl_files.c.size),
            extension = validator)

        mapper(Fingerprint, self.tbl_fingerprint,
            properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
                              uid_id = self.tbl_fingerprint.c.uid,
                              uid = relation(Uid),
                              keyring_id = self.tbl_fingerprint.c.keyring,
                              keyring = relation(Keyring),
                              acl = relation(ACL)),
            extension = validator)

        mapper(Keyring, self.tbl_keyrings,
            properties = dict(keyring_name = self.tbl_keyrings.c.name,
                              keyring_id = self.tbl_keyrings.c.id,
                              acl = relation(ACL, primaryjoin=(self.tbl_keyrings.c.acl_id == self.tbl_acl.c.id)))),

        mapper(DBChange, self.tbl_changes,
            properties = dict(change_id = self.tbl_changes.c.id,
                              seen = self.tbl_changes.c.seen,
                              source = self.tbl_changes.c.source,
                              binaries = self.tbl_changes.c.binaries,
                              architecture = self.tbl_changes.c.architecture,
                              distribution = self.tbl_changes.c.distribution,
                              urgency = self.tbl_changes.c.urgency,
                              maintainer = self.tbl_changes.c.maintainer,
                              changedby = self.tbl_changes.c.changedby,
                              date = self.tbl_changes.c.date,
                              version = self.tbl_changes.c.version))

        # Maintainer gets two relations to DBSource because a source has
        # both a Maintainer: and a Changed-By: identity.
        mapper(Maintainer, self.tbl_maintainer,
            properties = dict(maintainer_id = self.tbl_maintainer.c.id,
                              maintains_sources = relation(DBSource, backref='maintainer',
                                  primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
                              changed_sources = relation(DBSource, backref='changedby',
                                  primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
            extension = validator)

        mapper(NewComment, self.tbl_new_comments,
            properties = dict(comment_id = self.tbl_new_comments.c.id,
                              policy_queue = relation(PolicyQueue)))

        mapper(Override, self.tbl_override,
            properties = dict(suite_id = self.tbl_override.c.suite,
                              suite = relation(Suite, \
                                  backref=backref('overrides', lazy='dynamic')),
                              package = self.tbl_override.c.package,
                              component_id = self.tbl_override.c.component,
                              component = relation(Component, \
                                  backref=backref('overrides', lazy='dynamic')),
                              priority_id = self.tbl_override.c.priority,
                              priority = relation(Priority, \
                                  backref=backref('overrides', lazy='dynamic')),
                              section_id = self.tbl_override.c.section,
                              section = relation(Section, \
                                  backref=backref('overrides', lazy='dynamic')),
                              overridetype_id = self.tbl_override.c.type,
                              overridetype = relation(OverrideType, \
                                  backref=backref('overrides', lazy='dynamic'))))

        mapper(OverrideType, self.tbl_override_type,
            properties = dict(overridetype = self.tbl_override_type.c.type,
                              overridetype_id = self.tbl_override_type.c.id))

        mapper(PolicyQueue, self.tbl_policy_queue,
            properties = dict(policy_queue_id = self.tbl_policy_queue.c.id,
                              suite = relation(Suite, primaryjoin=(self.tbl_policy_queue.c.suite_id == self.tbl_suite.c.id))))

        mapper(PolicyQueueUpload, self.tbl_policy_queue_upload,
            changes = relation(DBChange),
            policy_queue = relation(PolicyQueue, backref='uploads'),
            target_suite = relation(Suite),
            source = relation(DBSource),
            binaries = relation(DBBinary, secondary=self.tbl_policy_queue_upload_binaries_map),

        mapper(PolicyQueueByhandFile, self.tbl_policy_queue_byhand_file,
            upload = relation(PolicyQueueUpload, backref='byhand'),

        mapper(Priority, self.tbl_priority,
            properties = dict(priority_id = self.tbl_priority.c.id))

        mapper(Section, self.tbl_section,
            properties = dict(section_id = self.tbl_section.c.id,
                              section=self.tbl_section.c.section))

        mapper(SignatureHistory, self.tbl_signature_history)

        mapper(DBSource, self.tbl_source,
            properties = dict(source_id = self.tbl_source.c.id,
                              version = self.tbl_source.c.version,
                              maintainer_id = self.tbl_source.c.maintainer,
                              poolfile_id = self.tbl_source.c.file,
                              poolfile = relation(PoolFile),
                              fingerprint_id = self.tbl_source.c.sig_fpr,
                              fingerprint = relation(Fingerprint),
                              changedby_id = self.tbl_source.c.changedby,
                              srcfiles = relation(DSCFile,
                                  primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
                              suites = relation(Suite, secondary=self.tbl_src_associations,
                                  backref=backref('sources', lazy='dynamic')),
                              uploaders = relation(Maintainer,
                                  secondary=self.tbl_src_uploaders),
                              key = relation(SourceMetadata, cascade='all',
                                  collection_class=attribute_mapped_collection('key'))),
            extension = validator)

        mapper(SrcFormat, self.tbl_src_format,
            properties = dict(src_format_id = self.tbl_src_format.c.id,
                              format_name = self.tbl_src_format.c.format_name))

        mapper(Suite, self.tbl_suite,
            properties = dict(suite_id = self.tbl_suite.c.id,
                              policy_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.policy_queue_id == self.tbl_policy_queue.c.id)),
                              new_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.new_queue_id == self.tbl_policy_queue.c.id)),
                              copy_queues = relation(BuildQueue,
                                  secondary=self.tbl_suite_build_queue_copy),
                              srcformats = relation(SrcFormat, secondary=self.tbl_suite_src_formats,
                                  backref=backref('suites', lazy='dynamic')),
                              archive = relation(Archive, backref='suites'),
                              acls = relation(ACL, secondary=self.tbl_suite_acl_map, collection_class=set),
                              components = relation(Component, secondary=self.tbl_component_suite,
                                  order_by=self.tbl_component.c.ordering,
                                  backref=backref('suites'))),
            extension = validator)

        mapper(Uid, self.tbl_uid,
            properties = dict(uid_id = self.tbl_uid.c.id,
                              fingerprint = relation(Fingerprint)),
            extension = validator)

        mapper(BinContents, self.tbl_bin_contents,
            binary = relation(DBBinary,
                backref=backref('contents', lazy='dynamic', cascade='all')),
            file = self.tbl_bin_contents.c.file))

        mapper(SrcContents, self.tbl_src_contents,
            source = relation(DBSource,
                backref=backref('contents', lazy='dynamic', cascade='all')),
            file = self.tbl_src_contents.c.file))

        mapper(MetadataKey, self.tbl_metadata_keys,
            key_id = self.tbl_metadata_keys.c.key_id,
            key = self.tbl_metadata_keys.c.key))

        mapper(BinaryMetadata, self.tbl_binaries_metadata,
            binary_id = self.tbl_binaries_metadata.c.bin_id,
            binary = relation(DBBinary),
            key_id = self.tbl_binaries_metadata.c.key_id,
            key = relation(MetadataKey),
            value = self.tbl_binaries_metadata.c.value))

        mapper(SourceMetadata, self.tbl_source_metadata,
            source_id = self.tbl_source_metadata.c.src_id,
            source = relation(DBSource),
            key_id = self.tbl_source_metadata.c.key_id,
            key = relation(MetadataKey),
            value = self.tbl_source_metadata.c.value))

        # VersionCheck joins the suite table twice (checked suite and
        # reference suite); reference is eagerly loaded.
        mapper(VersionCheck, self.tbl_version_check,
            suite_id = self.tbl_version_check.c.suite,
            suite = relation(Suite, primaryjoin=self.tbl_version_check.c.suite==self.tbl_suite.c.id),
            reference_id = self.tbl_version_check.c.reference,
            reference = relation(Suite, primaryjoin=self.tbl_version_check.c.reference==self.tbl_suite.c.id, lazy='joined')))

    ## Connection functions
    def __createconn(self):
        # Build a libpq connection string from dak's configuration, create
        # the engine and session factory, then reflect tables and set up
        # the ORM mappings.
        # NOTE(review): `cnf = Config()` and the `try:` opening the block
        # handled by the `except OperationalError` below are not visible
        # in this view.
        from config import Config
        if cnf.has_key("DB::Service"):
            # Named service from pg_service.conf; needs the dialect
            # monkey-patch below to be understood by psycopg2.
            connstr = "postgresql://service=%s" % cnf["DB::Service"]
        elif cnf.has_key("DB::Host"):
            # TCP/IP connection to an explicit host.
            connstr = "postgresql://%s" % cnf["DB::Host"]
            if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
                connstr += ":%s" % cnf["DB::Port"]
            connstr += "/%s" % cnf["DB::Name"]
            # NOTE(review): the following lines belong to the elided
            # `else:` (local socket) branch of the original.
            connstr = "postgresql:///%s" % cnf["DB::Name"]
            if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
                connstr += "?port=%s" % cnf["DB::Port"]

        # Engine tuning knobs, all optional in the config.
        engine_args = { 'echo': self.debug }
        if cnf.has_key('DB::PoolSize'):
            engine_args['pool_size'] = int(cnf['DB::PoolSize'])
        if cnf.has_key('DB::MaxOverflow'):
            engine_args['max_overflow'] = int(cnf['DB::MaxOverflow'])
        if sa_major_version != '0.5' and cnf.has_key('DB::Unicode') and \
            cnf['DB::Unicode'] == 'false':
            engine_args['use_native_unicode'] = False

        # Monkey patch a new dialect in in order to support service= syntax
        import sqlalchemy.dialects.postgresql
        from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
        class PGDialect_psycopg2_dak(PGDialect_psycopg2):
            def create_connect_args(self, url):
                if str(url).startswith('postgresql://service='):
                    # Strip the 21-character 'postgresql://service='
                    # prefix, leaving just the service name for libpq.
                    servicename = str(url)[21:]
                    return (['service=%s' % servicename], {})
                    # NOTE(review): the original wraps this fallback in an
                    # elided `else:`; after the return above the two forms
                    # are equivalent.
                    return PGDialect_psycopg2.create_connect_args(self, url)

        sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak

            # NOTE(review): the `try:` for the except below is elided.
            self.db_pg = create_engine(connstr, **engine_args)
            self.db_meta = MetaData()
            self.db_meta.bind = self.db_pg
            self.db_smaker = sessionmaker(bind=self.db_pg,
            self.__setuptables()
            self.__setupmappers()
        except OperationalError as e:
            # Abort the whole program: nothing in dak works without the DB.
            utils.fubar("Cannot connect to database (%s)" % str(e))

        # Remembered so session() can detect a fork and reconnect.
        self.pid = os.getpid()

    def session(self, work_mem = 0):
        '''
        Returns a new session object. If a work_mem parameter is provided a new
        transaction is started and the work_mem parameter is set for this
        transaction. The work_mem parameter is measured in MB. A default value
        will be used if the parameter is not set.
        '''
        # reinitialize DBConn in new processes
        if self.pid != os.getpid():
        # NOTE(review): the fork-recovery body and the `if work_mem > 0:`
        # guard around the SET LOCAL below are not visible in this view;
        # the session is presumably returned at the end.
        session = self.db_smaker()
        session.execute("SET LOCAL work_mem TO '%d MB'" % work_mem)

__all__.append('DBConn')