5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
37 import daklib.daksubprocess
39 from os.path import normpath
50 import simplejson as json
52 from datetime import datetime, timedelta
53 from errno import ENOENT
54 from tempfile import mkstemp, mkdtemp
55 from tarfile import TarFile
57 from inspect import getargspec
60 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
62 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
63 backref, MapperExtension, EXT_CONTINUE, object_mapper, clear_mappers
64 from sqlalchemy import types as sqltypes
65 from sqlalchemy.orm.collections import attribute_mapped_collection
66 from sqlalchemy.ext.associationproxy import association_proxy
68 # Don't remove this, we re-export the exceptions to scripts which import us
69 from sqlalchemy.exc import *
70 from sqlalchemy.orm.exc import NoResultFound
72 # Only import Config until Queue stuff is changed to store its config
74 from config import Config
75 from textutils import fix_maintainer
76 from dak_exceptions import DBUpdateError, NoSourceFieldError, FileExistsError
78 # suppress some deprecation warnings in squeeze related to sqlalchemy
80 warnings.filterwarnings('ignore', \
81 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
83 warnings.filterwarnings('ignore', \
84 "Predicate of partial index .* ignored during reflection", \
88 ################################################################################
# Patch in support for the debversion field type so that it works during
# that is for sqlalchemy 0.6
UserDefinedType = sqltypes.UserDefinedType
# this one for sqlalchemy 0.5
# NOTE(review): chunk view — the original try/except AttributeError fallback
# structure around these two assignments is not visible here; as written the
# second assignment would always win. Confirm against the full file.
UserDefinedType = sqltypes.TypeEngine
class DebVersion(UserDefinedType):
    """Custom SQLAlchemy column type mapping PostgreSQL's 'debversion'."""

    def get_col_spec(self):
        # DDL column specification; body not visible in this chunk —
        # presumably returns the literal type name 'debversion'.

    def bind_processor(self, dialect):
        # Conversion applied to values sent to the database; body not
        # visible in this chunk.

    # ' = None' is needed for sqlalchemy 0.5:
    def result_processor(self, dialect, coltype = None):
        # Conversion applied to values read from the database; body not
        # visible in this chunk.
# Only the first three characters of the version string matter here
# (e.g. '0.9' from '0.9.8').
sa_major_version = sqlalchemy.__version__[0:3]
if sa_major_version in ["0.5", "0.6", "0.7", "0.8", "0.9"]:
    # Register 'debversion' with the postgres dialect so reflection
    # recognises columns of that type.
    from sqlalchemy.databases import postgres
    postgres.ischema_names['debversion'] = DebVersion
    # NOTE(review): chunk view — this raise belongs to the unsupported-version
    # 'else:' branch, whose header line is not visible here.
    raise Exception("dak only ported to SQLA versions 0.5 to 0.9. See daklib/dbconn.py")
118 ################################################################################
120 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
122 ################################################################################
def session_wrapper(fn):
    """
    Wrapper around common ".., session=None):" handling. If the wrapped
    function is called without passing 'session', we create a local one
    and destroy it when the function ends.

    Also attaches a commit_or_flush method to the session; if we created a
    local session, this is a synonym for session.commit(), otherwise it is a
    synonym for session.flush().
    """

    def wrapped(*args, **kwargs):
        private_transaction = False

        # Find the session object
        session = kwargs.get('session')

        # NOTE(review): chunk view — the 'if session is None:' guard that
        # encloses the positional/keyword detection below is not visible.
        if len(args) <= len(getargspec(fn)[0]) - 1:
            # No session specified as last argument or in kwargs
            private_transaction = True
            session = kwargs['session'] = DBConn().session()
        # Session is last argument in args
        # (branch header not visible in this chunk)
            session = args[-1] = DBConn().session()
            private_transaction = True

        if private_transaction:
            # We own the session, so commit_or_flush really commits.
            session.commit_or_flush = session.commit
        # ('else:' not visible in this chunk — caller-owned session only flushes)
            session.commit_or_flush = session.flush

        # ('try:' not visible in this chunk)
            return fn(*args, **kwargs)
        # ('finally:' not visible in this chunk)
        if private_transaction:
            # We created a session; close it.

    # Preserve metadata of the wrapped function (Python 2 style attribute).
    wrapped.__doc__ = fn.__doc__
    wrapped.func_name = fn.func_name
171 __all__.append('session_wrapper')
173 ################################################################################
class ORMObject(object):
    """
    ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
    derived classes must implement the properties() method.
    """

    def properties(self):
        """
        This method should be implemented by all derived classes and returns a
        list of the important properties. The properties 'created' and
        'modified' will be added automatically. A suffix '_count' should be
        added to properties that are lists or query objects. The most important
        property name should be returned as the first element in the list
        because it is used by repr().
        """

    # NOTE(review): chunk view — the 'def json(self):' header is not visible;
    # the docstring and body below belong to that method.
        """
        Returns a JSON representation of the object based on the properties
        returned from the properties() method.
        """
        # add created and modified
        all_properties = self.properties() + ['created', 'modified']
        for property in all_properties:
            # check for list or query
            if property[-6:] == '_count':
                real_property = property[:-6]
                if not hasattr(self, real_property):
                    # ('continue' not visible in this chunk)
                value = getattr(self, real_property)
                if hasattr(value, '__len__'):
                    # (list handling lines not visible in this chunk)
                elif hasattr(value, 'count'):
                    # query (but not during validation)
                    if self.in_validation:
                        # ('continue' not visible in this chunk)
                    value = value.count()
                # ('else:' not visible in this chunk)
                raise KeyError('Do not understand property %s.' % property)
            # ('else:' branch handling plain properties — header not visible)
            if not hasattr(self, property):
                # ('continue' not visible in this chunk)
            value = getattr(self, property)
            # (None-handling lines not visible in this chunk)
            elif isinstance(value, ORMObject):
                # use repr() for ORMObject types
            # we want a string for all other types because json cannot
            data[property] = value
        return json.dumps(data)

    # NOTE(review): 'def classname(self):' header not visible in this chunk.
        """
        Returns the name of the class.
        """
        return type(self).__name__

    # NOTE(review): 'def __repr__(self):' header not visible in this chunk.
        """
        Returns a short string representation of the object using the first
        element from the properties() method.
        """
        primary_property = self.properties()[0]
        value = getattr(self, primary_property)
        return '<%s %s>' % (self.classname(), str(value))

    # NOTE(review): 'def __str__(self):' header not visible in this chunk.
        """
        Returns a human readable form of the object using the properties()
        """
        return '<%s %s>' % (self.classname(), self.json())

    def not_null_constraints(self):
        """
        Returns a list of properties that must be not NULL. Derived classes
        should override this method if needed.
        """

    # Template for the error message raised by validate().
    validation_message = \
        "Validation failed because property '%s' must not be empty in object\n%s"

    # Guard flag: set while validate() stringifies the object so json()
    # skips query counting (avoids triggering a second flush mid-validation).
    in_validation = False

    # NOTE(review): 'def validate(self):' header not visible in this chunk.
        """
        This function validates the not NULL constraints as returned by
        not_null_constraints(). It raises the DBUpdateError exception if
        """
        for property in self.not_null_constraints():
            # TODO: It is a bit awkward that the mapper configuration allow
            # directly setting the numeric _id columns. We should get rid of it
            if hasattr(self, property + '_id') and \
                getattr(self, property + '_id') is not None:
                # ('continue' not visible in this chunk)
            if not hasattr(self, property) or getattr(self, property) is None:
                # str() might lead to races due to a 2nd flush
                self.in_validation = True
                message = self.validation_message % (property, str(self))
                self.in_validation = False
                raise DBUpdateError(message)

    # NOTE(review): presumably decorated with @classmethod — decorator line
    # not visible in this chunk.
    def get(cls, primary_key, session = None):
        """
        This is a support function that allows getting an object by its primary

            Architecture.get(3[, session])

        instead of the more verbose

            session.query(Architecture).get(3)
        """
        return session.query(cls).get(primary_key)

    def session(self, replace = False):
        """
        Returns the current session that is associated with the object. May
        return None if object is in detached state.
        """
        return object_session(self)

    def clone(self, session = None):
        """
        Clones the current object in a new session and returns the new clone. A
        fresh session is created if the optional session parameter is not
        provided. The function will fail if a session is provided and has

        RATIONALE: SQLAlchemy's session is not thread safe. This method clones
        an existing object to allow several threads to work with their own
        instances of an ORMObject.

        WARNING: Only persistent (committed) objects can be cloned. Changes
        made to the original object that are not committed yet will get lost.
        The session of the new object will always be rolled back to avoid
        """
        if self.session() is None:
            raise RuntimeError( \
                'Method clone() failed for detached object:\n%s' % self)
        # Flush first so the primary key is guaranteed to be populated.
        self.session().flush()
        mapper = object_mapper(self)
        primary_key = mapper.primary_key_from_instance(self)
        object_class = self.__class__
        # ('if session is None:' guard not visible in this chunk)
            session = DBConn().session()
        elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
            raise RuntimeError( \
                'Method clone() failed due to unflushed changes in session.')
        new_object = session.query(object_class).get(primary_key)
        if new_object is None:
            raise RuntimeError( \
                'Method clone() failed for non-persistent object:\n%s' % self)
348 __all__.append('ORMObject')
350 ################################################################################
class Validator(MapperExtension):
    """
    This class calls the validate() method for each instance for the
    'before_update' and 'before_insert' events. A global object validator is
    used for configuring the individual mappers.
    """

    def before_update(self, mapper, connection, instance):
        # Body not visible in this chunk — presumably calls
        # instance.validate() and returns EXT_CONTINUE; confirm in full file.

    def before_insert(self, mapper, connection, instance):
        # Body not visible in this chunk — presumably mirrors before_update.
# Shared Validator instance used when configuring the individual mappers.
validator = Validator()
369 ################################################################################
class ACL(ORMObject):
    """An upload access control list."""
    # NOTE(review): '__repr__' header line not visible in this chunk.
        return "<ACL {0}>".format(self.name)
375 __all__.append('ACL')
class ACLPerSource(ORMObject):
    """A per-source-package ACL entry tied to a fingerprint."""
    # NOTE(review): '__repr__' header line not visible in this chunk.
        return "<ACLPerSource acl={0} fingerprint={1} source={2} reason={3}>".format(self.acl.name, self.fingerprint.fingerprint, self.source, self.reason)
381 __all__.append('ACLPerSource')
383 ################################################################################
class Architecture(ORMObject):
    """A single architecture (e.g. 'amd64') known to the archive."""

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Allow comparing directly against a plain architecture name.
        if not isinstance(val, str):
            # This signals to use the normal comparison operator
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        if not isinstance(val, str):
            # This signals to use the normal comparison operator
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # Properties serialized by ORMObject.json()/repr(); arch_string
        # first because it is the primary display property.
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']
408 __all__.append('Architecture')
def get_architecture(architecture, session=None):
    """
    Returns Architecture object for given C{architecture} name.

    @type architecture: string
    @param architecture: The name of the architecture

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Architecture object for the given arch (None if not present)
    """
    q = session.query(Architecture).filter_by(arch_string=architecture)

    # ('try:' / 'return q.one()' not visible in this chunk)
    except NoResultFound:
        # ('return None' not visible in this chunk)
433 __all__.append('get_architecture')
435 ################################################################################
class Archive(object):
    """An archive (e.g. ftp-master) as stored in the database."""

    def __init__(self, *args, **kwargs):
        # Body not visible in this chunk; the '__repr__' header for the
        # return below is also missing from this view.

        return '<Archive %s>' % self.archive_name
444 __all__.append('Archive')
def get_archive(archive, session=None):
    """
    returns database id for given C{archive}.

    @type archive: string
    @param archive: the name of the archive

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Archive object for the given name (None if not present)
    """
    # Archive names are stored lower-case.
    archive = archive.lower()

    q = session.query(Archive).filter_by(archive_name=archive)

    # ('try:' / 'return q.one()' not visible in this chunk)
    except NoResultFound:
        # ('return None' not visible in this chunk)
471 __all__.append('get_archive')
473 ################################################################################
class ArchiveFile(object):
    """Membership of a pool file in a particular archive and component."""

    def __init__(self, archive=None, component=None, file=None):
        self.archive = archive
        self.component = component
        # ('self.file = file' assignment not visible in this chunk)

    # NOTE(review): property/method header for the pathname below is not
    # visible in this chunk.
        return os.path.join(self.archive.path, 'pool', self.component.component_name, self.file.filename)
484 __all__.append('ArchiveFile')
486 ################################################################################
class BinContents(ORMObject):
    """A single path shipped by a binary package."""

    def __init__(self, file = None, binary = None):
        # (attribute assignments not visible in this chunk)

    def properties(self):
        # Properties serialized by ORMObject.json()/repr().
        return ['file', 'binary']
496 __all__.append('BinContents')
498 ################################################################################
class DBBinary(ORMObject):
    """A binary package (.deb/.udeb) stored in the pool."""

    def __init__(self, package = None, source = None, version = None, \
        maintainer = None, architecture = None, poolfile = None, \
        binarytype = 'deb', fingerprint=None):
        self.package = package
        # ('self.source = source' assignment not visible in this chunk)
        self.version = version
        self.maintainer = maintainer
        self.architecture = architecture
        self.poolfile = poolfile
        self.binarytype = binarytype
        self.fingerprint = fingerprint

    # NOTE(review): property header for the id accessor below is not
    # visible in this chunk.
        return self.binary_id

    def properties(self):
        # Properties serialized by ORMObject.json()/repr().
        return ['package', 'version', 'maintainer', 'source', 'architecture', \
            'poolfile', 'binarytype', 'fingerprint', 'install_date', \
            'suites_count', 'binary_id', 'contents_count', 'extra_sources']

    def not_null_constraints(self):
        # (continuation line of this list is not visible in this chunk)
        return ['package', 'version', 'maintainer', 'source', 'poolfile', \

    # Proxy dict mapping metadata key objects to their values.
    metadata = association_proxy('key', 'value')

    def scan_contents(self):
        """
        Yields the contents of the package. Only regular files are yielded and
        the path names are normalized after converting them from either utf-8
        or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the
        package does not contain any regular file.
        """
        fullpath = self.poolfile.fullpath
        # Stream the data.tar members out of the .deb via dpkg-deb.
        dpkg_cmd = ('dpkg-deb', '--fsys-tarfile', fullpath)
        dpkg = daklib.daksubprocess.Popen(dpkg_cmd, stdout=subprocess.PIPE)
        tar = TarFile.open(fileobj = dpkg.stdout, mode = 'r|')
        for member in tar.getmembers():
            if not member.isdir():
                name = normpath(member.name)
                # enforce proper utf-8 encoding
                # ('try:' / utf-8 decode lines not visible in this chunk)
                except UnicodeDecodeError:
                    name = name.decode('iso8859-1').encode('utf-8')
        # (yield statements and cleanup not visible in this chunk)

    def read_control(self):
        """
        Reads the control information from a binary.

        @return: stanza text of the control section.
        """
        fullpath = self.poolfile.fullpath
        with open(fullpath, 'r') as deb_file:
            return utils.deb_extract_control(deb_file)

    def read_control_fields(self):
        """
        Reads the control information from a binary and return

        @return: fields of the control section as a dictionary.
        """
        stanza = self.read_control()
        return apt_pkg.TagSection(stanza)

    # NOTE(review): property/method header for the metadata proxy below is
    # not visible in this chunk.
        session = object_session(self)
        query = session.query(BinaryMetadata).filter_by(binary=self)
        return MetadataProxy(session, query)
581 __all__.append('DBBinary')
def get_suites_binary_in(package, session=None):
    """
    Return the list of Suite objects that contain a binary named C{package}.

    @param package: DBBinary package name to search for

    @return: list of Suite objects for the given package
    """
    has_package = Suite.binaries.any(DBBinary.package == package)
    return session.query(Suite).filter(has_package).all()
597 __all__.append('get_suites_binary_in')
def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
    """
    Returns the component name of the newest binary package in suite_list or
    None if no package is found. The result can be optionally filtered by a list
    of architecture names.

    @param package: DBBinary package name to search for

    @type suite_list: list of str
    @param suite_list: list of suite_name items

    @type arch_list: list of str
    @param arch_list: optional list of arch_string items that defaults to []

    @rtype: str or NoneType
    @return: name of component or None
    """
    # NOTE(review): mutable default argument 'arch_list=[]' — appears to be
    # read-only here, but worth confirming it is never mutated.
    q = session.query(DBBinary).filter_by(package = package). \
        join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
    if len(arch_list) > 0:
        q = q.join(DBBinary.architecture). \
            filter(Architecture.arch_string.in_(arch_list))
    # Newest version wins.
    binary = q.order_by(desc(DBBinary.version)).first()
    # (the 'binary is None' guard is not visible in this chunk)
    return binary.poolfile.component.component_name
630 __all__.append('get_component_by_package_suite')
632 ################################################################################
class BuildQueue(object):
    """A build queue as stored in the database."""

    def __init__(self, *args, **kwargs):
        # Body not visible in this chunk; the '__repr__' header for the
        # return below is also missing from this view.

        return '<BuildQueue %s>' % self.queue_name
641 __all__.append('BuildQueue')
643 ################################################################################
class Component(ORMObject):
    """An archive component such as 'main' or 'contrib'."""

    def __init__(self, component_name = None):
        self.component_name = component_name

    def __eq__(self, val):
        # Allow comparing directly against a plain component name.
        if not isinstance(val, str):
            # This signals to use the normal comparison operator
            return NotImplemented
        return self.component_name == val

    def __ne__(self, val):
        if not isinstance(val, str):
            # This signals to use the normal comparison operator
            return NotImplemented
        return self.component_name != val

    def properties(self):
        # Properties serialized by ORMObject.json()/repr().
        return [
            'component_name',
            'component_id',
            'description',
            'meets_dfsg',
            'overrides_count',
        ]

    def not_null_constraints(self):
        return ['component_name']
669 __all__.append('Component')
def get_component(component, session=None):
    """
    Returns database id for given C{component}.

    @type component: string
    @param component: The name of the component

    @return: the database id for the given component
    """
    # Component names are stored lower-case.
    component = component.lower()

    q = session.query(Component).filter_by(component_name=component)

    # ('try:' / 'return q.one()' not visible in this chunk)
    except NoResultFound:
        # ('return None' not visible in this chunk)
692 __all__.append('get_component')
def get_mapped_component(component_name, session=None):
    """get component after mappings

    Evaluate component mappings from ComponentMappings in dak.conf for the
    given component name.

    @todo: ansgar wants to get rid of this. It's currently only used for

    @type component_name: str
    @param component_name: component name

    @param session: database session

    @rtype: L{daklib.dbconn.Component} or C{None}
    @return: component after applying maps or C{None}
    """
    # ('cnf = Config()' setup not visible in this chunk)
    # Each mapping is a "source destination" pair.
    for m in cnf.value_list("ComponentMappings"):
        (src, dst) = m.split()
        if component_name == src:
            # (rebinding of component_name to dst not visible in this chunk)
    component = session.query(Component).filter_by(component_name=component_name).first()
    # ('return component' not visible in this chunk)
720 __all__.append('get_mapped_component')
def get_component_names(session=None):
    """
    Return the names of all known components.

    @return: list of strings of component names
    """
    names = []
    for component in session.query(Component).all():
        names.append(component.component_name)
    return names
733 __all__.append('get_component_names')
735 ################################################################################
class DBConfig(object):
    """A configuration key/value pair stored in the database."""

    def __init__(self, *args, **kwargs):
        # Body not visible in this chunk; the '__repr__' header for the
        # return below is also missing from this view.

        return '<DBConfig %s>' % self.name
744 __all__.append('DBConfig')
746 ################################################################################
def get_or_set_contents_file_id(filename, session=None):
    """
    Returns database id for given filename.

    If no matching file is found, a row is inserted.

    @type filename: string
    @param filename: The filename
    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.

    @return: the database id for the given filename
    """
    q = session.query(ContentFilename).filter_by(filename=filename)

    # ('try:' not visible in this chunk)
        ret = q.one().cafilename_id
    except NoResultFound:
        # Not seen before: insert a new row and read back its id.
        cf = ContentFilename()
        cf.filename = filename
        # ('session.add(cf)' not visible in this chunk)
        session.commit_or_flush()
        ret = cf.cafilename_id

    # ('return ret' not visible in this chunk)
779 __all__.append('get_or_set_contents_file_id')
def get_contents(suite, overridetype, section=None, session=None):
    """
    Returns contents for a suite / overridetype combination, limiting
    to a section if not None.

    @param suite: Suite object

    @type overridetype: OverrideType
    @param overridetype: OverrideType object

    @type section: Section
    @param section: Optional section object to limit results to

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: ResultsProxy object set up to return tuples of (filename, section,
    """

    # find me all of the contents for a given suite
    # (part of the SELECT column list is not visible in this chunk)
    contents_q = """SELECT (p.path||'/'||n.file) AS fn,
FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
JOIN content_file_names n ON (c.filename=n.id)
JOIN binaries b ON (b.id=c.binary_pkg)
JOIN override o ON (o.package=b.package)
JOIN section s ON (s.id=o.section)
WHERE o.suite = :suiteid AND o.type = :overridetypeid
AND b.type=:overridetypename"""

    # Bind parameters keep the raw SQL safe from injection.
    vals = {'suiteid': suite.suite_id,
            'overridetypeid': overridetype.overridetype_id,
            'overridetypename': overridetype.overridetype}

    if section is not None:
        contents_q += " AND s.id = :sectionid"
        vals['sectionid'] = section.section_id

    contents_q += " ORDER BY fn"

    return session.execute(contents_q, vals)
830 __all__.append('get_contents')
832 ################################################################################
class ContentFilepath(object):
    """A directory path component of a contents entry."""

    def __init__(self, *args, **kwargs):
        # Body not visible in this chunk; the '__repr__' header for the
        # return below is also missing from this view.

        return '<ContentFilepath %s>' % self.filepath
841 __all__.append('ContentFilepath')
def get_or_set_contents_path_id(filepath, session=None):
    """
    Returns database id for given path.

    If no matching file is found, a row is inserted.

    @type filepath: string
    @param filepath: The filepath

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.

    @return: the database id for the given path
    """
    q = session.query(ContentFilepath).filter_by(filepath=filepath)

    # ('try:' not visible in this chunk)
        ret = q.one().cafilepath_id
    except NoResultFound:
        # Not seen before: insert a new row and read back its id.
        cf = ContentFilepath()
        cf.filepath = filepath
        # ('session.add(cf)' not visible in this chunk)
        session.commit_or_flush()
        ret = cf.cafilepath_id

    # ('return ret' not visible in this chunk)
875 __all__.append('get_or_set_contents_path_id')
877 ################################################################################
class ContentAssociation(object):
    """Association between a binary package and one of its contents paths."""

    def __init__(self, *args, **kwargs):
        # Body not visible in this chunk; the '__repr__' header for the
        # return below is also missing from this view.

        return '<ContentAssociation %s>' % self.ca_id
886 __all__.append('ContentAssociation')
def insert_content_paths(binary_id, fullpaths, session=None):
    """
    Make sure given path is associated with given binary id

    @param binary_id: the id of the binary
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session. If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code. If not passed, a commit
    will be performed at the end of the function, otherwise the caller is
    responsible for committing.

    @return: True upon success
    """
    # (private-session guard not visible in this chunk)
        session = DBConn().session()

    # ('try:' not visible in this chunk)
    def generate_path_dicts():
        # Strip a leading './' so paths are stored relative to the root.
        for fullpath in fullpaths:
            if fullpath.startswith( './' ):
                fullpath = fullpath[2:]

            yield {'filename':fullpath, 'id': binary_id }

    for d in generate_path_dicts():
        session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
        # (remainder of the execute() call and commit/return not visible)

    # (exception handler head not visible in this chunk)
        traceback.print_exc()

        # Only rollback if we set up the session ourself
939 __all__.append('insert_content_paths')
941 ################################################################################
class DSCFile(object):
    """Association between a source package's .dsc and one of its files."""

    def __init__(self, *args, **kwargs):
        # Body not visible in this chunk; the '__repr__' header for the
        # return below is also missing from this view.

        return '<DSCFile %s>' % self.dscfile_id
950 __all__.append('DSCFile')
def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
    """
    Returns a list of DSCFiles which may be empty

    @type dscfile_id: int (optional)
    @param dscfile_id: the dscfile_id of the DSCFiles to find

    @type source_id: int (optional)
    @param source_id: the source id related to the DSCFiles to find

    @type poolfile_id: int (optional)
    @param poolfile_id: the poolfile id related to the DSCFiles to find

    @return: Possibly empty list of DSCFiles
    """
    q = session.query(DSCFile)

    # Each filter is applied only if the corresponding id was given.
    if dscfile_id is not None:
        q = q.filter_by(dscfile_id=dscfile_id)

    if source_id is not None:
        q = q.filter_by(source_id=source_id)

    if poolfile_id is not None:
        q = q.filter_by(poolfile_id=poolfile_id)

    # ('return q.all()' not visible in this chunk)
983 __all__.append('get_dscfiles')
985 ################################################################################
class ExternalOverride(ORMObject):
    """An externally-maintained override entry (package, key, value)."""

    def __init__(self, *args, **kwargs):
        # Body not visible in this chunk; the '__repr__' header for the
        # return below is also missing from this view.

        return '<ExternalOverride %s = %s: %s>' % (self.package, self.key, self.value)
994 __all__.append('ExternalOverride')
996 ################################################################################
class PoolFile(ORMObject):
    """A file stored in the pool, identified by filename and checksums."""

    def __init__(self, filename = None, filesize = -1, \
        # (rest of the signature is not visible in this chunk)
        self.filename = filename
        self.filesize = filesize
        self.md5sum = md5sum

    # NOTE(review): property header (likely the fullpath accessor) is not
    # visible in this chunk.
        session = DBConn().session().object_session(self)
        # Prefer tainted archives first when several archives hold the file.
        af = session.query(ArchiveFile).join(Archive) \
            .filter(ArchiveFile.file == self) \
            .order_by(Archive.tainted.desc()).first()

    def component(self):
        # NOTE(review): a '@property' decorator line is likely missing from
        # this chunk view — confirm against the full file.
        session = DBConn().session().object_session(self)
        # .one() enforces that the file belongs to exactly one component.
        component_id = session.query(ArchiveFile.component_id).filter(ArchiveFile.file == self) \
            .group_by(ArchiveFile.component_id).one()
        return session.query(Component).get(component_id)

    # NOTE(review): property header (likely a basename accessor) is not
    # visible in this chunk.
        return os.path.basename(self.filename)

    def is_valid(self, filesize = -1, md5sum = None):
        # 'long' is Python 2 only; filesize may arrive as a string.
        return self.filesize == long(filesize) and self.md5sum == md5sum

    def properties(self):
        # Properties serialized by ORMObject.json()/repr().
        return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
            'sha256sum', 'source', 'binary', 'last_used']

    def not_null_constraints(self):
        return ['filename', 'md5sum']

    def identical_to(self, filename):
        """
        compare size and hash with the given file

        @return: true if the given file has the same size and hash as this object; false otherwise
        """
        st = os.stat(filename)
        if self.filesize != st.st_size:
            # ('return False' not visible in this chunk)

        # NOTE(review): the file handle is never closed — consider using
        # 'with open(...)' when this code is next touched.
        f = open(filename, "r")
        sha256sum = apt_pkg.sha256sum(f)
        if sha256sum != self.sha256sum:
            # ('return False' / final 'return True' not visible in this chunk)
1052 __all__.append('PoolFile')
def get_poolfile_like_name(filename, session=None):
    """
    Returns an array of PoolFile objects which are like the given name

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @return: array of PoolFile objects
    """
    # TODO: There must be a way of properly using bind parameters with %FOO%
    q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))

    # ('return q.all()' not visible in this chunk)
1071 __all__.append('get_poolfile_like_name')
1073 ################################################################################
class Fingerprint(ORMObject):
    """An OpenPGP key fingerprint and its related keyring/uid data."""

    def __init__(self, fingerprint = None):
        self.fingerprint = fingerprint

    def properties(self):
        # (continuation line of this list is not visible in this chunk)
        return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \

    def not_null_constraints(self):
        return ['fingerprint']
1086 __all__.append('Fingerprint')
def get_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).

    @return: the Fingerprint object for the given fpr or None
    """
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    # ('try:' and the successful lookup are not visible in this chunk)
    except NoResultFound:
        # (None fallback not visible in this chunk)
1113 __all__.append('get_fingerprint')
def get_or_set_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    If no matching fpr is found, a row is inserted.

    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.
    A flush will be performed either way.

    @return: the Fingerprint object for the given fpr
    """
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    # ('try:' and the successful lookup are not visible in this chunk)
    except NoResultFound:
        # Not seen before: insert a new row.
        fingerprint = Fingerprint()
        fingerprint.fingerprint = fpr
        session.add(fingerprint)
        session.commit_or_flush()

    # (the final return is not visible in this chunk)
1148 __all__.append('get_or_set_fingerprint')
1150 ################################################################################
# Helper routine for Keyring class
def get_ldap_name(entry):
    """Build a display name from an LDAP entry by joining cn/mn/sn parts."""
    for k in ["cn", "mn", "sn"]:
        # (fetch of entry value for k not visible in this chunk)
        # Skip empty or placeholder ('-') name components.
        if ret and ret[0] != "" and ret[0] != "-":
            # (append to the 'name' list not visible in this chunk)
    return " ".join(name)
1161 ################################################################################
1163 class Keyring(object):
1167 def __init__(self, *args, **kwargs):
1171 return '<Keyring %s>' % self.keyring_name
1173 def de_escape_gpg_str(self, txt):
1174 esclist = re.split(r'(\\x..)', txt)
1175 for x in range(1,len(esclist),2):
1176 esclist[x] = "%c" % (int(esclist[x][2:],16))
1177 return "".join(esclist)
1179 def parse_address(self, uid):
1180 """parses uid and returns a tuple of real name and email address"""
1182 (name, address) = email.Utils.parseaddr(uid)
1183 name = re.sub(r"\s*[(].*[)]", "", name)
1184 name = self.de_escape_gpg_str(name)
1187 return (name, address)
1189 def load_keys(self, keyring):
1190 if not self.keyring_id:
1191 raise Exception('Must be initialized with database information')
1193 cmd = ["gpg", "--no-default-keyring", "--keyring", keyring,
1194 "--with-colons", "--fingerprint", "--fingerprint"]
1195 p = daklib.daksubprocess.Popen(cmd, stdout=subprocess.PIPE)
1198 need_fingerprint = False
1200 for line in p.stdout:
1201 field = line.split(":")
1202 if field[0] == "pub":
1205 (name, addr) = self.parse_address(field[9])
1207 self.keys[key]["email"] = addr
1208 self.keys[key]["name"] = name
1209 need_fingerprint = True
1210 elif key and field[0] == "uid":
1211 (name, addr) = self.parse_address(field[9])
1212 if "email" not in self.keys[key] and "@" in addr:
1213 self.keys[key]["email"] = addr
1214 self.keys[key]["name"] = name
1215 elif need_fingerprint and field[0] == "fpr":
1216 self.keys[key]["fingerprints"] = [field[9]]
1217 self.fpr_lookup[field[9]] = key
1218 need_fingerprint = False
1222 raise subprocess.CalledProcessError(r, cmd)
    def import_users_from_ldap(self, session):
        # Look up developers in LDAP and attach their uid/name to the keys
        # previously loaded by load_keys(), matching on key fingerprint.
        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
        ca_cert_file = cnf.get('Import-LDAP-Fingerprints::CACertFile')

        l = ldap.open(LDAPServer)

        # TODO: This should request a new context and use
        # connection-specific options (i.e. "l.set_option(...)")

        # Request a new TLS context. If there was already one, libldap
        # would not change the TLS options (like which CAs to trust).
        #l.set_option(ldap.OPT_X_TLS_NEWCTX, True)
        ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_HARD)
        #ldap.set_option(ldap.OPT_X_TLS_CACERTDIR, None)
        ldap.set_option(ldap.OPT_X_TLS_CACERTFILE, ca_cert_file)

        # Anonymous bind is sufficient for the read-only search below.
        l.simple_bind_s("","")
        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
               ["uid", "keyfingerprint", "cn", "mn", "sn"])

        ldap_fin_uid_id = {}

        # NOTE(review): the loop header over the LDAP results (and the
        # byuid/byname initialisation) is not visible in this view; the
        # indented lines below belong to that loop.
            uid = entry["uid"][0]
            name = get_ldap_name(entry)
            fingerprints = entry["keyFingerPrint"]
            for f in fingerprints:
                key = self.fpr_lookup.get(f, None)
                # NOTE(review): the skip for unknown fingerprints is not
                # visible in this view.
                if key not in self.keys:
                self.keys[key]["uid"] = uid

                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, name)
                byname[uid] = (keyid, name)

        return (byname, byuid)
    def generate_users_from_keyring(self, format, session):
        # Build (byname, byuid) mappings for the loaded keys, creating Uid
        # rows as needed.  `format` is a %-template the email address (or
        # "invalid-uid") is substituted into.
        for x in self.keys.keys():
            if "email" not in self.keys[x]:
                # Key with no usable email address.
                self.keys[x]["uid"] = format % "invalid-uid"
            # NOTE(review): the else-branch header is not visible in this
            # view; the lines below belong to it.
                uid = format % self.keys[x]["email"]
                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, self.keys[x]["name"])
                byname[uid] = (keyid, self.keys[x]["name"])
                self.keys[x]["uid"] = uid

        # NOTE(review): the guard around this invalid-uid registration is
        # not visible in this view.
        uid = format % "invalid-uid"
        keyid = get_or_set_uid(uid, session).uid_id
        byuid[keyid] = (uid, "ungeneratable user id")
        byname[uid] = (keyid, "ungeneratable user id")

        return (byname, byuid)

__all__.append('Keyring')
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @return: the Keyring object for this keyring
    """
    q = session.query(Keyring).filter_by(keyring_name=keyring)
    # NOTE(review): the try/one() call and return lines around this handler
    # are not visible in this view.
    except NoResultFound:

__all__.append('get_keyring')
def get_active_keyring_paths(session=None):
    """
    @return: list of active keyring paths
    """
    # Sorted so the highest-priority keyring comes first.
    return [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).order_by(desc(Keyring.priority)).all() ]

__all__.append('get_active_keyring_paths')
def get_primary_keyring_path(session=None):
    """
    Get the full path to the highest priority active keyring

    @return: path to the active keyring with the highest priority or None if no
    keyring is configured
    """
    keyrings = get_active_keyring_paths()

    if len(keyrings) > 0:
        # NOTE(review): the return of keyrings[0] (and the None fallback)
        # is not visible in this view.

__all__.append('get_primary_keyring_path')
1351 ################################################################################
class DBChange(object):
    """ORM object for a row of the changes table (an uploaded .changes file)."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __repr__ def header is not visible in this view;
        # the return below belongs to it, not to __init__.
        return '<DBChange %s>' % self.changesname

__all__.append('DBChange')
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: DBChange object for the given filename (C{None} if not present)
    """
    q = session.query(DBChange).filter_by(changesname=filename)
    # NOTE(review): the try/one() call and return lines around this handler
    # are not visible in this view.
    except NoResultFound:

__all__.append('get_dbchange')
1387 ################################################################################
class Maintainer(ORMObject):
    # ORM object for the maintainer table.
    def __init__(self, name = None):
        # NOTE(review): the attribute assignment(s) are not visible in this
        # view of the file.

    def properties(self):
        return ['name', 'maintainer_id']

    def not_null_constraints(self):
        # NOTE(review): the return value is not visible in this view.

    def get_split_maintainer(self):
        # Split "Name <email>" into components; empty strings when unset.
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())

__all__.append('Maintainer')
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @return: the Maintainer object for the given maintainer
    """
    q = session.query(Maintainer).filter_by(name=name)
    # NOTE(review): the try/one() call preceding this handler is not visible
    # in this view.
    except NoResultFound:
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        # Commit when we own the session, otherwise just flush.
        session.commit_or_flush()

__all__.append('get_or_set_maintainer')
def get_maintainer(maintainer_id, session=None):
    """
    Return the name of the maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @return: the Maintainer with this C{maintainer_id}
    """
    # Primary-key lookup; yields None when the id does not exist.
    return session.query(Maintainer).get(maintainer_id)

__all__.append('get_maintainer')
1458 ################################################################################
class NewComment(object):
    """ORM object for a reviewer comment on a package in a policy queue."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __repr__ def header is not visible in this view;
        # the return below belongs to it, not to __init__.
        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)

__all__.append('NewComment')
def has_new_comment(policy_queue, package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)
    """
    q = session.query(NewComment).filter_by(policy_queue=policy_queue)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    # count() > 0 is already a bool; the bool() wrapper is kept for clarity.
    return bool(q.count() > 0)

__all__.append('has_new_comment')
def get_new_comments(policy_queue, package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: A (possibly empty) list of NewComment objects will be returned
    """
    # Each optional filter is applied only when the caller supplied it.
    q = session.query(NewComment).filter_by(policy_queue=policy_queue)
    if package is not None: q = q.filter_by(package=package)
    if version is not None: q = q.filter_by(version=version)
    if comment_id is not None: q = q.filter_by(comment_id=comment_id)
    # NOTE(review): the return (presumably q.all()) is not visible in this view.

__all__.append('get_new_comments')
1528 ################################################################################
class Override(ORMObject):
    # ORM object for the override table (per-suite section/priority data
    # for a package).
    def __init__(self, package = None, suite = None, component = None, overridetype = None, \
        section = None, priority = None):
        self.package = package
        # NOTE(review): the suite assignment is not visible in this view.
        self.component = component
        self.overridetype = overridetype
        self.section = section
        self.priority = priority

    def properties(self):
        # NOTE(review): the continuation of this list is not visible in
        # this view.
        return ['package', 'suite', 'component', 'overridetype', 'section', \

    def not_null_constraints(self):
        return ['package', 'suite', 'component', 'overridetype', 'section']

__all__.append('Override')
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to. If
    None, don't limit. Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
    limit to. If None, don't limit. Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
    limit to. If None, don't limit. Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: A (possibly empty) list of Override objects will be returned
    """
    q = session.query(Override)
    q = q.filter_by(package=package)

    # Each optional filter accepts either a scalar or a list of names.
    if suite is not None:
        if not isinstance(suite, list): suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list): component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list): overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

    # NOTE(review): the return (presumably q.all()) is not visible in this view.

__all__.append('get_override')
1597 ################################################################################
class OverrideType(ORMObject):
    """Mapped row of the override_type table."""

    def __init__(self, overridetype = None):
        self.overridetype = overridetype

    def properties(self):
        """Attribute names exposed via ORMObject's generic helpers."""
        return [
            'overridetype',
            'overridetype_id',
            'overrides_count',
        ]

    def not_null_constraints(self):
        """Attributes that must be set before the row may be stored."""
        return ['overridetype']

__all__.append('OverrideType')
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: the database id for the given override type
    """
    q = session.query(OverrideType).filter_by(overridetype=override_type)
    # NOTE(review): the try/one() call and return lines around this handler
    # are not visible in this view.
    except NoResultFound:

__all__.append('get_override_type')
1636 ################################################################################
class PolicyQueue(object):
    """ORM object for a row of the policy_queue table."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __repr__ def header is not visible in this view;
        # the return below belongs to it, not to __init__.
        return '<PolicyQueue %s>' % self.queue_name

__all__.append('PolicyQueue')
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(queue_name=queuename)
    # NOTE(review): the try/one() call and return lines around this handler
    # are not visible in this view.
    except NoResultFound:

__all__.append('get_policy_queue')
1672 ################################################################################
class PolicyQueueUpload(object):
    # Upload sitting in a policy queue.  Ordering compares source name,
    # version, presence of a source package, then the .changes filename.
    def __cmp__(self, other):
        ret = cmp(self.changes.source, other.changes.source)
        # NOTE(review): the short-circuit checks (and the assignments inside
        # the source-presence branches, and the final return) between these
        # comparison steps are not visible in this view.
        ret = apt_pkg.version_compare(self.changes.version, other.changes.version)
        if self.source is not None and other.source is None:
        elif self.source is None and other.source is not None:
        ret = cmp(self.changes.changesname, other.changes.changesname)

__all__.append('PolicyQueueUpload')
1690 ################################################################################
class PolicyQueueByhandFile(object):
    """Mapped row of the policy_queue_byhand_file table."""

__all__.append('PolicyQueueByhandFile')
1697 ################################################################################
class Priority(ORMObject):
    # ORM object for the priority table.
    def __init__(self, priority = None, level = None):
        self.priority = priority
        # NOTE(review): the level assignment is not visible in this view.

    def properties(self):
        return ['priority', 'priority_id', 'level', 'overrides_count']

    def not_null_constraints(self):
        return ['priority', 'level']

    def __eq__(self, val):
        # Allow comparison against a plain priority-name string.
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented

__all__.append('Priority')
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Priority object for the given priority
    """
    q = session.query(Priority).filter_by(priority=priority)
    # NOTE(review): the try/one() call and return lines around this handler
    # are not visible in this view.
    except NoResultFound:

__all__.append('get_priority')
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: dictionary of priority names -> id mappings
    """
    q = session.query(Priority)
    # NOTE(review): the dict initialisation, the loop header and the final
    # return are not visible in this view.
        ret[x.priority] = x.priority_id

__all__.append('get_priorities')
1771 ################################################################################
class Section(ORMObject):
    # ORM object for the section table.
    def __init__(self, section = None):
        self.section = section

    def properties(self):
        return ['section', 'section_id', 'overrides_count']

    def not_null_constraints(self):
        # NOTE(review): the return value is not visible in this view.

    def __eq__(self, val):
        # Allow comparison against a plain section-name string.
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented

__all__.append('Section')
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Section object for the given section name
    """
    q = session.query(Section).filter_by(section=section)
    # NOTE(review): the try/one() call and return lines around this handler
    # are not visible in this view.
    except NoResultFound:

__all__.append('get_section')
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: dictionary of section names -> id mappings
    """
    q = session.query(Section)
    # NOTE(review): the dict initialisation, the loop header and the final
    # return are not visible in this view.
        ret[x.section] = x.section_id

__all__.append('get_sections')
1844 ################################################################################
class SignatureHistory(ORMObject):
    # Records one seen signature (fingerprint, timestamp, content hash).
    # NOTE(review): a decorator line (presumably @classmethod) above this
    # def is not visible in this view.
    def from_signed_file(cls, signed_file):
        """signature history entry from signed file

        @type signed_file: L{daklib.gpg.SignedFile}
        @param signed_file: signed file

        @rtype: L{SignatureHistory}
        """
        self.fingerprint = signed_file.primary_fingerprint
        self.signature_timestamp = signed_file.signature_timestamp
        self.contents_sha1 = signed_file.contents_sha1()
        # NOTE(review): the creation of `self` and the return are not
        # visible in this view.

    def query(self, session):
        # First matching row for this (fingerprint, timestamp, sha1) triple,
        # or None.
        return session.query(SignatureHistory).filter_by(fingerprint=self.fingerprint, signature_timestamp=self.signature_timestamp, contents_sha1=self.contents_sha1).first()

__all__.append('SignatureHistory')
1867 ################################################################################
class SrcContents(ORMObject):
    # ORM object mapping a file name to its source package.
    def __init__(self, file = None, source = None):
        # NOTE(review): the file assignment is not visible in this view.
        self.source = source

    def properties(self):
        return ['file', 'source']

__all__.append('SrcContents')
1879 ################################################################################
1881 from debian.debfile import Deb822
# Temporary Deb822 subclass to fix bugs with : handling; see #597249
class Dak822(Deb822):
    def _internal_parser(self, sequence, fields=None):
        # Parse RFC822-style control data from `sequence` into self,
        # keeping only keys accepted by `fields` (None = keep all).
        # The key is non-whitespace, non-colon characters before any colon.
        key_part = r"^(?P<key>[^: \t\n\r\f\v]+)\s*:\s*"
        single = re.compile(key_part + r"(?P<data>\S.*?)\s*$")
        multi = re.compile(key_part + r"$")
        multidata = re.compile(r"^\s(?P<data>.+?)\s*$")

        wanted_field = lambda f: fields is None or f in fields

        if isinstance(sequence, basestring):
            sequence = sequence.splitlines()

        for line in self.gpg_stripped_paragraph(sequence):
            m = single.match(line)
            # NOTE(review): several guard/branch lines of this parser are
            # not visible in this view; the statements below belong to the
            # single-line match, multi-line start and continuation cases
            # respectively.
                self[curkey] = content
            if not wanted_field(m.group('key')):
                curkey = m.group('key')
                content = m.group('data')
            m = multi.match(line)
                self[curkey] = content
            if not wanted_field(m.group('key')):
                curkey = m.group('key')
            m = multidata.match(line)
                content += '\n' + line # XXX not m.group('data')?
        # Flush the final field once the paragraph is exhausted.
        self[curkey] = content
class DBSource(ORMObject):
    # ORM object for the source table (one source package upload).
    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None, fingerprint = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date
        self.fingerprint = fingerprint

    # NOTE(review): a property/def header above this return is not visible
    # in this view.
        return self.source_id

    def properties(self):
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \
            'install_date', 'binaries_count', 'uploaders_count']

    def not_null_constraints(self):
        return ['source', 'version', 'install_date', 'maintainer', \
            'changedby', 'poolfile']

    def read_control_fields(self):
        """
        Reads the control information from a dsc

        @return: fields is the dsc information in a dictionary form
        """
        fullpath = self.poolfile.fullpath
        fields = Dak822(open(self.poolfile.fullpath, 'r'))
        # NOTE(review): the return of `fields` is not visible in this view;
        # the file handle is also never explicitly closed here.

    metadata = association_proxy('key', 'value')

    def scan_contents(self):
        """
        Returns a set of names for non directories. The path names are
        normalized after converting them from either utf-8 or iso8859-1
        """
        fullpath = self.poolfile.fullpath
        from daklib.contents import UnpackedSource
        unpacked = UnpackedSource(fullpath)
        for name in unpacked.get_all_filenames():
            # enforce proper utf-8 encoding
            # NOTE(review): the try: line is not visible in this view.
            name.decode('utf-8')
            except UnicodeDecodeError:
                name = name.decode('iso8859-1').encode('utf-8')
        # NOTE(review): the accumulation into and return of the result set
        # are not visible in this view.

    # NOTE(review): a property/def header is not visible here; the lines
    # below build a MetadataProxy over this source's SourceMetadata rows.
        session = object_session(self)
        query = session.query(SourceMetadata).filter_by(source=self)
        return MetadataProxy(session, query)

__all__.append('DBSource')
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
    1. exact match => 1.0-3
    2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: returns 1 if a source with expected version is found, otherwise 0
    """
    # NOTE(review): mutable default argument (suites); safe only while the
    # list is never mutated in place.
    from daklib.regexes import re_bin_only_nmu
    # Strip a binNMU suffix so 1.0-3+b1 also matches source 1.0-3.
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))
        # NOTE(review): handling of the literal "any" suite is not visible
        # in this view.
        # source must exist in 'suite' or a suite that is enhanced by 'suite'
        s = get_suite(suite, session)
        enhances_vcs = session.query(VersionCheck).filter(VersionCheck.suite==s).filter_by(check='Enhances')
        considered_suites = [ vc.reference for vc in enhances_vcs ]
        considered_suites.append(s)

        q = q.filter(DBSource.suites.any(Suite.suite_id.in_([s.suite_id for s in considered_suites])))
        # NOTE(review): the per-suite success check is not visible in this
        # view.

    # No source found so return not ok
    # NOTE(review): the final return is not visible in this view.

__all__.append('source_exists')
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @param source: DBSource package name to search for

    @return: list of Suite objects for the given source
    """
    return session.query(Suite).filter(Suite.sources.any(source=source)).all()

__all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect. If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: list of DBSource objects for the given name (may be empty)
    """
    q = session.query(DBSource).filter_by(source=source)

    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)

    # NOTE(review): the return (presumably q.all()) is not visible in this view.

__all__.append('get_sources_from_name')
# FIXME: This function fails badly if it finds more than 1 source package and
# its implementation is trivial enough to be inlined.
def get_source_in_suite(source, suite_name, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite_name}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite_name} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite_name: string
    @param suite_name: the suite name

    @return: the version for I{source} in I{suite}
    """
    suite = get_suite(suite_name, session)
    # NOTE(review): the unknown-suite guard and the try: line preceding
    # this return, and the handler body, are not visible in this view.
    return suite.get_sources(source).one()
    except NoResultFound:

__all__.append('get_source_in_suite')
def import_metadata_into_db(obj, session=None):
    """
    This routine works on either DBBinary or DBSource objects and imports
    their metadata into the database
    """
    fields = obj.read_control_fields()
    for k in fields.keys():
        # NOTE(review): the try: line is not visible in this view.
        val = str(fields[k])
        except UnicodeEncodeError:
            # Fall back to UTF-8
            # NOTE(review): the nested try: line is not visible in this view.
            val = fields[k].encode('utf-8')
            except UnicodeEncodeError:
                # Finally try iso8859-1
                val = fields[k].encode('iso8859-1')
                # Otherwise we allow the exception to percolate up and we cause
                # a reject as someone is playing silly buggers

        obj.metadata[get_or_set_metadatakey(k, session)] = val

    session.commit_or_flush()

__all__.append('import_metadata_into_db')
2162 ################################################################################
class SrcFormat(object):
    """ORM object for a row of the src_format table."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __repr__ def header is not visible in this view;
        # the return below belongs to it, not to __init__.
        return '<SrcFormat %s>' % (self.format_name)

__all__.append('SrcFormat')
2173 ################################################################################
# (display label, Suite attribute name) pairs used when rendering a suite's
# details as "Label: value" lines.
# NOTE(review): one entry (between Origin and Description in the original)
# is not visible in this view.
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
# Why the heck don't we have any UNIQUE constraints in table suite?
# TODO: Add UNIQUE constraints for appropriate columns.
class Suite(ORMObject):
    # ORM object for the suite table.
    def __init__(self, suite_name = None, version = None):
        self.suite_name = suite_name
        self.version = version

    def properties(self):
        # NOTE(review): the continuation of this list is not visible in
        # this view.
        return ['suite_name', 'version', 'sources_count', 'binaries_count', \

    def not_null_constraints(self):
        return ['suite_name']

    def __eq__(self, val):
        # Allow comparison against a plain suite-name string.
        if isinstance(val, str):
            return (self.suite_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.suite_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    # NOTE(review): the def header of this details-rendering method and the
    # initialisation of `ret` are not visible in this view.
        for disp, field in SUITE_FIELDS:
            val = getattr(self, field, None)
            # NOTE(review): a guard around this append is not visible here.
                ret.append("%s: %s" % (disp, val))

        return "\n".join(ret)

    def get_architectures(self, skipsrc=False, skipall=False):
        """
        Returns list of Architecture objects

        @type skipsrc: boolean
        @param skipsrc: Whether to skip returning the 'source' architecture entry

        @type skipall: boolean
        @param skipall: Whether to skip returning the 'all' architecture entry

        @return: list of Architecture objects for the given name (may be empty)
        """
        q = object_session(self).query(Architecture).with_parent(self)
        # NOTE(review): the skipsrc/skipall guard lines around these two
        # filters are not visible in this view.
        q = q.filter(Architecture.arch_string != 'source')
        q = q.filter(Architecture.arch_string != 'all')
        return q.order_by(Architecture.arch_string).all()

    def get_sources(self, source):
        """
        Returns a query object representing DBSource that is part of C{suite}.

          - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}

        @type source: string
        @param source: source package name

        @rtype: sqlalchemy.orm.query.Query
        @return: a query of DBSource
        """
        session = object_session(self)
        # NOTE(review): the continuation of this expression (restricting the
        # query to this suite) is not visible in this view.
        return session.query(DBSource).filter_by(source = source). \

    def get_overridesuite(self):
        # Resolve the suite whose overrides apply: self when overridesuite
        # is unset, otherwise the named suite.
        if self.overridesuite is None:
            # NOTE(review): the return of self is not visible in this view.

        return object_session(self).query(Suite).filter_by(suite_name=self.overridesuite).one()

    # NOTE(review): a property def header (presumably the dists path) is
    # not visible in this view.
        return os.path.join(self.archive.path, 'dists', self.suite_name)

__all__.append('Suite')
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Suite object for the requested suite name (None if not present)
    """
    q = session.query(Suite).filter_by(suite_name=suite)
    # NOTE(review): the try/one() call and return lines around this handler
    # are not visible in this view.
    except NoResultFound:

__all__.append('get_suite')
2304 ################################################################################
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name. The list is
    empty if suite does not exist.

    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: list of Architecture objects for the given name (may be empty)
    """
    # NOTE(review): the try: line is not visible in this view.
    return get_suite(suite, session).get_architectures(skipsrc, skipall)
    # AttributeError fires when get_suite() returned None (unknown suite).
    except AttributeError:
        # NOTE(review): the fallback return is not visible in this view.

__all__.append('get_suite_architectures')
2338 ################################################################################
class Uid(ORMObject):
    # ORM object for the uid table.
    def __init__(self, uid = None, name = None):
        # NOTE(review): the attribute assignments are not visible in this view.

    def __eq__(self, val):
        # Allow comparison against a plain uid string.
        if isinstance(val, str):
            return (self.uid == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.uid != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        return ['uid', 'name', 'fingerprint']

    def not_null_constraints(self):
        # NOTE(review): the return value is not visible in this view.

__all__.append('Uid')
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @return: the uid object for the given uidname
    """
    q = session.query(Uid).filter_by(uid=uidname)
    # NOTE(review): the try/one() call, the Uid creation inside this handler
    # and the return lines are not visible in this view.
    except NoResultFound:

        session.commit_or_flush()

__all__.append('get_or_set_uid')
def get_uid_from_fingerprint(fpr, session=None):
    # Resolve the Uid associated with a key fingerprint (joined via the
    # Fingerprint table).
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)
    # NOTE(review): the try/one() call and return lines around this handler
    # are not visible in this view.
    except NoResultFound:

__all__.append('get_uid_from_fingerprint')
2411 ################################################################################
class MetadataKey(ORMObject):
    # ORM object for a metadata key (control-field name).
    def __init__(self, key = None):
        # NOTE(review): the key assignment is not visible in this view.

    def properties(self):
        # NOTE(review): the return value is not visible in this view.

    def not_null_constraints(self):
        # NOTE(review): the return value is not visible in this view.

__all__.append('MetadataKey')
def get_or_set_metadatakey(keyname, session=None):
    """
    Returns MetadataKey object for given keyname.

    If no matching keyname is found, a row is inserted.

    @type keyname: string
    @param keyname: The keyname to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @return: the metadatakey object for the given keyname
    """
    q = session.query(MetadataKey).filter_by(key=keyname)
    # NOTE(review): the try/one() call, the session.add and the return
    # lines are not visible in this view.
    except NoResultFound:
        ret = MetadataKey(keyname)

        session.commit_or_flush()

__all__.append('get_or_set_metadatakey')
2457 ################################################################################
class BinaryMetadata(ORMObject):
    # ORM object: one key/value metadata pair attached to a binary package.
    def __init__(self, key = None, value = None, binary = None):
        # NOTE(review): the key/value assignments are not visible in this view.
        self.binary = binary

    def properties(self):
        return ['binary', 'key', 'value']

    def not_null_constraints(self):
        # NOTE(review): the return value is not visible in this view.

__all__.append('BinaryMetadata')
2473 ################################################################################
class SourceMetadata(ORMObject):
    # ORM object: one key/value metadata pair attached to a source package.
    def __init__(self, key = None, value = None, source = None):
        # NOTE(review): the key/value assignments are not visible in this view.
        self.source = source

    def properties(self):
        return ['source', 'key', 'value']

    def not_null_constraints(self):
        # NOTE(review): the return value is not visible in this view.

__all__.append('SourceMetadata')
2489 ################################################################################
class MetadataProxy(object):
    """Dict-like accessor over the metadata rows behind a SQLA query."""
    def __init__(self, session, query):
        self.session = session
        # NOTE(review): the query assignment is not visible in this view.

    def _get(self, key):
        # Resolve the MetadataKey row first; without it there is no value.
        metadata_key = self.session.query(MetadataKey).filter_by(key=key).first()
        if metadata_key is None:
            # NOTE(review): the early return is not visible in this view.
        metadata = self.query.filter_by(key=metadata_key).first()
        # NOTE(review): the return of `metadata` is not visible in this view.

    def __contains__(self, key):
        if self._get(key) is not None:
            # NOTE(review): the True/False returns are not visible in this view.

    def __getitem__(self, key):
        metadata = self._get(key)
        if metadata is None:
            # NOTE(review): the missing-key handling (presumably a KeyError
            # raise) is not visible in this view.
        return metadata.value

    def get(self, key, default=None):
        # NOTE(review): the body of this dict.get-style accessor is not
        # visible in this view.
2520 ################################################################################
class VersionCheck(ORMObject):
    # ORM object for the version_check table: a named check (e.g.
    # 'Enhances') relating one suite to a reference suite.
    def __init__(self, *args, **kwargs):
        # NOTE(review): the body is not visible in this view.

    def properties(self):
        #return ['suite_id', 'check', 'reference_id']
        # NOTE(review): the actual return is not visible in this view.

    def not_null_constraints(self):
        return ['suite', 'check', 'reference']

__all__.append('VersionCheck')
def get_version_checks(suite_name, check = None, session = None):
    # Return the VersionCheck rows for a suite, optionally limited to one
    # check type (e.g. 'Enhances').
    suite = get_suite(suite_name, session)
    # NOTE(review): the unknown-suite guard is not visible in this view.
    # Make sure that what we return is iterable so that list comprehensions
    # involving this don't cause a traceback

    q = session.query(VersionCheck).filter_by(suite=suite)
    # NOTE(review): the `if check:` guard line and the final return are not
    # visible in this view.
    q = q.filter_by(check=check)

__all__.append('get_version_checks')
2549 ################################################################################
# Singleton-style database connection holder: every instance shares one
# __dict__ (Borg pattern via __shared_state, whose declaration is on a line
# missing from this excerpt), so the engine/metadata/sessionmaker are set up
# exactly once per process.
2551 class DBConn(object):
2553 database module init.
2557 def __init__(self, *args, **kwargs):
# All instances alias the shared state dict.
2558 self.__dict__ = self.__shared_state
# Only the first construction performs the (expensive) engine setup;
# __createconn() is presumably called on a line missing from this excerpt.
2560 if not getattr(self, 'initialised', False):
2561 self.initialised = True
# Python 2 idiom (dict.has_key); echoes SQL when a 'debug' kwarg is passed.
2562 self.debug = kwargs.has_key('debug')
# Reflect the database schema: autoload every listed table as self.tbl_<name>
# and every listed view as self.view_<name>. NOTE(review): excerpted listing --
# the `tables = (` / `views = (` openers and many entries of both tuples are
# on lines missing from this view.
2565 def __setuptables(self):
2568 'acl_architecture_map',
2569 'acl_fingerprint_map',
2576 'binaries_metadata',
2584 'external_overrides',
2585 'extra_src_references',
2587 'files_archive_map',
2593 # TODO: the maintainer column in table override should be removed.
2597 'policy_queue_upload',
2598 'policy_queue_upload_binaries_map',
2599 'policy_queue_byhand_file',
2602 'signature_history',
2611 'suite_architectures',
2612 'suite_build_queue_copy',
2613 'suite_src_formats',
# From here the entries are database *views*, not tables.
2619 'almost_obsolete_all_associations',
2620 'almost_obsolete_src_associations',
2621 'any_associations_source',
2622 'bin_associations_binaries',
2623 'binaries_suite_arch',
2626 'newest_all_associations',
2627 'newest_any_associations',
2629 'newest_src_association',
2630 'obsolete_all_associations',
2631 'obsolete_any_associations',
2632 'obsolete_any_by_all_associations',
2633 'obsolete_src_associations',
2635 'src_associations_bin',
2636 'src_associations_src',
2637 'suite_arch_by_name',
# Reflect each table from the live database (autoload) and expose it as an
# attribute; useexisting tolerates repeated reflection of the same name.
2640 for table_name in tables:
2641 table = Table(table_name, self.db_meta, \
2642 autoload=True, useexisting=True)
2643 setattr(self, 'tbl_%s' % table_name, table)
# Views are reflected the same way but exposed under the view_ prefix.
2645 for view_name in views:
2646 view = Table(view_name, self.db_meta, autoload=True)
2647 setattr(self, 'view_%s' % view_name, view)
# Wire every ORM class to its reflected table with classic SQLAlchemy
# mapper()/relation() calls. `properties` renames columns and declares
# relations; `extension = validator` hooks the validation MapperExtension
# defined elsewhere in this file. NOTE(review): excerpted listing -- several
# `properties = dict(` openers and closing parentheses are on lines missing
# from this view.
2649 def __setupmappers(self):
2650 mapper(Architecture, self.tbl_architecture,
2651 properties = dict(arch_id = self.tbl_architecture.c.id,
2652 suites = relation(Suite, secondary=self.tbl_suite_architectures,
2653 order_by=self.tbl_suite.c.suite_name,
2654 backref=backref('architectures', order_by=self.tbl_architecture.c.arch_string))),
2655 extension = validator)
# ACLs: many-to-many to architectures/fingerprints via map tables; sets, not lists.
2657 mapper(ACL, self.tbl_acl,
2659 architectures = relation(Architecture, secondary=self.tbl_acl_architecture_map, collection_class=set),
2660 fingerprints = relation(Fingerprint, secondary=self.tbl_acl_fingerprint_map, collection_class=set),
2661 match_keyring = relation(Keyring, primaryjoin=(self.tbl_acl.c.match_keyring_id == self.tbl_keyrings.c.id)),
2662 per_source = relation(ACLPerSource, collection_class=set),
# Per-source ACL entries: two distinct FKs into fingerprint, hence the
# explicit primaryjoins to disambiguate.
2665 mapper(ACLPerSource, self.tbl_acl_per_source,
2667 acl = relation(ACL),
2668 fingerprint = relation(Fingerprint, primaryjoin=(self.tbl_acl_per_source.c.fingerprint_id == self.tbl_fingerprint.c.id)),
2669 created_by = relation(Fingerprint, primaryjoin=(self.tbl_acl_per_source.c.created_by_id == self.tbl_fingerprint.c.id)),
2672 mapper(Archive, self.tbl_archive,
2673 properties = dict(archive_id = self.tbl_archive.c.id,
2674 archive_name = self.tbl_archive.c.name))
2676 mapper(ArchiveFile, self.tbl_files_archive_map,
2677 properties = dict(archive = relation(Archive, backref='files'),
2678 component = relation(Component),
2679 file = relation(PoolFile, backref='archives')))
2681 mapper(BuildQueue, self.tbl_build_queue,
2682 properties = dict(queue_id = self.tbl_build_queue.c.id,
2683 suite = relation(Suite, primaryjoin=(self.tbl_build_queue.c.suite_id==self.tbl_suite.c.id))))
# Binary packages: the *_id aliases expose raw FK columns next to the relations.
2685 mapper(DBBinary, self.tbl_binaries,
2686 properties = dict(binary_id = self.tbl_binaries.c.id,
2687 package = self.tbl_binaries.c.package,
2688 version = self.tbl_binaries.c.version,
2689 maintainer_id = self.tbl_binaries.c.maintainer,
2690 maintainer = relation(Maintainer),
2691 source_id = self.tbl_binaries.c.source,
2692 source = relation(DBSource, backref='binaries'),
2693 arch_id = self.tbl_binaries.c.architecture,
2694 architecture = relation(Architecture),
2695 poolfile_id = self.tbl_binaries.c.file,
2696 poolfile = relation(PoolFile),
2697 binarytype = self.tbl_binaries.c.type,
2698 fingerprint_id = self.tbl_binaries.c.sig_fpr,
2699 fingerprint = relation(Fingerprint),
2700 install_date = self.tbl_binaries.c.install_date,
2701 suites = relation(Suite, secondary=self.tbl_bin_associations,
2702 backref=backref('binaries', lazy='dynamic')),
2703 extra_sources = relation(DBSource, secondary=self.tbl_extra_src_references,
2704 backref=backref('extra_binary_references', lazy='dynamic')),
# 'key' maps metadata rows as a dict keyed on the metadata key object.
2705 key = relation(BinaryMetadata, cascade='all',
2706 collection_class=attribute_mapped_collection('key'))),
2707 extension = validator)
2709 mapper(Component, self.tbl_component,
2710 properties = dict(component_id = self.tbl_component.c.id,
2711 component_name = self.tbl_component.c.name),
2712 extension = validator)
2714 mapper(DBConfig, self.tbl_config,
2715 properties = dict(config_id = self.tbl_config.c.id))
2717 mapper(DSCFile, self.tbl_dsc_files,
2718 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
2719 source_id = self.tbl_dsc_files.c.source,
2720 source = relation(DBSource),
2721 poolfile_id = self.tbl_dsc_files.c.file,
2722 poolfile = relation(PoolFile)))
2724 mapper(ExternalOverride, self.tbl_external_overrides,
2726 suite_id = self.tbl_external_overrides.c.suite,
2727 suite = relation(Suite),
2728 component_id = self.tbl_external_overrides.c.component,
2729 component = relation(Component)))
2731 mapper(PoolFile, self.tbl_files,
2732 properties = dict(file_id = self.tbl_files.c.id,
2733 filesize = self.tbl_files.c.size),
2734 extension = validator)
2736 mapper(Fingerprint, self.tbl_fingerprint,
2737 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
2738 uid_id = self.tbl_fingerprint.c.uid,
2739 uid = relation(Uid),
2740 keyring_id = self.tbl_fingerprint.c.keyring,
2741 keyring = relation(Keyring),
2742 acl = relation(ACL)),
2743 extension = validator)
2745 mapper(Keyring, self.tbl_keyrings,
2746 properties = dict(keyring_name = self.tbl_keyrings.c.name,
2747 keyring_id = self.tbl_keyrings.c.id,
2748 acl = relation(ACL, primaryjoin=(self.tbl_keyrings.c.acl_id == self.tbl_acl.c.id)))),
# Changes files: plain column aliases only, no relations.
2750 mapper(DBChange, self.tbl_changes,
2751 properties = dict(change_id = self.tbl_changes.c.id,
2752 seen = self.tbl_changes.c.seen,
2753 source = self.tbl_changes.c.source,
2754 binaries = self.tbl_changes.c.binaries,
2755 architecture = self.tbl_changes.c.architecture,
2756 distribution = self.tbl_changes.c.distribution,
2757 urgency = self.tbl_changes.c.urgency,
2758 maintainer = self.tbl_changes.c.maintainer,
2759 changedby = self.tbl_changes.c.changedby,
2760 date = self.tbl_changes.c.date,
2761 version = self.tbl_changes.c.version))
# Maintainer doubles as "maintains" and "changed-by" -- two relations onto
# DBSource with distinct join conditions.
2763 mapper(Maintainer, self.tbl_maintainer,
2764 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
2765 maintains_sources = relation(DBSource, backref='maintainer',
2766 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
2767 changed_sources = relation(DBSource, backref='changedby',
2768 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
2769 extension = validator)
2771 mapper(NewComment, self.tbl_new_comments,
2772 properties = dict(comment_id = self.tbl_new_comments.c.id,
2773 policy_queue = relation(PolicyQueue)))
# Overrides: each relation installs a lazy 'overrides' backref on its target.
2775 mapper(Override, self.tbl_override,
2776 properties = dict(suite_id = self.tbl_override.c.suite,
2777 suite = relation(Suite, \
2778 backref=backref('overrides', lazy='dynamic')),
2779 package = self.tbl_override.c.package,
2780 component_id = self.tbl_override.c.component,
2781 component = relation(Component, \
2782 backref=backref('overrides', lazy='dynamic')),
2783 priority_id = self.tbl_override.c.priority,
2784 priority = relation(Priority, \
2785 backref=backref('overrides', lazy='dynamic')),
2786 section_id = self.tbl_override.c.section,
2787 section = relation(Section, \
2788 backref=backref('overrides', lazy='dynamic')),
2789 overridetype_id = self.tbl_override.c.type,
2790 overridetype = relation(OverrideType, \
2791 backref=backref('overrides', lazy='dynamic'))))
2793 mapper(OverrideType, self.tbl_override_type,
2794 properties = dict(overridetype = self.tbl_override_type.c.type,
2795 overridetype_id = self.tbl_override_type.c.id))
2797 mapper(PolicyQueue, self.tbl_policy_queue,
2798 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id,
2799 suite = relation(Suite, primaryjoin=(self.tbl_policy_queue.c.suite_id == self.tbl_suite.c.id))))
2801 mapper(PolicyQueueUpload, self.tbl_policy_queue_upload,
2803 changes = relation(DBChange),
2804 policy_queue = relation(PolicyQueue, backref='uploads'),
2805 target_suite = relation(Suite),
2806 source = relation(DBSource),
2807 binaries = relation(DBBinary, secondary=self.tbl_policy_queue_upload_binaries_map),
2810 mapper(PolicyQueueByhandFile, self.tbl_policy_queue_byhand_file,
2812 upload = relation(PolicyQueueUpload, backref='byhand'),
2816 mapper(Priority, self.tbl_priority,
2817 properties = dict(priority_id = self.tbl_priority.c.id))
2819 mapper(Section, self.tbl_section,
2820 properties = dict(section_id = self.tbl_section.c.id,
2821 section=self.tbl_section.c.section))
2823 mapper(SignatureHistory, self.tbl_signature_history)
# Source packages: mirrors the DBBinary mapping, including the metadata dict.
2825 mapper(DBSource, self.tbl_source,
2826 properties = dict(source_id = self.tbl_source.c.id,
2827 version = self.tbl_source.c.version,
2828 maintainer_id = self.tbl_source.c.maintainer,
2829 poolfile_id = self.tbl_source.c.file,
2830 poolfile = relation(PoolFile),
2831 fingerprint_id = self.tbl_source.c.sig_fpr,
2832 fingerprint = relation(Fingerprint),
2833 changedby_id = self.tbl_source.c.changedby,
2834 srcfiles = relation(DSCFile,
2835 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
2836 suites = relation(Suite, secondary=self.tbl_src_associations,
2837 backref=backref('sources', lazy='dynamic')),
2838 uploaders = relation(Maintainer,
2839 secondary=self.tbl_src_uploaders),
2840 key = relation(SourceMetadata, cascade='all',
2841 collection_class=attribute_mapped_collection('key'))),
2842 extension = validator)
2844 mapper(SrcFormat, self.tbl_src_format,
2845 properties = dict(src_format_id = self.tbl_src_format.c.id,
2846 format_name = self.tbl_src_format.c.format_name))
# Suites: two distinct FKs into policy_queue, hence explicit primaryjoins.
2848 mapper(Suite, self.tbl_suite,
2849 properties = dict(suite_id = self.tbl_suite.c.id,
2850 policy_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.policy_queue_id == self.tbl_policy_queue.c.id)),
2851 new_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.new_queue_id == self.tbl_policy_queue.c.id)),
2852 copy_queues = relation(BuildQueue,
2853 secondary=self.tbl_suite_build_queue_copy),
2854 srcformats = relation(SrcFormat, secondary=self.tbl_suite_src_formats,
2855 backref=backref('suites', lazy='dynamic')),
2856 archive = relation(Archive, backref='suites'),
2857 acls = relation(ACL, secondary=self.tbl_suite_acl_map, collection_class=set),
2858 components = relation(Component, secondary=self.tbl_component_suite,
2859 order_by=self.tbl_component.c.ordering,
2860 backref=backref('suites'))),
2861 extension = validator)
2863 mapper(Uid, self.tbl_uid,
2864 properties = dict(uid_id = self.tbl_uid.c.id,
2865 fingerprint = relation(Fingerprint)),
2866 extension = validator)
# Contents listings: cascade='all' so contents rows die with their package.
2868 mapper(BinContents, self.tbl_bin_contents,
2870 binary = relation(DBBinary,
2871 backref=backref('contents', lazy='dynamic', cascade='all')),
2872 file = self.tbl_bin_contents.c.file))
2874 mapper(SrcContents, self.tbl_src_contents,
2876 source = relation(DBSource,
2877 backref=backref('contents', lazy='dynamic', cascade='all')),
2878 file = self.tbl_src_contents.c.file))
2880 mapper(MetadataKey, self.tbl_metadata_keys,
2882 key_id = self.tbl_metadata_keys.c.key_id,
2883 key = self.tbl_metadata_keys.c.key))
2885 mapper(BinaryMetadata, self.tbl_binaries_metadata,
2887 binary_id = self.tbl_binaries_metadata.c.bin_id,
2888 binary = relation(DBBinary),
2889 key_id = self.tbl_binaries_metadata.c.key_id,
2890 key = relation(MetadataKey),
2891 value = self.tbl_binaries_metadata.c.value))
2893 mapper(SourceMetadata, self.tbl_source_metadata,
2895 source_id = self.tbl_source_metadata.c.src_id,
2896 source = relation(DBSource),
2897 key_id = self.tbl_source_metadata.c.key_id,
2898 key = relation(MetadataKey),
2899 value = self.tbl_source_metadata.c.value))
# Version checks: reference suite is eagerly loaded (lazy='joined').
2901 mapper(VersionCheck, self.tbl_version_check,
2903 suite_id = self.tbl_version_check.c.suite,
2904 suite = relation(Suite, primaryjoin=self.tbl_version_check.c.suite==self.tbl_suite.c.id),
2905 reference_id = self.tbl_version_check.c.reference,
2906 reference = relation(Suite, primaryjoin=self.tbl_version_check.c.reference==self.tbl_suite.c.id, lazy='joined')))
2908 ## Connection functions
# Build the PostgreSQL connection string from dak configuration, create the
# SQLAlchemy engine/metadata/sessionmaker and run table reflection + mapper
# setup. NOTE(review): excerpted listing -- the `cnf = Config()` assignment,
# the `else:` before line 2922 and the `try:` matched by the except at 2960
# are on lines missing from this view.
2909 def __createconn(self):
2910 from config import Config
# Three connection styles: pg service file, explicit host, or local socket.
2912 if cnf.has_key("DB::Service"):
2913 connstr = "postgresql://service=%s" % cnf["DB::Service"]
2914 elif cnf.has_key("DB::Host"):
2916 connstr = "postgresql://%s" % cnf["DB::Host"]
# Port -1 is the sentinel for "use the default port".
2917 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
2918 connstr += ":%s" % cnf["DB::Port"]
2919 connstr += "/%s" % cnf["DB::Name"]
# Local (unix-socket) connection; a non-default port goes in the query string.
2922 connstr = "postgresql:///%s" % cnf["DB::Name"]
2923 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
2924 connstr += "?port=%s" % cnf["DB::Port"]
# Engine tuning from config; 'echo' logs SQL when debug was requested.
2926 engine_args = { 'echo': self.debug }
2927 if cnf.has_key('DB::PoolSize'):
2928 engine_args['pool_size'] = int(cnf['DB::PoolSize'])
2929 if cnf.has_key('DB::MaxOverflow'):
2930 engine_args['max_overflow'] = int(cnf['DB::MaxOverflow'])
# use_native_unicode is only togglable on SQLAlchemy newer than 0.5.
2931 if sa_major_version != '0.5' and cnf.has_key('DB::Unicode') and \
2932 cnf['DB::Unicode'] == 'false':
2933 engine_args['use_native_unicode'] = False
2935 # Monkey patch a new dialect in in order to support service= syntax
2936 import sqlalchemy.dialects.postgresql
2937 from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
2938 class PGDialect_psycopg2_dak(PGDialect_psycopg2):
2939 def create_connect_args(self, url):
2940 if str(url).startswith('postgresql://service='):
# Strip the 'postgresql://service=' prefix (21 chars) and pass the service
# name straight through to libpq as a DSN fragment.
2942 servicename = str(url)[21:]
2943 return (['service=%s' % servicename], {})
2945 return PGDialect_psycopg2.create_connect_args(self, url)
2947 sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak
# Engine + shared MetaData; sessionmaker arguments continue on lines missing
# from this excerpt (presumably autoflush/autocommit settings).
2950 self.db_pg = create_engine(connstr, **engine_args)
2951 self.db_meta = MetaData()
2952 self.db_meta.bind = self.db_pg
2953 self.db_smaker = sessionmaker(bind=self.db_pg,
2957 self.__setuptables()
2958 self.__setupmappers()
# Abort hard if the database is unreachable -- dak cannot run without it.
2960 except OperationalError as e:
2962 utils.fubar("Cannot connect to database (%s)" % str(e))
# Remember the creating pid so session() can detect use after fork().
2964 self.pid = os.getpid()
# Hand out a new ORM session, optionally raising PostgreSQL work_mem for the
# session's first transaction.
2966 def session(self, work_mem = 0):
2968 Returns a new session object. If a work_mem parameter is provided a new
2969 transaction is started and the work_mem parameter is set for this
2970 transaction. The work_mem parameter is measured in MB. A default value
2971 will be used if the parameter is not set.
2973 # reinitialize DBConn in new processes
# A forked child must not share the parent's socket/engine; the re-init call
# itself is on a line missing from this excerpt (pid check visible below).
2974 if self.pid != os.getpid():
2977 session = self.db_smaker()
# SET LOCAL only affects the current transaction; the `if work_mem > 0:`
# guard is presumably on the missing line before this one.
2979 session.execute("SET LOCAL work_mem TO '%d MB'" % work_mem)
2982 __all__.append('DBConn')