5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
37 import daklib.daksubprocess
39 from os.path import normpath
50 import simplejson as json
52 from datetime import datetime, timedelta
53 from errno import ENOENT
54 from tempfile import mkstemp, mkdtemp
55 from tarfile import TarFile
57 from inspect import getargspec
60 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
62 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
63 backref, MapperExtension, EXT_CONTINUE, object_mapper, clear_mappers
64 from sqlalchemy import types as sqltypes
65 from sqlalchemy.orm.collections import attribute_mapped_collection
66 from sqlalchemy.ext.associationproxy import association_proxy
68 # Don't remove this, we re-export the exceptions to scripts which import us
69 from sqlalchemy.exc import *
70 from sqlalchemy.orm.exc import NoResultFound
72 # Only import Config until Queue stuff is changed to store its config
74 from config import Config
75 from textutils import fix_maintainer
76 from dak_exceptions import DBUpdateError, NoSourceFieldError, FileExistsError
78 # suppress some deprecation warnings in squeeze related to sqlalchemy
80 warnings.filterwarnings('ignore', \
81 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
83 warnings.filterwarnings('ignore', \
84 "Predicate of partial index .* ignored during reflection", \
88 ################################################################################
90 # Patch in support for the debversion field type so that it works during
94 # that is for sqlalchemy 0.6
95 UserDefinedType = sqltypes.UserDefinedType
97 # this one for sqlalchemy 0.5
98 UserDefinedType = sqltypes.TypeEngine
100 class DebVersion(UserDefinedType):
101 def get_col_spec(self):
104 def bind_processor(self, dialect):
107 # ' = None' is needed for sqlalchemy 0.5:
108 def result_processor(self, dialect, coltype = None):
111 sa_major_version = sqlalchemy.__version__[0:3]
112 if sa_major_version in ["0.5", "0.6", "0.7", "0.8", "0.9"]:
113 from sqlalchemy.databases import postgres
114 postgres.ischema_names['debversion'] = DebVersion
116 raise Exception("dak only ported to SQLA versions 0.5 to 0.9. See daklib/dbconn.py")
118 ################################################################################
120 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
122 ################################################################################
124 def session_wrapper(fn):
126 Wrapper around common ".., session=None):" handling. If the wrapped
127 function is called without passing 'session', we create a local one
128 and destroy it when the function ends.
130 Also attaches a commit_or_flush method to the session; if we created a
131 local session, this is a synonym for session.commit(), otherwise it is a
132 synonym for session.flush().
135 def wrapped(*args, **kwargs):
136 private_transaction = False
138 # Find the session object
139 session = kwargs.get('session')
142 if len(args) <= len(getargspec(fn)[0]) - 1:
143 # No session specified as last argument or in kwargs
144 private_transaction = True
145 session = kwargs['session'] = DBConn().session()
147 # Session is last argument in args
151 session = args[-1] = DBConn().session()
152 private_transaction = True
154 if private_transaction:
155 session.commit_or_flush = session.commit
157 session.commit_or_flush = session.flush
160 return fn(*args, **kwargs)
162 if private_transaction:
163 # We created a session; close it.
166 wrapped.__doc__ = fn.__doc__
167 wrapped.func_name = fn.func_name
171 __all__.append('session_wrapper')
173 ################################################################################
175 class ORMObject(object):
177 ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
178 derived classes must implement the properties() method.
181 def properties(self):
183 This method should be implemented by all derived classes and returns a
184 list of the important properties. The properties 'created' and
185 'modified' will be added automatically. A suffix '_count' should be
186 added to properties that are lists or query objects. The most important
187 property name should be returned as the first element in the list
188 because it is used by repr().
194 Returns a JSON representation of the object based on the properties
195 returned from the properties() method.
198 # add created and modified
199 all_properties = self.properties() + ['created', 'modified']
200 for property in all_properties:
201 # check for list or query
202 if property[-6:] == '_count':
203 real_property = property[:-6]
204 if not hasattr(self, real_property):
206 value = getattr(self, real_property)
207 if hasattr(value, '__len__'):
210 elif hasattr(value, 'count'):
211 # query (but not during validation)
212 if self.in_validation:
214 value = value.count()
216 raise KeyError('Do not understand property %s.' % property)
218 if not hasattr(self, property):
221 value = getattr(self, property)
225 elif isinstance(value, ORMObject):
226 # use repr() for ORMObject types
229 # we want a string for all other types because json cannot
232 data[property] = value
233 return json.dumps(data)
237 Returns the name of the class.
239 return type(self).__name__
243 Returns a short string representation of the object using the first
244 element from the properties() method.
246 primary_property = self.properties()[0]
247 value = getattr(self, primary_property)
248 return '<%s %s>' % (self.classname(), str(value))
252 Returns a human readable form of the object using the properties()
255 return '<%s %s>' % (self.classname(), self.json())
257 def not_null_constraints(self):
259 Returns a list of properties that must be not NULL. Derived classes
260 should override this method if needed.
264 validation_message = \
265 "Validation failed because property '%s' must not be empty in object\n%s"
267 in_validation = False
271 This function validates the not NULL constraints as returned by
272 not_null_constraints(). It raises the DBUpdateError exception if
275 for property in self.not_null_constraints():
276 # TODO: It is a bit awkward that the mapper configuration allow
277 # directly setting the numeric _id columns. We should get rid of it
279 if hasattr(self, property + '_id') and \
280 getattr(self, property + '_id') is not None:
282 if not hasattr(self, property) or getattr(self, property) is None:
283 # str() might lead to races due to a 2nd flush
284 self.in_validation = True
285 message = self.validation_message % (property, str(self))
286 self.in_validation = False
287 raise DBUpdateError(message)
291 def get(cls, primary_key, session = None):
293 This is a support function that allows getting an object by its primary
296 Architecture.get(3[, session])
298 instead of the more verbose
300 session.query(Architecture).get(3)
302 return session.query(cls).get(primary_key)
304 def session(self, replace = False):
306 Returns the current session that is associated with the object. May
307 return None is object is in detached state.
310 return object_session(self)
312 def clone(self, session = None):
314 Clones the current object in a new session and returns the new clone. A
315 fresh session is created if the optional session parameter is not
316 provided. The function will fail if a session is provided and has
319 RATIONALE: SQLAlchemy's session is not thread safe. This method clones
320 an existing object to allow several threads to work with their own
321 instances of an ORMObject.
323 WARNING: Only persistent (committed) objects can be cloned. Changes
324 made to the original object that are not committed yet will get lost.
325 The session of the new object will always be rolled back to avoid
329 if self.session() is None:
330 raise RuntimeError( \
331 'Method clone() failed for detached object:\n%s' % self)
332 self.session().flush()
333 mapper = object_mapper(self)
334 primary_key = mapper.primary_key_from_instance(self)
335 object_class = self.__class__
337 session = DBConn().session()
338 elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
339 raise RuntimeError( \
340 'Method clone() failed due to unflushed changes in session.')
341 new_object = session.query(object_class).get(primary_key)
343 if new_object is None:
344 raise RuntimeError( \
345 'Method clone() failed for non-persistent object:\n%s' % self)
348 __all__.append('ORMObject')
350 ################################################################################
352 class Validator(MapperExtension):
354 This class calls the validate() method for each instance for the
355 'before_update' and 'before_insert' events. A global object validator is
356 used for configuring the individual mappers.
359 def before_update(self, mapper, connection, instance):
363 def before_insert(self, mapper, connection, instance):
367 validator = Validator()
369 ################################################################################
371 class ACL(ORMObject):
373 return "<ACL {0}>".format(self.name)
375 __all__.append('ACL')
377 class ACLPerSource(ORMObject):
379 return "<ACLPerSource acl={0} fingerprint={1} source={2} reason={3}>".format(self.acl.name, self.fingerprint.fingerprint, self.source, self.reason)
381 __all__.append('ACLPerSource')
383 ################################################################################
class Architecture(ORMObject):
    """A build architecture (e.g. 'amd64') known to the archive."""

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Support comparing directly against a plain string; any other
        # operand type falls back to the default comparison machinery.
        if not isinstance(val, str):
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        # Mirror of __eq__ for string operands.
        if not isinstance(val, str):
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # 'arch_string' comes first: ORMObject uses the first entry for repr().
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']
408 __all__.append('Architecture')
411 def get_architecture(architecture, session=None):
413 Returns database id for given C{architecture}.
415 @type architecture: string
416 @param architecture: The name of the architecture
418 @type session: Session
419 @param session: Optional SQLA session object (a temporary one will be
420 generated if not supplied)
423 @return: Architecture object for the given arch (None if not present)
426 q = session.query(Architecture).filter_by(arch_string=architecture)
430 except NoResultFound:
433 __all__.append('get_architecture')
435 ################################################################################
437 class Archive(object):
438 def __init__(self, *args, **kwargs):
442 return '<Archive %s>' % self.archive_name
444 __all__.append('Archive')
447 def get_archive(archive, session=None):
449 returns database id for given C{archive}.
451 @type archive: string
452 @param archive: the name of the archive
454 @type session: Session
455 @param session: Optional SQLA session object (a temporary one will be
456 generated if not supplied)
459 @return: Archive object for the given name (None if not present)
462 archive = archive.lower()
464 q = session.query(Archive).filter_by(archive_name=archive)
468 except NoResultFound:
471 __all__.append('get_archive')
473 ################################################################################
475 class ArchiveFile(object):
476 def __init__(self, archive=None, component=None, file=None):
477 self.archive = archive
478 self.component = component
482 return os.path.join(self.archive.path, 'pool', self.component.component_name, self.file.filename)
484 __all__.append('ArchiveFile')
486 ################################################################################
488 class BinContents(ORMObject):
489 def __init__(self, file = None, binary = None):
493 def properties(self):
494 return ['file', 'binary']
496 __all__.append('BinContents')
498 ################################################################################
500 class DBBinary(ORMObject):
501 def __init__(self, package = None, source = None, version = None, \
502 maintainer = None, architecture = None, poolfile = None, \
503 binarytype = 'deb', fingerprint=None):
504 self.package = package
506 self.version = version
507 self.maintainer = maintainer
508 self.architecture = architecture
509 self.poolfile = poolfile
510 self.binarytype = binarytype
511 self.fingerprint = fingerprint
515 return self.binary_id
517 def properties(self):
518 return ['package', 'version', 'maintainer', 'source', 'architecture', \
519 'poolfile', 'binarytype', 'fingerprint', 'install_date', \
520 'suites_count', 'binary_id', 'contents_count', 'extra_sources']
522 def not_null_constraints(self):
523 return ['package', 'version', 'maintainer', 'source', 'poolfile', \
526 metadata = association_proxy('key', 'value')
528 def scan_contents(self):
530 Yields the contents of the package. Only regular files are yielded and
531 the path names are normalized after converting them from either utf-8
532 or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the
533 package does not contain any regular file.
535 fullpath = self.poolfile.fullpath
536 dpkg_cmd = ('dpkg-deb', '--fsys-tarfile', fullpath)
537 dpkg = daklib.daksubprocess.Popen(dpkg_cmd, stdout=subprocess.PIPE)
538 tar = TarFile.open(fileobj = dpkg.stdout, mode = 'r|')
539 for member in tar.getmembers():
540 if not member.isdir():
541 name = normpath(member.name)
542 # enforce proper utf-8 encoding
545 except UnicodeDecodeError:
546 name = name.decode('iso8859-1').encode('utf-8')
552 def read_control(self):
554 Reads the control information from a binary.
557 @return: stanza text of the control section.
560 fullpath = self.poolfile.fullpath
561 with open(fullpath, 'r') as deb_file:
562 return utils.deb_extract_control(deb_file)
564 def read_control_fields(self):
566 Reads the control information from a binary and return
570 @return: fields of the control section as a dictionary.
572 stanza = self.read_control()
573 return apt_pkg.TagSection(stanza)
577 session = object_session(self)
578 query = session.query(BinaryMetadata).filter_by(binary=self)
579 return MetadataProxy(session, query)
581 __all__.append('DBBinary')
584 def get_suites_binary_in(package, session=None):
586 Returns list of Suite objects which given C{package} name is in
589 @param package: DBBinary package name to search for
592 @return: list of Suite objects for the given package
595 return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()
597 __all__.append('get_suites_binary_in')
600 def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
602 Returns the component name of the newest binary package in suite_list or
603 None if no package is found. The result can be optionally filtered by a list
604 of architecture names.
607 @param package: DBBinary package name to search for
609 @type suite_list: list of str
610 @param suite_list: list of suite_name items
612 @type arch_list: list of str
613 @param arch_list: optional list of arch_string items that defaults to []
615 @rtype: str or NoneType
616 @return: name of component or None
619 q = session.query(DBBinary).filter_by(package = package). \
620 join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
621 if len(arch_list) > 0:
622 q = q.join(DBBinary.architecture). \
623 filter(Architecture.arch_string.in_(arch_list))
624 binary = q.order_by(desc(DBBinary.version)).first()
628 return binary.poolfile.component.component_name
630 __all__.append('get_component_by_package_suite')
632 ################################################################################
634 class BuildQueue(object):
635 def __init__(self, *args, **kwargs):
639 return '<BuildQueue %s>' % self.queue_name
641 __all__.append('BuildQueue')
643 ################################################################################
class Component(ORMObject):
    """An archive component such as 'main', 'contrib' or 'non-free'."""

    def __init__(self, component_name = None):
        self.component_name = component_name

    def __eq__(self, val):
        # Allow comparison against a bare component-name string; defer to
        # the default comparison machinery for any other operand type.
        if not isinstance(val, str):
            return NotImplemented
        return self.component_name == val

    def __ne__(self, val):
        # Mirror of __eq__ for string operands.
        if not isinstance(val, str):
            return NotImplemented
        return self.component_name != val

    def properties(self):
        # 'component_name' comes first: ORMObject uses the first entry
        # for repr().
        return ['component_name', 'component_id', 'description',
            'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        return ['component_name']
669 __all__.append('Component')
672 def get_component(component, session=None):
674 Returns database id for given C{component}.
676 @type component: string
677 @param component: The name of the override type
680 @return: the database id for the given component
683 component = component.lower()
685 q = session.query(Component).filter_by(component_name=component)
689 except NoResultFound:
692 __all__.append('get_component')
695 def get_mapped_component(component_name, session=None):
696 """get component after mappings
698 Evaluate component mappings from ComponentMappings in dak.conf for the
699 given component name.
701 @todo: ansgar wants to get rid of this. It's currently only used for
704 @type component_name: str
705 @param component_name: component name
707 @param session: database session
709 @rtype: L{daklib.dbconn.Component} or C{None}
710 @return: component after applying maps or C{None}
713 for m in cnf.value_list("ComponentMappings"):
714 (src, dst) = m.split()
715 if component_name == src:
717 component = session.query(Component).filter_by(component_name=component_name).first()
720 __all__.append('get_mapped_component')
723 def get_component_names(session=None):
725 Returns list of strings of component names.
728 @return: list of strings of component names
731 return [ x.component_name for x in session.query(Component).all() ]
733 __all__.append('get_component_names')
735 ################################################################################
737 class DBConfig(object):
738 def __init__(self, *args, **kwargs):
742 return '<DBConfig %s>' % self.name
744 __all__.append('DBConfig')
746 ################################################################################
749 def get_or_set_contents_file_id(filename, session=None):
751 Returns database id for given filename.
753 If no matching file is found, a row is inserted.
755 @type filename: string
756 @param filename: The filename
757 @type session: SQLAlchemy
758 @param session: Optional SQL session object (a temporary one will be
759 generated if not supplied). If not passed, a commit will be performed at
760 the end of the function, otherwise the caller is responsible for committing.
763 @return: the database id for the given component
766 q = session.query(ContentFilename).filter_by(filename=filename)
769 ret = q.one().cafilename_id
770 except NoResultFound:
771 cf = ContentFilename()
772 cf.filename = filename
774 session.commit_or_flush()
775 ret = cf.cafilename_id
779 __all__.append('get_or_set_contents_file_id')
782 def get_contents(suite, overridetype, section=None, session=None):
784 Returns contents for a suite / overridetype combination, limiting
785 to a section if not None.
788 @param suite: Suite object
790 @type overridetype: OverrideType
791 @param overridetype: OverrideType object
793 @type section: Section
794 @param section: Optional section object to limit results to
796 @type session: SQLAlchemy
797 @param session: Optional SQL session object (a temporary one will be
798 generated if not supplied)
801 @return: ResultsProxy object set up to return tuples of (filename, section,
805 # find me all of the contents for a given suite
806 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
810 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
811 JOIN content_file_names n ON (c.filename=n.id)
812 JOIN binaries b ON (b.id=c.binary_pkg)
813 JOIN override o ON (o.package=b.package)
814 JOIN section s ON (s.id=o.section)
815 WHERE o.suite = :suiteid AND o.type = :overridetypeid
816 AND b.type=:overridetypename"""
818 vals = {'suiteid': suite.suite_id,
819 'overridetypeid': overridetype.overridetype_id,
820 'overridetypename': overridetype.overridetype}
822 if section is not None:
823 contents_q += " AND s.id = :sectionid"
824 vals['sectionid'] = section.section_id
826 contents_q += " ORDER BY fn"
828 return session.execute(contents_q, vals)
830 __all__.append('get_contents')
832 ################################################################################
834 class ContentFilepath(object):
835 def __init__(self, *args, **kwargs):
839 return '<ContentFilepath %s>' % self.filepath
841 __all__.append('ContentFilepath')
844 def get_or_set_contents_path_id(filepath, session=None):
846 Returns database id for given path.
848 If no matching file is found, a row is inserted.
850 @type filepath: string
851 @param filepath: The filepath
853 @type session: SQLAlchemy
854 @param session: Optional SQL session object (a temporary one will be
855 generated if not supplied). If not passed, a commit will be performed at
856 the end of the function, otherwise the caller is responsible for committing.
859 @return: the database id for the given path
862 q = session.query(ContentFilepath).filter_by(filepath=filepath)
865 ret = q.one().cafilepath_id
866 except NoResultFound:
867 cf = ContentFilepath()
868 cf.filepath = filepath
870 session.commit_or_flush()
871 ret = cf.cafilepath_id
875 __all__.append('get_or_set_contents_path_id')
877 ################################################################################
879 class ContentAssociation(object):
880 def __init__(self, *args, **kwargs):
884 return '<ContentAssociation %s>' % self.ca_id
886 __all__.append('ContentAssociation')
888 def insert_content_paths(binary_id, fullpaths, session=None):
890 Make sure given path is associated with given binary id
893 @param binary_id: the id of the binary
894 @type fullpaths: list
895 @param fullpaths: the list of paths of the file being associated with the binary
896 @type session: SQLAlchemy session
897 @param session: Optional SQLAlchemy session. If this is passed, the caller
898 is responsible for ensuring a transaction has begun and committing the
899 results or rolling back based on the result code. If not passed, a commit
900 will be performed at the end of the function, otherwise the caller is
901 responsible for committing.
903 @return: True upon success
908 session = DBConn().session()
913 def generate_path_dicts():
914 for fullpath in fullpaths:
915 if fullpath.startswith( './' ):
916 fullpath = fullpath[2:]
918 yield {'filename':fullpath, 'id': binary_id }
920 for d in generate_path_dicts():
921 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
930 traceback.print_exc()
932 # Only rollback if we set up the session ourself
939 __all__.append('insert_content_paths')
941 ################################################################################
943 class DSCFile(object):
944 def __init__(self, *args, **kwargs):
948 return '<DSCFile %s>' % self.dscfile_id
950 __all__.append('DSCFile')
953 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
955 Returns a list of DSCFiles which may be empty
957 @type dscfile_id: int (optional)
958 @param dscfile_id: the dscfile_id of the DSCFiles to find
960 @type source_id: int (optional)
961 @param source_id: the source id related to the DSCFiles to find
963 @type poolfile_id: int (optional)
964 @param poolfile_id: the poolfile id related to the DSCFiles to find
967 @return: Possibly empty list of DSCFiles
970 q = session.query(DSCFile)
972 if dscfile_id is not None:
973 q = q.filter_by(dscfile_id=dscfile_id)
975 if source_id is not None:
976 q = q.filter_by(source_id=source_id)
978 if poolfile_id is not None:
979 q = q.filter_by(poolfile_id=poolfile_id)
983 __all__.append('get_dscfiles')
985 ################################################################################
987 class ExternalOverride(ORMObject):
988 def __init__(self, *args, **kwargs):
992 return '<ExternalOverride %s = %s: %s>' % (self.package, self.key, self.value)
994 __all__.append('ExternalOverride')
996 ################################################################################
998 class PoolFile(ORMObject):
999 def __init__(self, filename = None, filesize = -1, \
1001 self.filename = filename
1002 self.filesize = filesize
1003 self.md5sum = md5sum
1007 session = DBConn().session().object_session(self)
1008 af = session.query(ArchiveFile).join(Archive) \
1009 .filter(ArchiveFile.file == self) \
1010 .order_by(Archive.tainted.desc()).first()
1014 def component(self):
1015 session = DBConn().session().object_session(self)
1016 component_id = session.query(ArchiveFile.component_id).filter(ArchiveFile.file == self) \
1017 .group_by(ArchiveFile.component_id).one()
1018 return session.query(Component).get(component_id)
1022 return os.path.basename(self.filename)
1024 def is_valid(self, filesize = -1, md5sum = None):
1025 return self.filesize == long(filesize) and self.md5sum == md5sum
1027 def properties(self):
1028 return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
1029 'sha256sum', 'source', 'binary', 'last_used']
1031 def not_null_constraints(self):
1032 return ['filename', 'md5sum']
1034 def identical_to(self, filename):
1036 compare size and hash with the given file
1039 @return: true if the given file has the same size and hash as this object; false otherwise
1041 st = os.stat(filename)
1042 if self.filesize != st.st_size:
1045 f = open(filename, "r")
1046 sha256sum = apt_pkg.sha256sum(f)
1047 if sha256sum != self.sha256sum:
1052 __all__.append('PoolFile')
1055 def get_poolfile_like_name(filename, session=None):
1057 Returns an array of PoolFile objects which are like the given name
1059 @type filename: string
1060 @param filename: the filename of the file to check against the DB
1063 @return: array of PoolFile objects
1066 # TODO: There must be a way of properly using bind parameters with %FOO%
1067 q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1071 __all__.append('get_poolfile_like_name')
1073 ################################################################################
1075 class Fingerprint(ORMObject):
1076 def __init__(self, fingerprint = None):
1077 self.fingerprint = fingerprint
1079 def properties(self):
1080 return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
1083 def not_null_constraints(self):
1084 return ['fingerprint']
1086 __all__.append('Fingerprint')
1089 def get_fingerprint(fpr, session=None):
1091 Returns Fingerprint object for given fpr.
1094 @param fpr: The fpr to find / add
1096 @type session: SQLAlchemy
1097 @param session: Optional SQL session object (a temporary one will be
1098 generated if not supplied).
1101 @return: the Fingerprint object for the given fpr or None
1104 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1108 except NoResultFound:
1113 __all__.append('get_fingerprint')
1116 def get_or_set_fingerprint(fpr, session=None):
1118 Returns Fingerprint object for given fpr.
1120 If no matching fpr is found, a row is inserted.
1123 @param fpr: The fpr to find / add
1125 @type session: SQLAlchemy
1126 @param session: Optional SQL session object (a temporary one will be
1127 generated if not supplied). If not passed, a commit will be performed at
1128 the end of the function, otherwise the caller is responsible for commiting.
1129 A flush will be performed either way.
1132 @return: the Fingerprint object for the given fpr
1135 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1139 except NoResultFound:
1140 fingerprint = Fingerprint()
1141 fingerprint.fingerprint = fpr
1142 session.add(fingerprint)
1143 session.commit_or_flush()
1148 __all__.append('get_or_set_fingerprint')
1150 ################################################################################
1152 # Helper routine for Keyring class
1153 def get_ldap_name(entry):
1155 for k in ["cn", "mn", "sn"]:
1157 if ret and ret[0] != "" and ret[0] != "-":
1159 return " ".join(name)
1161 ################################################################################
1163 class Keyring(object):
1167 def __init__(self, *args, **kwargs):
1171 return '<Keyring %s>' % self.keyring_name
1173 def de_escape_gpg_str(self, txt):
1174 esclist = re.split(r'(\\x..)', txt)
1175 for x in range(1,len(esclist),2):
1176 esclist[x] = "%c" % (int(esclist[x][2:],16))
1177 return "".join(esclist)
1179 def parse_address(self, uid):
1180 """parses uid and returns a tuple of real name and email address"""
1182 (name, address) = email.Utils.parseaddr(uid)
1183 name = re.sub(r"\s*[(].*[)]", "", name)
1184 name = self.de_escape_gpg_str(name)
1187 return (name, address)
1189 def load_keys(self, keyring):
1190 if not self.keyring_id:
1191 raise Exception('Must be initialized with database information')
1193 cmd = ["gpg", "--no-default-keyring", "--keyring", keyring,
1194 "--with-colons", "--fingerprint", "--fingerprint"]
1195 p = daklib.daksubprocess.Popen(cmd, stdout=subprocess.PIPE)
1198 need_fingerprint = False
1200 for line in p.stdout:
1201 field = line.split(":")
1202 if field[0] == "pub":
1205 (name, addr) = self.parse_address(field[9])
1207 self.keys[key]["email"] = addr
1208 self.keys[key]["name"] = name
1209 need_fingerprint = True
1210 elif key and field[0] == "uid":
1211 (name, addr) = self.parse_address(field[9])
1212 if "email" not in self.keys[key] and "@" in addr:
1213 self.keys[key]["email"] = addr
1214 self.keys[key]["name"] = name
1215 elif need_fingerprint and field[0] == "fpr":
1216 self.keys[key]["fingerprints"] = [field[9]]
1217 self.fpr_lookup[field[9]] = key
1218 need_fingerprint = False
1222 raise subprocess.CalledProcessError(r, cmd)
# Attach Debian login names (uids) to previously loaded keys by querying
# the project's LDAP directory, and ensure matching rows in the uid table.
# Returns (byname, byuid) lookup dictionaries.
1224 def import_users_from_ldap(self, session):
1228 LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
1229 LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
1230 ca_cert_file = cnf.get('Import-LDAP-Fingerprints::CACertFile')
1232 l = ldap.open(LDAPServer)
# When a CA certificate is configured, require a verified TLS connection.
1235 l.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_HARD)
1236 l.set_option(ldap.OPT_X_TLS_CACERTFILE, ca_cert_file)
1237 l.set_option(ldap.OPT_X_TLS_NEWCTX, True)
# Anonymous bind; search for accounts with a key fingerprint and the
# configured "valid" group id.
1240 l.simple_bind_s("","")
1241 Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
1242 "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
1243 ["uid", "keyfingerprint", "cn", "mn", "sn"])
1245 ldap_fin_uid_id = {}
1252 uid = entry["uid"][0]
1253 name = get_ldap_name(entry)
1254 fingerprints = entry["keyFingerPrint"]
1256 for f in fingerprints:
# Map the LDAP fingerprint back to a key we loaded from the keyring;
# skip fingerprints we do not know about.
1257 key = self.fpr_lookup.get(f, None)
1258 if key not in self.keys:
1260 self.keys[key]["uid"] = uid
1264 keyid = get_or_set_uid(uid, session).uid_id
1265 byuid[keyid] = (uid, name)
1266 byname[uid] = (keyid, name)
1268 return (byname, byuid)
# Generate uid entries directly from keyring data (for keyrings that are
# not backed by LDAP).  `format` is a %-template applied to each key's
# email address to build the uid string.  Returns (byname, byuid).
1270 def generate_users_from_keyring(self, format, session):
1274 for x in self.keys.keys():
# Keys without a usable email address get a placeholder uid.
1275 if "email" not in self.keys[x]:
1277 self.keys[x]["uid"] = format % "invalid-uid"
1279 uid = format % self.keys[x]["email"]
1280 keyid = get_or_set_uid(uid, session).uid_id
1281 byuid[keyid] = (uid, self.keys[x]["name"])
1282 byname[uid] = (keyid, self.keys[x]["name"])
1283 self.keys[x]["uid"] = uid
# Register the shared placeholder uid as well so lookups resolve.
1286 uid = format % "invalid-uid"
1287 keyid = get_or_set_uid(uid, session).uid_id
1288 byuid[keyid] = (uid, "ungeneratable user id")
1289 byname[uid] = (keyid, "ungeneratable user id")
1291 return (byname, byuid)
1293 __all__.append('Keyring')
# Look up a Keyring row by its name.
1296 def get_keyring(keyring, session=None):
1298 If C{keyring} does not have an entry in the C{keyrings} table yet, return None
1299 If C{keyring} already has an entry, simply return the existing Keyring
1301 @type keyring: string
1302 @param keyring: the keyring name
1305 @return: the Keyring object for this keyring
1308 q = session.query(Keyring).filter_by(keyring_name=keyring)
1312 except NoResultFound:
1315 __all__.append('get_keyring')
# Return the names (paths) of all active keyrings, highest priority first.
1318 def get_active_keyring_paths(session=None):
1321 @return: list of active keyring paths
1323 return [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).order_by(desc(Keyring.priority)).all() ]
1325 __all__.append('get_active_keyring_paths')
1328 def get_primary_keyring_path(session=None):
1330 Get the full path to the highest priority active keyring
1333 @return: path to the active keyring with the highest priority or None if no
1334 keyring is configured
# Relies on get_active_keyring_paths() returning priority-descending order.
1336 keyrings = get_active_keyring_paths()
1338 if len(keyrings) > 0:
1343 __all__.append('get_primary_keyring_path')
1345 ################################################################################
# ORM-mapped row of the `changes` table (an uploaded .changes file).
1347 class DBChange(object):
1348 def __init__(self, *args, **kwargs):
1352 return '<DBChange %s>' % self.changesname
1354 __all__.append('DBChange')
1357 def get_dbchange(filename, session=None):
1359 returns DBChange object for given C{filename}.
1361 @type filename: string
1362 @param filename: the name of the file
1364 @type session: Session
1365 @param session: Optional SQLA session object (a temporary one will be
1366 generated if not supplied)
1369 @return: DBChange object for the given filename (C{None} if not present)
1372 q = session.query(DBChange).filter_by(changesname=filename)
1376 except NoResultFound:
1379 __all__.append('get_dbchange')
1381 ################################################################################
# ORM-mapped row of the `maintainer` table ("Name <email>" strings).
1383 class Maintainer(ORMObject):
1384 def __init__(self, name = None):
1387 def properties(self):
1388 return ['name', 'maintainer_id']
1390 def not_null_constraints(self):
# Split the stored "Name <email>" string via fix_maintainer(); an unset
# name yields a tuple of empty strings.
1393 def get_split_maintainer(self):
1394 if not hasattr(self, 'name') or self.name is None:
1395 return ('', '', '', '')
1397 return fix_maintainer(self.name.strip())
1399 __all__.append('Maintainer')
1402 def get_or_set_maintainer(name, session=None):
1404 Returns Maintainer object for given maintainer name.
1406 If no matching maintainer name is found, a row is inserted.
1409 @param name: The maintainer name to add
1411 @type session: SQLAlchemy
1412 @param session: Optional SQL session object (a temporary one will be
1413 generated if not supplied). If not passed, a commit will be performed at
1414 the end of the function, otherwise the caller is responsible for committing.
1415 A flush will be performed either way.
1418 @return: the Maintainer object for the given maintainer
1421 q = session.query(Maintainer).filter_by(name=name)
# Insert on miss; commit_or_flush() commits for private sessions and only
# flushes for caller-supplied ones.
1424 except NoResultFound:
1425 maintainer = Maintainer()
1426 maintainer.name = name
1427 session.add(maintainer)
1428 session.commit_or_flush()
1433 __all__.append('get_or_set_maintainer')
1436 def get_maintainer(maintainer_id, session=None):
1438 Return the name of the maintainer behind C{maintainer_id} or None if that
1439 maintainer_id is invalid.
1441 @type maintainer_id: int
1442 @param maintainer_id: the id of the maintainer
1445 @return: the Maintainer with this C{maintainer_id}
1448 return session.query(Maintainer).get(maintainer_id)
1450 __all__.append('get_maintainer')
1452 ################################################################################
# ORM-mapped row of the `new_comments` table (ftpmaster comments on
# packages sitting in a policy queue).
1454 class NewComment(object):
1455 def __init__(self, *args, **kwargs):
1459 return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
1461 __all__.append('NewComment')
1464 def has_new_comment(policy_queue, package, version, session=None):
1466 Returns true if the given combination of C{package}, C{version} has a comment.
1468 @type package: string
1469 @param package: name of the package
1471 @type version: string
1472 @param version: package version
1474 @type session: Session
1475 @param session: Optional SQLA session object (a temporary one will be
1476 generated if not supplied)
1482 q = session.query(NewComment).filter_by(policy_queue=policy_queue)
1483 q = q.filter_by(package=package)
1484 q = q.filter_by(version=version)
1486 return bool(q.count() > 0)
1488 __all__.append('has_new_comment')
# Query NewComment rows; each filter is applied only when the
# corresponding argument is given.
1491 def get_new_comments(policy_queue, package=None, version=None, comment_id=None, session=None):
1493 Returns (possibly empty) list of NewComment objects for the given
1496 @type package: string (optional)
1497 @param package: name of the package
1499 @type version: string (optional)
1500 @param version: package version
1502 @type comment_id: int (optional)
1503 @param comment_id: An id of a comment
1505 @type session: Session
1506 @param session: Optional SQLA session object (a temporary one will be
1507 generated if not supplied)
1510 @return: A (possibly empty) list of NewComment objects will be returned
1513 q = session.query(NewComment).filter_by(policy_queue=policy_queue)
1514 if package is not None: q = q.filter_by(package=package)
1515 if version is not None: q = q.filter_by(version=version)
1516 if comment_id is not None: q = q.filter_by(comment_id=comment_id)
1520 __all__.append('get_new_comments')
1522 ################################################################################
# ORM-mapped row of the `override` table: per-suite/component section and
# priority assignment for a package.
1524 class Override(ORMObject):
1525 def __init__(self, package = None, suite = None, component = None, overridetype = None, \
1526 section = None, priority = None):
1527 self.package = package
1529 self.component = component
1530 self.overridetype = overridetype
1531 self.section = section
1532 self.priority = priority
1534 def properties(self):
1535 return ['package', 'suite', 'component', 'overridetype', 'section', \
1538 def not_null_constraints(self):
1539 return ['package', 'suite', 'component', 'overridetype', 'section']
1541 __all__.append('Override')
1544 def get_override(package, suite=None, component=None, overridetype=None, session=None):
1546 Returns Override object for the given parameters
1548 @type package: string
1549 @param package: The name of the package
1551 @type suite: string, list or None
1552 @param suite: The name of the suite (or suites if a list) to limit to. If
1553 None, don't limit. Defaults to None.
1555 @type component: string, list or None
1556 @param component: The name of the component (or components if a list) to
1557 limit to. If None, don't limit. Defaults to None.
1559 @type overridetype: string, list or None
1560 @param overridetype: The name of the overridetype (or overridetypes if a list) to
1561 limit to. If None, don't limit. Defaults to None.
1563 @type session: Session
1564 @param session: Optional SQLA session object (a temporary one will be
1565 generated if not supplied)
1568 @return: A (possibly empty) list of Override objects will be returned
1571 q = session.query(Override)
1572 q = q.filter_by(package=package)
# Each optional filter accepts a scalar or a list; scalars are wrapped so
# a single IN clause can be used in every case.
1574 if suite is not None:
1575 if not isinstance(suite, list): suite = [suite]
1576 q = q.join(Suite).filter(Suite.suite_name.in_(suite))
1578 if component is not None:
1579 if not isinstance(component, list): component = [component]
1580 q = q.join(Component).filter(Component.component_name.in_(component))
1582 if overridetype is not None:
1583 if not isinstance(overridetype, list): overridetype = [overridetype]
1584 q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))
1588 __all__.append('get_override')
1591 ################################################################################
# ORM-mapped row of the `override_type` table (e.g. deb, udeb, dsc).
1593 class OverrideType(ORMObject):
1594 def __init__(self, overridetype = None):
1595 self.overridetype = overridetype
1597 def properties(self):
1598 return ['overridetype', 'overridetype_id', 'overrides_count']
1600 def not_null_constraints(self):
1601 return ['overridetype']
1603 __all__.append('OverrideType')
1606 def get_override_type(override_type, session=None):
1608 Returns OverrideType object for given C{override type}.
1610 @type override_type: string
1611 @param override_type: The name of the override type
1613 @type session: Session
1614 @param session: Optional SQLA session object (a temporary one will be
1615 generated if not supplied)
1618 @return: the database id for the given override type
1621 q = session.query(OverrideType).filter_by(overridetype=override_type)
1625 except NoResultFound:
1628 __all__.append('get_override_type')
1630 ################################################################################
# ORM-mapped row of the `policy_queue` table (NEW, byhand, embargoed, ...).
1632 class PolicyQueue(object):
1633 def __init__(self, *args, **kwargs):
1637 return '<PolicyQueue %s>' % self.queue_name
1639 __all__.append('PolicyQueue')
1642 def get_policy_queue(queuename, session=None):
1644 Returns PolicyQueue object for given C{queue name}
1646 @type queuename: string
1647 @param queuename: The name of the queue
1649 @type session: Session
1650 @param session: Optional SQLA session object (a temporary one will be
1651 generated if not supplied)
1654 @return: PolicyQueue object for the given queue
1657 q = session.query(PolicyQueue).filter_by(queue_name=queuename)
1661 except NoResultFound:
1664 __all__.append('get_policy_queue')
1666 ################################################################################
# An upload waiting in a policy queue.  Ordering (Python 2 __cmp__) sorts
# by source name, then version, placing sourceful uploads before
# binary-only ones, and finally by .changes file name as a tie-breaker.
1668 class PolicyQueueUpload(object):
1669 def __cmp__(self, other):
1670 ret = cmp(self.changes.source, other.changes.source)
1672 ret = apt_pkg.version_compare(self.changes.version, other.changes.version)
# Sourceful upload sorts relative to binary-only upload.
1674 if self.source is not None and other.source is None:
1676 elif self.source is None and other.source is not None:
1679 ret = cmp(self.changes.changesname, other.changes.changesname)
1682 __all__.append('PolicyQueueUpload')
1684 ################################################################################
# A byhand file associated with a policy-queue upload; plain mapped class
# with no behaviour of its own.
1686 class PolicyQueueByhandFile(object):
1689 __all__.append('PolicyQueueByhandFile')
1691 ################################################################################
# ORM-mapped row of the `priority` table (required, important, ...).
1693 class Priority(ORMObject):
1694 def __init__(self, priority = None, level = None):
1695 self.priority = priority
1698 def properties(self):
1699 return ['priority', 'priority_id', 'level', 'overrides_count']
1701 def not_null_constraints(self):
1702 return ['priority', 'level']
# Allow direct comparison against a plain priority-name string.
1704 def __eq__(self, val):
1705 if isinstance(val, str):
1706 return (self.priority == val)
1707 # This signals to use the normal comparison operator
1708 return NotImplemented
1710 def __ne__(self, val):
1711 if isinstance(val, str):
1712 return (self.priority != val)
1713 # This signals to use the normal comparison operator
1714 return NotImplemented
1716 __all__.append('Priority')
1719 def get_priority(priority, session=None):
1721 Returns Priority object for given C{priority name}.
1723 @type priority: string
1724 @param priority: The name of the priority
1726 @type session: Session
1727 @param session: Optional SQLA session object (a temporary one will be
1728 generated if not supplied)
1731 @return: Priority object for the given priority
1734 q = session.query(Priority).filter_by(priority=priority)
1738 except NoResultFound:
1741 __all__.append('get_priority')
1744 def get_priorities(session=None):
1746 Returns dictionary of priority names -> id mappings
1748 @type session: Session
1749 @param session: Optional SQL session object (a temporary one will be
1750 generated if not supplied)
1753 @return: dictionary of priority names -> id mappings
1757 q = session.query(Priority)
1759 ret[x.priority] = x.priority_id
1763 __all__.append('get_priorities')
1765 ################################################################################
# ORM-mapped row of the `section` table (admin, devel, libs, ...).
1767 class Section(ORMObject):
1768 def __init__(self, section = None):
1769 self.section = section
1771 def properties(self):
1772 return ['section', 'section_id', 'overrides_count']
1774 def not_null_constraints(self):
# Allow direct comparison against a plain section-name string.
1777 def __eq__(self, val):
1778 if isinstance(val, str):
1779 return (self.section == val)
1780 # This signals to use the normal comparison operator
1781 return NotImplemented
1783 def __ne__(self, val):
1784 if isinstance(val, str):
1785 return (self.section != val)
1786 # This signals to use the normal comparison operator
1787 return NotImplemented
1789 __all__.append('Section')
1792 def get_section(section, session=None):
1794 Returns Section object for given C{section name}.
1796 @type section: string
1797 @param section: The name of the section
1799 @type session: Session
1800 @param session: Optional SQLA session object (a temporary one will be
1801 generated if not supplied)
1804 @return: Section object for the given section name
1807 q = session.query(Section).filter_by(section=section)
1811 except NoResultFound:
1814 __all__.append('get_section')
1817 def get_sections(session=None):
1819 Returns dictionary of section names -> id mappings
1821 @type session: Session
1822 @param session: Optional SQL session object (a temporary one will be
1823 generated if not supplied)
1826 @return: dictionary of section names -> id mappings
1830 q = session.query(Section)
1832 ret[x.section] = x.section_id
1836 __all__.append('get_sections')
1838 ################################################################################
# ORM-mapped row of the `signature_history` table, used to detect replayed
# signatures: one row per (fingerprint, timestamp, content hash) triple.
1840 class SignatureHistory(ORMObject):
# NOTE(review): takes `cls` but assigns to `self`; the instance creation
# happens on lines not visible in this excerpt — presumably an alternate
# constructor (classmethod). Confirm against the full source.
1842 def from_signed_file(cls, signed_file):
1843 """signature history entry from signed file
1845 @type signed_file: L{daklib.gpg.SignedFile}
1846 @param signed_file: signed file
1848 @rtype: L{SignatureHistory}
1851 self.fingerprint = signed_file.primary_fingerprint
1852 self.signature_timestamp = signed_file.signature_timestamp
1853 self.contents_sha1 = signed_file.contents_sha1()
# Find an existing history row matching this exact signature, if any.
1856 def query(self, session):
1857 return session.query(SignatureHistory).filter_by(fingerprint=self.fingerprint, signature_timestamp=self.signature_timestamp, contents_sha1=self.contents_sha1).first()
1859 __all__.append('SignatureHistory')
1861 ################################################################################
# ORM-mapped row of the `src_contents` table: a file name contained in a
# source package.
1863 class SrcContents(ORMObject):
1864 def __init__(self, file = None, source = None):
1866 self.source = source
1868 def properties(self):
1869 return ['file', 'source']
1871 __all__.append('SrcContents')
1873 ################################################################################
1875 from debian.debfile import Deb822
1877 # Temporary Deb822 subclass to fix bugs with : handling; see #597249
# Hand-rolled RFC 822-style parser: "key: value" lines start a field,
# continuation lines (leading whitespace) extend the previous field.
1878 class Dak822(Deb822):
1879 def _internal_parser(self, sequence, fields=None):
1880 # The key is non-whitespace, non-colon characters before any colon.
1881 key_part = r"^(?P<key>[^: \t\n\r\f\v]+)\s*:\s*"
1882 single = re.compile(key_part + r"(?P<data>\S.*?)\s*$")
1883 multi = re.compile(key_part + r"$")
1884 multidata = re.compile(r"^\s(?P<data>.+?)\s*$")
# When `fields` is given, only those field names are kept.
1886 wanted_field = lambda f: fields is None or f in fields
# basestring only exists on Python 2; this parser is Python-2-only code.
1888 if isinstance(sequence, basestring):
1889 sequence = sequence.splitlines()
1893 for line in self.gpg_stripped_paragraph(sequence):
# Case 1: "key: value" on one line — flush the previous field first.
1894 m = single.match(line)
1897 self[curkey] = content
1899 if not wanted_field(m.group('key')):
1903 curkey = m.group('key')
1904 content = m.group('data')
# Case 2: "key:" with the value entirely on continuation lines.
1907 m = multi.match(line)
1910 self[curkey] = content
1912 if not wanted_field(m.group('key')):
1916 curkey = m.group('key')
# Case 3: continuation line — append to the field being accumulated.
1920 m = multidata.match(line)
1922 content += '\n' + line # XXX not m.group('data')?
# Flush the final field of the paragraph.
1926 self[curkey] = content
# ORM-mapped row of the `source` table: one source package version in the
# pool, with its maintainer, uploader, .dsc pool file and fingerprint.
1929 class DBSource(ORMObject):
1930 def __init__(self, source = None, version = None, maintainer = None, \
1931 changedby = None, poolfile = None, install_date = None, fingerprint = None):
1932 self.source = source
1933 self.version = version
1934 self.maintainer = maintainer
1935 self.changedby = changedby
1936 self.poolfile = poolfile
1937 self.install_date = install_date
1938 self.fingerprint = fingerprint
1942 return self.source_id
1944 def properties(self):
1945 return ['source', 'source_id', 'maintainer', 'changedby', \
1946 'fingerprint', 'poolfile', 'version', 'suites_count', \
1947 'install_date', 'binaries_count', 'uploaders_count']
1949 def not_null_constraints(self):
1950 return ['source', 'version', 'install_date', 'maintainer', \
1951 'changedby', 'poolfile']
1953 def read_control_fields(self):
1955 Reads the control information from a dsc
1958 @return: fields is the dsc information in a dictionary form
# Parse the .dsc straight out of the pool with the Dak822 parser above.
1960 fullpath = self.poolfile.fullpath
1961 fields = Dak822(open(self.poolfile.fullpath, 'r'))
1964 metadata = association_proxy('key', 'value')
1966 def scan_contents(self):
1968 Returns a set of names for non directories. The path names are
1969 normalized after converting them from either utf-8 or iso8859-1
1972 fullpath = self.poolfile.fullpath
# Imported here to avoid a circular import at module load time.
1973 from daklib.contents import UnpackedSource
1974 unpacked = UnpackedSource(fullpath)
1976 for name in unpacked.get_all_filenames():
1977 # enforce proper utf-8 encoding
1979 name.decode('utf-8')
1980 except UnicodeDecodeError:
# Fall back: treat undecodable names as latin-1 and re-encode as utf-8.
1981 name = name.decode('iso8859-1').encode('utf-8')
# Dictionary-like accessor over this source's SourceMetadata rows.
1987 session = object_session(self)
1988 query = session.query(SourceMetadata).filter_by(source=self)
1989 return MetadataProxy(session, query)
1991 __all__.append('DBSource')
1994 def source_exists(source, source_version, suites = ["any"], session=None):
1996 Ensure that source exists somewhere in the archive for the binary
1997 upload being processed.
1998 1. exact match => 1.0-3
1999 2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
2001 @type source: string
2002 @param source: source name
2004 @type source_version: string
2005 @param source_version: expected source version
2008 @param suites: list of suites to check in, default I{any}
2010 @type session: Session
2011 @param session: Optional SQLA session object (a temporary one will be
2012 generated if not supplied)
2015 @return: returns 1 if a source with expected version is found, otherwise 0
# Strip a binNMU suffix (+bN) so binary-only NMUs match their source.
2022 from daklib.regexes import re_bin_only_nmu
2023 orig_source_version = re_bin_only_nmu.sub('', source_version)
2025 for suite in suites:
2026 q = session.query(DBSource).filter_by(source=source). \
2027 filter(DBSource.version.in_([source_version, orig_source_version]))
2029 # source must exist in 'suite' or a suite that is enhanced by 'suite'
2030 s = get_suite(suite, session)
# Widen the search to suites referenced by this suite's "Enhances"
# version checks.
2032 enhances_vcs = session.query(VersionCheck).filter(VersionCheck.suite==s).filter_by(check='Enhances')
2033 considered_suites = [ vc.reference for vc in enhances_vcs ]
2034 considered_suites.append(s)
2036 q = q.filter(DBSource.suites.any(Suite.suite_id.in_([s.suite_id for s in considered_suites])))
2041 # No source found so return not ok
2046 __all__.append('source_exists')
# List all suites that contain any version of the given source package.
2049 def get_suites_source_in(source, session=None):
2051 Returns list of Suite objects which given C{source} name is in
2054 @param source: DBSource package name to search for
2057 @return: list of Suite objects for the given source
2060 return session.query(Suite).filter(Suite.sources.any(source=source)).all()
2062 __all__.append('get_suites_source_in')
2065 def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
2067 Returns list of DBSource objects for given C{source} name and other parameters
2070 @param source: DBSource package name to search for
2072 @type version: str or None
2073 @param version: DBSource version name to search for or None if not applicable
2075 @type dm_upload_allowed: bool
2076 @param dm_upload_allowed: If None, no effect. If True or False, only
2077 return packages with that dm_upload_allowed setting
2079 @type session: Session
2080 @param session: Optional SQL session object (a temporary one will be
2081 generated if not supplied)
2084 @return: list of DBSource objects for the given name (may be empty)
2087 q = session.query(DBSource).filter_by(source=source)
# Optional narrowing filters.
2089 if version is not None:
2090 q = q.filter_by(version=version)
2092 if dm_upload_allowed is not None:
2093 q = q.filter_by(dm_upload_allowed=dm_upload_allowed)
2097 __all__.append('get_sources_from_name')
2099 # FIXME: This function fails badly if it finds more than 1 source package and
2100 # its implementation is trivial enough to be inlined.
2102 def get_source_in_suite(source, suite_name, session=None):
2104 Returns a DBSource object for a combination of C{source} and C{suite_name}.
2106 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2107 - B{suite_name} - a suite name, eg. I{unstable}
2109 @type source: string
2110 @param source: source package name
2112 @type suite_name: string
2113 @param suite_name: the suite name
2116 @return: the version for I{source} in I{suite}
2119 suite = get_suite(suite_name, session)
# Query.one() raises NoResultFound for a miss (caught below) and
# MultipleResultsFound when more than one row matches (the FIXME above).
2123 return suite.get_sources(source).one()
2124 except NoResultFound:
2127 __all__.append('get_source_in_suite')
2130 def import_metadata_into_db(obj, session=None):
2132 This routine works on either DBBinary or DBSource objects and imports
2133 their metadata into the database
# Read the object's control fields and store each one as a
# (metadata key -> value) row, coercing values to byte strings.
2135 fields = obj.read_control_fields()
2136 for k in fields.keys():
# Encoding fallback chain: plain str() first, then explicit utf-8, then
# iso8859-1; anything else propagates as an error to reject the upload.
2139 val = str(fields[k])
2140 except UnicodeEncodeError:
2141 # Fall back to UTF-8
2143 val = fields[k].encode('utf-8')
2144 except UnicodeEncodeError:
2145 # Finally try iso8859-1
2146 val = fields[k].encode('iso8859-1')
2147 # Otherwise we allow the exception to percolate up and we cause
2148 # a reject as someone is playing silly buggers
2150 obj.metadata[get_or_set_metadatakey(k, session)] = val
2152 session.commit_or_flush()
2154 __all__.append('import_metadata_into_db')
2156 ################################################################################
# ORM-mapped row of the `src_format` table (e.g. "3.0 (quilt)").
2158 class SrcFormat(object):
2159 def __init__(self, *args, **kwargs):
2163 return '<SrcFormat %s>' % (self.format_name)
2165 __all__.append('SrcFormat')
2167 ################################################################################
# (display name, attribute name) pairs rendered by Suite.details() below.
2169 SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
2170 ('SuiteID', 'suite_id'),
2171 ('Version', 'version'),
2172 ('Origin', 'origin'),
2174 ('Description', 'description'),
2175 ('Untouchable', 'untouchable'),
2176 ('Announce', 'announce'),
2177 ('Codename', 'codename'),
2178 ('OverrideCodename', 'overridecodename'),
2179 ('ValidTime', 'validtime'),
2180 ('Priority', 'priority'),
2181 ('NotAutomatic', 'notautomatic'),
2182 ('CopyChanges', 'copychanges'),
2183 ('OverrideSuite', 'overridesuite')]
2185 # Why the heck don't we have any UNIQUE constraints in table suite?
2186 # TODO: Add UNIQUE constraints for appropriate columns.
2187 class Suite(ORMObject):
2188 def __init__(self, suite_name = None, version = None):
2189 self.suite_name = suite_name
2190 self.version = version
2192 def properties(self):
2193 return ['suite_name', 'version', 'sources_count', 'binaries_count', \
2196 def not_null_constraints(self):
2197 return ['suite_name']
# Allow direct comparison against a plain suite-name string.
2199 def __eq__(self, val):
2200 if isinstance(val, str):
2201 return (self.suite_name == val)
2202 # This signals to use the normal comparison operator
2203 return NotImplemented
2205 def __ne__(self, val):
2206 if isinstance(val, str):
2207 return (self.suite_name != val)
2208 # This signals to use the normal comparison operator
2209 return NotImplemented
# Render the SUITE_FIELDS that are set on this suite, one per line.
2213 for disp, field in SUITE_FIELDS:
2214 val = getattr(self, field, None)
2216 ret.append("%s: %s" % (disp, val))
2218 return "\n".join(ret)
2220 def get_architectures(self, skipsrc=False, skipall=False):
2222 Returns list of Architecture objects
2224 @type skipsrc: boolean
2225 @param skipsrc: Whether to skip returning the 'source' architecture entry
2228 @type skipall: boolean
2229 @param skipall: Whether to skip returning the 'all' architecture entry
2233 @return: list of Architecture objects for the given name (may be empty)
2236 q = object_session(self).query(Architecture).with_parent(self)
2238 q = q.filter(Architecture.arch_string != 'source')
2240 q = q.filter(Architecture.arch_string != 'all')
2241 return q.order_by(Architecture.arch_string).all()
2243 def get_sources(self, source):
2245 Returns a query object representing DBSource that is part of C{suite}.
2247 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2249 @type source: string
2250 @param source: source package name
2252 @rtype: sqlalchemy.orm.query.Query
2253 @return: a query of DBSource
2257 session = object_session(self)
2258 return session.query(DBSource).filter_by(source = source). \
# Resolve the suite whose override table applies to this suite (itself
# when overridesuite is unset).
2261 def get_overridesuite(self):
2262 if self.overridesuite is None:
2265 return object_session(self).query(Suite).filter_by(suite_name=self.overridesuite).one()
# Location of this suite's dists/ tree inside its archive.
2269 return os.path.join(self.archive.path, 'dists', self.suite_name)
# Name used in Release files: release_suite when set, else the suite name.
2272 def release_suite_output(self):
2273 if self.release_suite is not None:
2274 return self.release_suite
2275 return self.suite_name
2277 __all__.append('Suite')
2280 def get_suite(suite, session=None):
2282 Returns Suite object for given C{suite name}.
2285 @param suite: The name of the suite
2287 @type session: Session
2288 @param session: Optional SQLA session object (a temporary one will be
2289 generated if not supplied)
2292 @return: Suite object for the requested suite name (None if not present)
# Lookup order: suite_name first, then codename, then release_suite.
2295 # Start by looking for the dak internal name
2296 q = session.query(Suite).filter_by(suite_name=suite)
2299 except NoResultFound:
2303 q = session.query(Suite).filter_by(codename=suite)
2306 except NoResultFound:
2309 # Finally give release_suite a try
2310 q = session.query(Suite).filter_by(release_suite=suite)
2313 except NoResultFound:
2316 __all__.append('get_suite')
2318 ################################################################################
# Thin wrapper around Suite.get_architectures(); the AttributeError catch
# covers get_suite() returning None for an unknown suite.
2321 def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
2323 Returns list of Architecture objects for given C{suite} name. The list is
2324 empty if suite does not exist.
2327 @param suite: Suite name to search for
2329 @type skipsrc: boolean
2330 @param skipsrc: Whether to skip returning the 'source' architecture entry
2333 @type skipall: boolean
2334 @param skipall: Whether to skip returning the 'all' architecture entry
2337 @type session: Session
2338 @param session: Optional SQL session object (a temporary one will be
2339 generated if not supplied)
2342 @return: list of Architecture objects for the given name (may be empty)
2346 return get_suite(suite, session).get_architectures(skipsrc, skipall)
2347 except AttributeError:
2350 __all__.append('get_suite_architectures')
2352 ################################################################################
# ORM-mapped row of the `uid` table (an uploader identity).
2354 class Uid(ORMObject):
2355 def __init__(self, uid = None, name = None):
# Allow direct comparison against a plain uid string.
2359 def __eq__(self, val):
2360 if isinstance(val, str):
2361 return (self.uid == val)
2362 # This signals to use the normal comparison operator
2363 return NotImplemented
2365 def __ne__(self, val):
2366 if isinstance(val, str):
2367 return (self.uid != val)
2368 # This signals to use the normal comparison operator
2369 return NotImplemented
2371 def properties(self):
2372 return ['uid', 'name', 'fingerprint']
2374 def not_null_constraints(self):
2377 __all__.append('Uid')
2380 def get_or_set_uid(uidname, session=None):
2382 Returns uid object for given uidname.
2384 If no matching uidname is found, a row is inserted.
2386 @type uidname: string
2387 @param uidname: The uid to add
2389 @type session: SQLAlchemy
2390 @param session: Optional SQL session object (a temporary one will be
2391 generated if not supplied). If not passed, a commit will be performed at
2392 the end of the function, otherwise the caller is responsible for committing.
2395 @return: the uid object for the given uidname
2398 q = session.query(Uid).filter_by(uid=uidname)
# Insert on miss; commit_or_flush() commits private sessions, flushes
# caller-supplied ones.
2402 except NoResultFound:
2406 session.commit_or_flush()
2411 __all__.append('get_or_set_uid')
# Resolve the Uid attached to a fingerprint (None if no match).
2414 def get_uid_from_fingerprint(fpr, session=None):
2415 q = session.query(Uid)
2416 q = q.join(Fingerprint).filter_by(fingerprint=fpr)
2420 except NoResultFound:
2423 __all__.append('get_uid_from_fingerprint')
2425 ################################################################################
# ORM-mapped row of the `metadata_keys` table (control field names).
2427 class MetadataKey(ORMObject):
2428 def __init__(self, key = None):
2431 def properties(self):
2434 def not_null_constraints(self):
2437 __all__.append('MetadataKey')
2440 def get_or_set_metadatakey(keyname, session=None):
2442 Returns MetadataKey object for given keyname.
2444 If no matching keyname is found, a row is inserted.
2446 @type keyname: string
2447 @param keyname: The keyname to add
2449 @type session: SQLAlchemy
2450 @param session: Optional SQL session object (a temporary one will be
2451 generated if not supplied). If not passed, a commit will be performed at
2452 the end of the function, otherwise the caller is responsible for committing.
2455 @return: the metadatakey object for the given keyname
2458 q = session.query(MetadataKey).filter_by(key=keyname)
# Insert on miss, as in get_or_set_uid() above.
2462 except NoResultFound:
2463 ret = MetadataKey(keyname)
2465 session.commit_or_flush()
2469 __all__.append('get_or_set_metadatakey')
2471 ################################################################################
# ORM-mapped row of `binaries_metadata`: one control field value of a
# binary package.
2473 class BinaryMetadata(ORMObject):
2474 def __init__(self, key = None, value = None, binary = None):
2477 self.binary = binary
2479 def properties(self):
2480 return ['binary', 'key', 'value']
2482 def not_null_constraints(self):
2485 __all__.append('BinaryMetadata')
2487 ################################################################################
# ORM-mapped row of `source_metadata`: one control field value of a
# source package.
2489 class SourceMetadata(ORMObject):
2490 def __init__(self, key = None, value = None, source = None):
2493 self.source = source
2495 def properties(self):
2496 return ['source', 'key', 'value']
2498 def not_null_constraints(self):
2501 __all__.append('SourceMetadata')
2503 ################################################################################
# Read-only dict-like view over a metadata query (used by
# DBSource/DBBinary to expose control fields by key name).
2505 class MetadataProxy(object):
2506 def __init__(self, session, query):
2507 self.session = session
# Resolve the key name to a MetadataKey row, then fetch the matching
# metadata row from the wrapped query; None at either step means "absent".
2510 def _get(self, key):
2511 metadata_key = self.session.query(MetadataKey).filter_by(key=key).first()
2512 if metadata_key is None:
2514 metadata = self.query.filter_by(key=metadata_key).first()
2517 def __contains__(self, key):
2518 if self._get(key) is not None:
2522 def __getitem__(self, key):
2523 metadata = self._get(key)
2524 if metadata is None:
2526 return metadata.value
# dict.get() analogue built on __getitem__ semantics.
2528 def get(self, key, default=None):
# ORM-mapped row of the `version_check` table: a constraint ("Enhances",
# "MustBeNewerThan", ...) between a suite and a reference suite.
2536 class VersionCheck(ORMObject):
2537 def __init__(self, *args, **kwargs):
2540 def properties(self):
2541 #return ['suite_id', 'check', 'reference_id']
2544 def not_null_constraints(self):
2545 return ['suite', 'check', 'reference']
2547 __all__.append('VersionCheck')
# Fetch the version checks for a suite, optionally limited to one check
# type; always returns something iterable.
2550 def get_version_checks(suite_name, check = None, session = None):
2551 suite = get_suite(suite_name, session)
2553 # Make sure that what we return is iterable so that list comprehensions
2554 # involving this don't cause a traceback
2556 q = session.query(VersionCheck).filter_by(suite=suite)
2558 q = q.filter_by(check=check)
2561 __all__.append('get_version_checks')
2563 ################################################################################
# Singleton-style database connection holder (Borg pattern: every instance
# shares __dict__ via a class-level __shared_state -- declared outside this
# view, presumably; TODO confirm). First initialisation sets up the engine,
# tables and mappers; later constructions are no-ops.
2565 class DBConn(object):
2567 database module init.
2571 def __init__(self, *args, **kwargs):
# Borg: all instances alias the shared state dict.
2572 self.__dict__ = self.__shared_state
2574 if not getattr(self, 'initialised', False):
2575 self.initialised = True
# Python 2 idiom (dict.has_key); debug enables SQL echo in __createconn.
2576 self.debug = kwargs.has_key('debug')
# Reflect every dak table and view from the live database schema and attach
# them as self.tbl_<name> / self.view_<name> attributes for the mappers.
# NOTE(review): decimated listing -- the `tables = [` / `views = [` list
# openings, many list entries and the closing brackets are missing from view.
2579 def __setuptables(self):
2582 'acl_architecture_map',
2583 'acl_fingerprint_map',
2590 'binaries_metadata',
2598 'external_overrides',
2599 'extra_src_references',
2601 'files_archive_map',
2607 # TODO: the maintainer column in table override should be removed.
2611 'policy_queue_upload',
2612 'policy_queue_upload_binaries_map',
2613 'policy_queue_byhand_file',
2616 'signature_history',
2625 'suite_architectures',
2626 'suite_build_queue_copy',
2627 'suite_src_formats',
# Entries below here belong to the database views list.
2633 'almost_obsolete_all_associations',
2634 'almost_obsolete_src_associations',
2635 'any_associations_source',
2636 'bin_associations_binaries',
2637 'binaries_suite_arch',
2640 'newest_all_associations',
2641 'newest_any_associations',
2643 'newest_src_association',
2644 'obsolete_all_associations',
2645 'obsolete_any_associations',
2646 'obsolete_any_by_all_associations',
2647 'obsolete_src_associations',
2650 'src_associations_bin',
2651 'src_associations_src',
2652 'suite_arch_by_name',
# autoload=True reflects column definitions from the database itself.
2655 for table_name in tables:
2656 table = Table(table_name, self.db_meta, \
2657 autoload=True, useexisting=True)
2658 setattr(self, 'tbl_%s' % table_name, table)
2660 for view_name in views:
2661 view = Table(view_name, self.db_meta, autoload=True)
2662 setattr(self, 'view_%s' % view_name, view)
# Wire every ORM class to its reflected table via classical SQLAlchemy
# mappers. `extension = validator` hooks the (externally defined) validation
# MapperExtension; explicit primaryjoin expressions disambiguate tables with
# several foreign keys to the same target (e.g. fingerprint, policy_queue).
# NOTE(review): decimated listing -- several `properties = dict(` openings
# and closing parentheses are missing from view; do not infer call boundaries
# from what is visible here.
2664 def __setupmappers(self):
2665 mapper(Architecture, self.tbl_architecture,
2666 properties = dict(arch_id = self.tbl_architecture.c.id,
2667 suites = relation(Suite, secondary=self.tbl_suite_architectures,
2668 order_by=self.tbl_suite.c.suite_name,
2669 backref=backref('architectures', order_by=self.tbl_architecture.c.arch_string))),
2670 extension = validator)
2672 mapper(ACL, self.tbl_acl,
2674 architectures = relation(Architecture, secondary=self.tbl_acl_architecture_map, collection_class=set),
2675 fingerprints = relation(Fingerprint, secondary=self.tbl_acl_fingerprint_map, collection_class=set),
2676 match_keyring = relation(Keyring, primaryjoin=(self.tbl_acl.c.match_keyring_id == self.tbl_keyrings.c.id)),
2677 per_source = relation(ACLPerSource, collection_class=set),
2680 mapper(ACLPerSource, self.tbl_acl_per_source,
2682 acl = relation(ACL),
# Two FKs into fingerprint -> explicit primaryjoin per relation.
2683 fingerprint = relation(Fingerprint, primaryjoin=(self.tbl_acl_per_source.c.fingerprint_id == self.tbl_fingerprint.c.id)),
2684 created_by = relation(Fingerprint, primaryjoin=(self.tbl_acl_per_source.c.created_by_id == self.tbl_fingerprint.c.id)),
2687 mapper(Archive, self.tbl_archive,
2688 properties = dict(archive_id = self.tbl_archive.c.id,
2689 archive_name = self.tbl_archive.c.name))
2691 mapper(ArchiveFile, self.tbl_files_archive_map,
2692 properties = dict(archive = relation(Archive, backref='files'),
2693 component = relation(Component),
2694 file = relation(PoolFile, backref='archives')))
2696 mapper(BuildQueue, self.tbl_build_queue,
2697 properties = dict(queue_id = self.tbl_build_queue.c.id,
2698 suite = relation(Suite, primaryjoin=(self.tbl_build_queue.c.suite_id==self.tbl_suite.c.id))))
# DBBinary: the central binary-package mapping; `key` exposes per-binary
# metadata as a dict keyed by MetadataKey.key.
2700 mapper(DBBinary, self.tbl_binaries,
2701 properties = dict(binary_id = self.tbl_binaries.c.id,
2702 package = self.tbl_binaries.c.package,
2703 version = self.tbl_binaries.c.version,
2704 maintainer_id = self.tbl_binaries.c.maintainer,
2705 maintainer = relation(Maintainer),
2706 source_id = self.tbl_binaries.c.source,
2707 source = relation(DBSource, backref='binaries'),
2708 arch_id = self.tbl_binaries.c.architecture,
2709 architecture = relation(Architecture),
2710 poolfile_id = self.tbl_binaries.c.file,
2711 poolfile = relation(PoolFile),
2712 binarytype = self.tbl_binaries.c.type,
2713 fingerprint_id = self.tbl_binaries.c.sig_fpr,
2714 fingerprint = relation(Fingerprint),
2715 install_date = self.tbl_binaries.c.install_date,
2716 suites = relation(Suite, secondary=self.tbl_bin_associations,
2717 backref=backref('binaries', lazy='dynamic')),
2718 extra_sources = relation(DBSource, secondary=self.tbl_extra_src_references,
2719 backref=backref('extra_binary_references', lazy='dynamic')),
2720 key = relation(BinaryMetadata, cascade='all',
2721 collection_class=attribute_mapped_collection('key'))),
2722 extension = validator)
2724 mapper(Component, self.tbl_component,
2725 properties = dict(component_id = self.tbl_component.c.id,
2726 component_name = self.tbl_component.c.name),
2727 extension = validator)
2729 mapper(DBConfig, self.tbl_config,
2730 properties = dict(config_id = self.tbl_config.c.id))
2732 mapper(DSCFile, self.tbl_dsc_files,
2733 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
2734 source_id = self.tbl_dsc_files.c.source,
2735 source = relation(DBSource),
2736 poolfile_id = self.tbl_dsc_files.c.file,
2737 poolfile = relation(PoolFile)))
2739 mapper(ExternalOverride, self.tbl_external_overrides,
2741 suite_id = self.tbl_external_overrides.c.suite,
2742 suite = relation(Suite),
2743 component_id = self.tbl_external_overrides.c.component,
2744 component = relation(Component)))
2746 mapper(PoolFile, self.tbl_files,
2747 properties = dict(file_id = self.tbl_files.c.id,
2748 filesize = self.tbl_files.c.size),
2749 extension = validator)
2751 mapper(Fingerprint, self.tbl_fingerprint,
2752 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
2753 uid_id = self.tbl_fingerprint.c.uid,
2754 uid = relation(Uid),
2755 keyring_id = self.tbl_fingerprint.c.keyring,
2756 keyring = relation(Keyring),
2757 acl = relation(ACL)),
2758 extension = validator)
2760 mapper(Keyring, self.tbl_keyrings,
2761 properties = dict(keyring_name = self.tbl_keyrings.c.name,
2762 keyring_id = self.tbl_keyrings.c.id,
2763 acl = relation(ACL, primaryjoin=(self.tbl_keyrings.c.acl_id == self.tbl_acl.c.id)))),
2765 mapper(DBChange, self.tbl_changes,
2766 properties = dict(change_id = self.tbl_changes.c.id,
2767 seen = self.tbl_changes.c.seen,
2768 source = self.tbl_changes.c.source,
2769 binaries = self.tbl_changes.c.binaries,
2770 architecture = self.tbl_changes.c.architecture,
2771 distribution = self.tbl_changes.c.distribution,
2772 urgency = self.tbl_changes.c.urgency,
2773 maintainer = self.tbl_changes.c.maintainer,
2774 changedby = self.tbl_changes.c.changedby,
2775 date = self.tbl_changes.c.date,
2776 version = self.tbl_changes.c.version))
# Maintainer: two relations to DBSource, split by maintainer vs changedby FK.
2778 mapper(Maintainer, self.tbl_maintainer,
2779 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
2780 maintains_sources = relation(DBSource, backref='maintainer',
2781 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
2782 changed_sources = relation(DBSource, backref='changedby',
2783 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
2784 extension = validator)
2786 mapper(NewComment, self.tbl_new_comments,
2787 properties = dict(comment_id = self.tbl_new_comments.c.id,
2788 policy_queue = relation(PolicyQueue)))
# Override: every related entity exposes a lazy 'overrides' backref so
# callers can filter override sets per suite/component/priority/etc.
2790 mapper(Override, self.tbl_override,
2791 properties = dict(suite_id = self.tbl_override.c.suite,
2792 suite = relation(Suite, \
2793 backref=backref('overrides', lazy='dynamic')),
2794 package = self.tbl_override.c.package,
2795 component_id = self.tbl_override.c.component,
2796 component = relation(Component, \
2797 backref=backref('overrides', lazy='dynamic')),
2798 priority_id = self.tbl_override.c.priority,
2799 priority = relation(Priority, \
2800 backref=backref('overrides', lazy='dynamic')),
2801 section_id = self.tbl_override.c.section,
2802 section = relation(Section, \
2803 backref=backref('overrides', lazy='dynamic')),
2804 overridetype_id = self.tbl_override.c.type,
2805 overridetype = relation(OverrideType, \
2806 backref=backref('overrides', lazy='dynamic'))))
2808 mapper(OverrideType, self.tbl_override_type,
2809 properties = dict(overridetype = self.tbl_override_type.c.type,
2810 overridetype_id = self.tbl_override_type.c.id))
2812 mapper(PolicyQueue, self.tbl_policy_queue,
2813 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id,
2814 suite = relation(Suite, primaryjoin=(self.tbl_policy_queue.c.suite_id == self.tbl_suite.c.id))))
2816 mapper(PolicyQueueUpload, self.tbl_policy_queue_upload,
2818 changes = relation(DBChange),
2819 policy_queue = relation(PolicyQueue, backref='uploads'),
2820 target_suite = relation(Suite),
2821 source = relation(DBSource),
2822 binaries = relation(DBBinary, secondary=self.tbl_policy_queue_upload_binaries_map),
2825 mapper(PolicyQueueByhandFile, self.tbl_policy_queue_byhand_file,
2827 upload = relation(PolicyQueueUpload, backref='byhand'),
2831 mapper(Priority, self.tbl_priority,
2832 properties = dict(priority_id = self.tbl_priority.c.id))
2834 mapper(Section, self.tbl_section,
2835 properties = dict(section_id = self.tbl_section.c.id,
2836 section=self.tbl_section.c.section))
2838 mapper(SignatureHistory, self.tbl_signature_history)
# DBSource: mirrors the DBBinary mapping, with a metadata dict via `key`.
2840 mapper(DBSource, self.tbl_source,
2841 properties = dict(source_id = self.tbl_source.c.id,
2842 version = self.tbl_source.c.version,
2843 maintainer_id = self.tbl_source.c.maintainer,
2844 poolfile_id = self.tbl_source.c.file,
2845 poolfile = relation(PoolFile),
2846 fingerprint_id = self.tbl_source.c.sig_fpr,
2847 fingerprint = relation(Fingerprint),
2848 changedby_id = self.tbl_source.c.changedby,
2849 srcfiles = relation(DSCFile,
2850 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
2851 suites = relation(Suite, secondary=self.tbl_src_associations,
2852 backref=backref('sources', lazy='dynamic')),
2853 uploaders = relation(Maintainer,
2854 secondary=self.tbl_src_uploaders),
2855 key = relation(SourceMetadata, cascade='all',
2856 collection_class=attribute_mapped_collection('key'))),
2857 extension = validator)
2859 mapper(SrcFormat, self.tbl_src_format,
2860 properties = dict(src_format_id = self.tbl_src_format.c.id,
2861 format_name = self.tbl_src_format.c.format_name))
# Suite: two FKs into policy_queue (regular vs NEW queue), hence the
# explicit primaryjoins.
2863 mapper(Suite, self.tbl_suite,
2864 properties = dict(suite_id = self.tbl_suite.c.id,
2865 policy_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.policy_queue_id == self.tbl_policy_queue.c.id)),
2866 new_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.new_queue_id == self.tbl_policy_queue.c.id)),
2867 copy_queues = relation(BuildQueue,
2868 secondary=self.tbl_suite_build_queue_copy),
2869 srcformats = relation(SrcFormat, secondary=self.tbl_suite_src_formats,
2870 backref=backref('suites', lazy='dynamic')),
2871 archive = relation(Archive, backref='suites'),
2872 acls = relation(ACL, secondary=self.tbl_suite_acl_map, collection_class=set),
2873 components = relation(Component, secondary=self.tbl_component_suite,
2874 order_by=self.tbl_component.c.ordering,
2875 backref=backref('suites'))),
2876 extension = validator)
2878 mapper(Uid, self.tbl_uid,
2879 properties = dict(uid_id = self.tbl_uid.c.id,
2880 fingerprint = relation(Fingerprint)),
2881 extension = validator)
# Contents mappings: cascade='all' so contents rows die with their package.
2883 mapper(BinContents, self.tbl_bin_contents,
2885 binary = relation(DBBinary,
2886 backref=backref('contents', lazy='dynamic', cascade='all')),
2887 file = self.tbl_bin_contents.c.file))
2889 mapper(SrcContents, self.tbl_src_contents,
2891 source = relation(DBSource,
2892 backref=backref('contents', lazy='dynamic', cascade='all')),
2893 file = self.tbl_src_contents.c.file))
2895 mapper(MetadataKey, self.tbl_metadata_keys,
2897 key_id = self.tbl_metadata_keys.c.key_id,
2898 key = self.tbl_metadata_keys.c.key))
2900 mapper(BinaryMetadata, self.tbl_binaries_metadata,
2902 binary_id = self.tbl_binaries_metadata.c.bin_id,
2903 binary = relation(DBBinary),
2904 key_id = self.tbl_binaries_metadata.c.key_id,
2905 key = relation(MetadataKey),
2906 value = self.tbl_binaries_metadata.c.value))
2908 mapper(SourceMetadata, self.tbl_source_metadata,
2910 source_id = self.tbl_source_metadata.c.src_id,
2911 source = relation(DBSource),
2912 key_id = self.tbl_source_metadata.c.key_id,
2913 key = relation(MetadataKey),
2914 value = self.tbl_source_metadata.c.value))
# VersionCheck: 'reference' eagerly joined since it is read on every check.
2916 mapper(VersionCheck, self.tbl_version_check,
2918 suite_id = self.tbl_version_check.c.suite,
2919 suite = relation(Suite, primaryjoin=self.tbl_version_check.c.suite==self.tbl_suite.c.id),
2920 reference_id = self.tbl_version_check.c.reference,
2921 reference = relation(Suite, primaryjoin=self.tbl_version_check.c.reference==self.tbl_suite.c.id, lazy='joined')))
2923 ## Connection functions
# Build the PostgreSQL connection string from dak configuration
# (DB::Service / DB::Host / DB::Name / DB::Port / pool options), create the
# SQLAlchemy engine + MetaData + sessionmaker, then reflect tables and set
# up mappers. Records the creating PID so session() can detect forks.
# NOTE(review): decimated listing -- `cnf = Config()`, the else-branch
# header for the socket case, the try: around setup, and parts of the
# sessionmaker/error handling are missing from view.
2924 def __createconn(self):
2925 from config import Config
2927 if cnf.has_key("DB::Service"):
# pg_service.conf lookup; handled by the patched dialect below.
2928 connstr = "postgresql://service=%s" % cnf["DB::Service"]
2929 elif cnf.has_key("DB::Host"):
# TCP connection to an explicit host. Port -1 means "default".
2931 connstr = "postgresql://%s" % cnf["DB::Host"]
2932 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
2933 connstr += ":%s" % cnf["DB::Port"]
2934 connstr += "/%s" % cnf["DB::Name"]
# Unix-socket connection (no host component in the URL).
2937 connstr = "postgresql:///%s" % cnf["DB::Name"]
2938 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
2939 connstr += "?port=%s" % cnf["DB::Port"]
2941 engine_args = { 'echo': self.debug }
2942 if cnf.has_key('DB::PoolSize'):
2943 engine_args['pool_size'] = int(cnf['DB::PoolSize'])
2944 if cnf.has_key('DB::MaxOverflow'):
2945 engine_args['max_overflow'] = int(cnf['DB::MaxOverflow'])
# use_native_unicode is only togglable on SQLAlchemy newer than 0.5.
2946 if sa_major_version != '0.5' and cnf.has_key('DB::Unicode') and \
2947 cnf['DB::Unicode'] == 'false':
2948 engine_args['use_native_unicode'] = False
2950 # Monkey patch a new dialect in in order to support service= syntax
2951 import sqlalchemy.dialects.postgresql
2952 from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
2953 class PGDialect_psycopg2_dak(PGDialect_psycopg2):
2954 def create_connect_args(self, url):
2955 if str(url).startswith('postgresql://service='):
# Strip the 'postgresql://service=' prefix (21 chars) and pass a raw
# libpq 'service=NAME' DSN through to psycopg2.
2957 servicename = str(url)[21:]
2958 return (['service=%s' % servicename], {})
2960 return PGDialect_psycopg2.create_connect_args(self, url)
2962 sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak
2965 self.db_pg = create_engine(connstr, **engine_args)
2966 self.db_meta = MetaData()
2967 self.db_meta.bind = self.db_pg
2968 self.db_smaker = sessionmaker(bind=self.db_pg,
2972 self.__setuptables()
2973 self.__setupmappers()
2975 except OperationalError as e:
# Connection failure is fatal for dak: fubar logs and exits.
2977 utils.fubar("Cannot connect to database (%s)" % str(e))
2979 self.pid = os.getpid()
# NOTE(review): decimated listing -- the docstring quotes, the fork-recovery
# branch body (presumably clear_mappers + __createconn) and the work_mem
# guard/return are missing from view. TODO confirm against full file.
2981 def session(self, work_mem = 0):
2983 Returns a new session object. If a work_mem parameter is provided a new
2984 transaction is started and the work_mem parameter is set for this
2985 transaction. The work_mem parameter is measured in MB. A default value
2986 will be used if the parameter is not set.
2988 # reinitialize DBConn in new processes
# Forked children must not reuse the parent's connection pool.
2989 if self.pid != os.getpid():
2992 session = self.db_smaker()
# SET LOCAL scopes the work_mem override to the current transaction only.
2994 session.execute("SET LOCAL work_mem TO '%d MB'" % work_mem)
2997 __all__.append('DBConn')