5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
38 from os.path import normpath
50 import simplejson as json
52 from datetime import datetime, timedelta
53 from errno import ENOENT
54 from tempfile import mkstemp, mkdtemp
55 from subprocess import Popen, PIPE
56 from tarfile import TarFile
58 from inspect import getargspec
61 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
63 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
64 backref, MapperExtension, EXT_CONTINUE, object_mapper, clear_mappers
65 from sqlalchemy import types as sqltypes
66 from sqlalchemy.orm.collections import attribute_mapped_collection
67 from sqlalchemy.ext.associationproxy import association_proxy
69 # Don't remove this, we re-export the exceptions to scripts which import us
70 from sqlalchemy.exc import *
71 from sqlalchemy.orm.exc import NoResultFound
73 # Only import Config until Queue stuff is changed to store its config
75 from config import Config
76 from textutils import fix_maintainer
77 from dak_exceptions import DBUpdateError, NoSourceFieldError, FileExistsError
79 # suppress some deprecation warnings in squeeze related to sqlalchemy
81 warnings.filterwarnings('ignore', \
82 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
84 warnings.filterwarnings('ignore', \
85 "Predicate of partial index .* ignored during reflection", \
89 ################################################################################
91 # Patch in support for the debversion field type so that it works during
95 # that is for sqlalchemy 0.6
96 UserDefinedType = sqltypes.UserDefinedType
98 # this one for sqlalchemy 0.5
99 UserDefinedType = sqltypes.TypeEngine
101 class DebVersion(UserDefinedType):
102 def get_col_spec(self):
105 def bind_processor(self, dialect):
108 # ' = None' is needed for sqlalchemy 0.5:
109 def result_processor(self, dialect, coltype = None):
112 sa_major_version = sqlalchemy.__version__[0:3]
113 if sa_major_version in ["0.5", "0.6", "0.7", "0.8"]:
114 from sqlalchemy.databases import postgres
115 postgres.ischema_names['debversion'] = DebVersion
117 raise Exception("dak only ported to SQLA versions 0.5 to 0.8. See daklib/dbconn.py")
119 ################################################################################
121 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
123 ################################################################################
125 def session_wrapper(fn):
127 Wrapper around common ".., session=None):" handling. If the wrapped
128 function is called without passing 'session', we create a local one
129 and destroy it when the function ends.
131 Also attaches a commit_or_flush method to the session; if we created a
132 local session, this is a synonym for session.commit(), otherwise it is a
133 synonym for session.flush().
136 def wrapped(*args, **kwargs):
137 private_transaction = False
139 # Find the session object
140 session = kwargs.get('session')
143 if len(args) <= len(getargspec(fn)[0]) - 1:
144 # No session specified as last argument or in kwargs
145 private_transaction = True
146 session = kwargs['session'] = DBConn().session()
148 # Session is last argument in args
152 session = args[-1] = DBConn().session()
153 private_transaction = True
155 if private_transaction:
156 session.commit_or_flush = session.commit
158 session.commit_or_flush = session.flush
161 return fn(*args, **kwargs)
163 if private_transaction:
164 # We created a session; close it.
167 wrapped.__doc__ = fn.__doc__
168 wrapped.func_name = fn.func_name
172 __all__.append('session_wrapper')
174 ################################################################################
176 class ORMObject(object):
178 ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
179 derived classes must implement the properties() method.
182 def properties(self):
184 This method should be implemented by all derived classes and returns a
185 list of the important properties. The properties 'created' and
186 'modified' will be added automatically. A suffix '_count' should be
187 added to properties that are lists or query objects. The most important
188 property name should be returned as the first element in the list
189 because it is used by repr().
195 Returns a JSON representation of the object based on the properties
196 returned from the properties() method.
199 # add created and modified
200 all_properties = self.properties() + ['created', 'modified']
201 for property in all_properties:
202 # check for list or query
203 if property[-6:] == '_count':
204 real_property = property[:-6]
205 if not hasattr(self, real_property):
207 value = getattr(self, real_property)
208 if hasattr(value, '__len__'):
211 elif hasattr(value, 'count'):
212 # query (but not during validation)
213 if self.in_validation:
215 value = value.count()
217 raise KeyError('Do not understand property %s.' % property)
219 if not hasattr(self, property):
222 value = getattr(self, property)
226 elif isinstance(value, ORMObject):
227 # use repr() for ORMObject types
230 # we want a string for all other types because json cannot
233 data[property] = value
234 return json.dumps(data)
238 Returns the name of the class.
240 return type(self).__name__
244 Returns a short string representation of the object using the first
245 element from the properties() method.
247 primary_property = self.properties()[0]
248 value = getattr(self, primary_property)
249 return '<%s %s>' % (self.classname(), str(value))
253 Returns a human readable form of the object using the properties()
256 return '<%s %s>' % (self.classname(), self.json())
258 def not_null_constraints(self):
260 Returns a list of properties that must be not NULL. Derived classes
261 should override this method if needed.
265 validation_message = \
266 "Validation failed because property '%s' must not be empty in object\n%s"
268 in_validation = False
272 This function validates the not NULL constraints as returned by
273 not_null_constraints(). It raises the DBUpdateError exception if
276 for property in self.not_null_constraints():
277 # TODO: It is a bit awkward that the mapper configuration allow
278 # directly setting the numeric _id columns. We should get rid of it
280 if hasattr(self, property + '_id') and \
281 getattr(self, property + '_id') is not None:
283 if not hasattr(self, property) or getattr(self, property) is None:
284 # str() might lead to races due to a 2nd flush
285 self.in_validation = True
286 message = self.validation_message % (property, str(self))
287 self.in_validation = False
288 raise DBUpdateError(message)
292 def get(cls, primary_key, session = None):
294 This is a support function that allows getting an object by its primary
297 Architecture.get(3[, session])
299 instead of the more verbose
301 session.query(Architecture).get(3)
303 return session.query(cls).get(primary_key)
305 def session(self, replace = False):
307 Returns the current session that is associated with the object. May
308 return None is object is in detached state.
311 return object_session(self)
313 def clone(self, session = None):
315 Clones the current object in a new session and returns the new clone. A
316 fresh session is created if the optional session parameter is not
317 provided. The function will fail if a session is provided and has
320 RATIONALE: SQLAlchemy's session is not thread safe. This method clones
321 an existing object to allow several threads to work with their own
322 instances of an ORMObject.
324 WARNING: Only persistent (committed) objects can be cloned. Changes
325 made to the original object that are not committed yet will get lost.
326 The session of the new object will always be rolled back to avoid
330 if self.session() is None:
331 raise RuntimeError( \
332 'Method clone() failed for detached object:\n%s' % self)
333 self.session().flush()
334 mapper = object_mapper(self)
335 primary_key = mapper.primary_key_from_instance(self)
336 object_class = self.__class__
338 session = DBConn().session()
339 elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
340 raise RuntimeError( \
341 'Method clone() failed due to unflushed changes in session.')
342 new_object = session.query(object_class).get(primary_key)
344 if new_object is None:
345 raise RuntimeError( \
346 'Method clone() failed for non-persistent object:\n%s' % self)
349 __all__.append('ORMObject')
351 ################################################################################
353 class Validator(MapperExtension):
355 This class calls the validate() method for each instance for the
356 'before_update' and 'before_insert' events. A global object validator is
357 used for configuring the individual mappers.
360 def before_update(self, mapper, connection, instance):
364 def before_insert(self, mapper, connection, instance):
368 validator = Validator()
370 ################################################################################
372 class ACL(ORMObject):
374 return "<ACL {0}>".format(self.name)
376 __all__.append('ACL')
378 class ACLPerSource(ORMObject):
380 return "<ACLPerSource acl={0} fingerprint={1} source={2} reason={3}>".format(self.acl.name, self.fingerprint.fingerprint, self.source, self.reason)
382 __all__.append('ACLPerSource')
384 ################################################################################
class Architecture(ORMObject):
    """ORM class for an architecture row ('amd64', 'source', ...).

    Instances compare equal/unequal against plain strings by their
    arch_string; any other comparison defers to the default machinery.
    """

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Allow direct comparison against a plain architecture name.
        if not isinstance(val, str):
            # Signal Python to fall back to the normal comparison operator.
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        if not isinstance(val, str):
            # Signal Python to fall back to the normal comparison operator.
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # First entry is the primary property used by ORMObject.__repr__().
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']
409 __all__.append('Architecture')
412 def get_architecture(architecture, session=None):
414 Returns database id for given C{architecture}.
416 @type architecture: string
417 @param architecture: The name of the architecture
419 @type session: Session
420 @param session: Optional SQLA session object (a temporary one will be
421 generated if not supplied)
424 @return: Architecture object for the given arch (None if not present)
427 q = session.query(Architecture).filter_by(arch_string=architecture)
431 except NoResultFound:
434 __all__.append('get_architecture')
436 # TODO: should be removed because the implementation is too trivial
438 def get_architecture_suites(architecture, session=None):
440 Returns list of Suite objects for given C{architecture} name
442 @type architecture: str
443 @param architecture: Architecture name to search for
445 @type session: Session
446 @param session: Optional SQL session object (a temporary one will be
447 generated if not supplied)
450 @return: list of Suite objects for the given name (may be empty)
453 return get_architecture(architecture, session).suites
455 __all__.append('get_architecture_suites')
457 ################################################################################
459 class Archive(object):
460 def __init__(self, *args, **kwargs):
464 return '<Archive %s>' % self.archive_name
466 __all__.append('Archive')
469 def get_archive(archive, session=None):
471 returns database id for given C{archive}.
473 @type archive: string
474 @param archive: the name of the archive
476 @type session: Session
477 @param session: Optional SQLA session object (a temporary one will be
478 generated if not supplied)
481 @return: Archive object for the given name (None if not present)
484 archive = archive.lower()
486 q = session.query(Archive).filter_by(archive_name=archive)
490 except NoResultFound:
493 __all__.append('get_archive')
495 ################################################################################
497 class ArchiveFile(object):
498 def __init__(self, archive=None, component=None, file=None):
499 self.archive = archive
500 self.component = component
504 return os.path.join(self.archive.path, 'pool', self.component.component_name, self.file.filename)
506 __all__.append('ArchiveFile')
508 ################################################################################
510 class BinContents(ORMObject):
511 def __init__(self, file = None, binary = None):
515 def properties(self):
516 return ['file', 'binary']
518 __all__.append('BinContents')
520 ################################################################################
def subprocess_setup():
    """Restore the default SIGPIPE disposition in a child process.

    Python installs its own SIGPIPE handler by default, which is usually
    not what non-Python subprocesses expect; pass this as preexec_fn to
    Popen so the child gets the normal behaviour back.
    """
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
527 class DBBinary(ORMObject):
528 def __init__(self, package = None, source = None, version = None, \
529 maintainer = None, architecture = None, poolfile = None, \
530 binarytype = 'deb', fingerprint=None):
531 self.package = package
533 self.version = version
534 self.maintainer = maintainer
535 self.architecture = architecture
536 self.poolfile = poolfile
537 self.binarytype = binarytype
538 self.fingerprint = fingerprint
542 return self.binary_id
544 def properties(self):
545 return ['package', 'version', 'maintainer', 'source', 'architecture', \
546 'poolfile', 'binarytype', 'fingerprint', 'install_date', \
547 'suites_count', 'binary_id', 'contents_count', 'extra_sources']
549 def not_null_constraints(self):
550 return ['package', 'version', 'maintainer', 'source', 'poolfile', \
553 metadata = association_proxy('key', 'value')
555 def scan_contents(self):
557 Yields the contents of the package. Only regular files are yielded and
558 the path names are normalized after converting them from either utf-8
559 or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the
560 package does not contain any regular file.
562 fullpath = self.poolfile.fullpath
563 dpkg = Popen(['dpkg-deb', '--fsys-tarfile', fullpath], stdout = PIPE,
564 preexec_fn = subprocess_setup)
565 tar = TarFile.open(fileobj = dpkg.stdout, mode = 'r|')
566 for member in tar.getmembers():
567 if not member.isdir():
568 name = normpath(member.name)
569 # enforce proper utf-8 encoding
572 except UnicodeDecodeError:
573 name = name.decode('iso8859-1').encode('utf-8')
579 def read_control(self):
581 Reads the control information from a binary.
584 @return: stanza text of the control section.
587 fullpath = self.poolfile.fullpath
588 deb_file = open(fullpath, 'r')
589 stanza = utils.deb_extract_control(deb_file)
594 def read_control_fields(self):
596 Reads the control information from a binary and return
600 @return: fields of the control section as a dictionary.
603 stanza = self.read_control()
604 return apt_pkg.TagSection(stanza)
606 __all__.append('DBBinary')
609 def get_suites_binary_in(package, session=None):
611 Returns list of Suite objects which given C{package} name is in
614 @param package: DBBinary package name to search for
617 @return: list of Suite objects for the given package
620 return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()
622 __all__.append('get_suites_binary_in')
625 def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
627 Returns the component name of the newest binary package in suite_list or
628 None if no package is found. The result can be optionally filtered by a list
629 of architecture names.
632 @param package: DBBinary package name to search for
634 @type suite_list: list of str
635 @param suite_list: list of suite_name items
637 @type arch_list: list of str
638 @param arch_list: optional list of arch_string items that defaults to []
640 @rtype: str or NoneType
641 @return: name of component or None
644 q = session.query(DBBinary).filter_by(package = package). \
645 join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
646 if len(arch_list) > 0:
647 q = q.join(DBBinary.architecture). \
648 filter(Architecture.arch_string.in_(arch_list))
649 binary = q.order_by(desc(DBBinary.version)).first()
653 return binary.poolfile.component.component_name
655 __all__.append('get_component_by_package_suite')
657 ################################################################################
659 class BuildQueue(object):
660 def __init__(self, *args, **kwargs):
664 return '<BuildQueue %s>' % self.queue_name
666 __all__.append('BuildQueue')
668 ################################################################################
class Component(ORMObject):
    """ORM class for an archive component ('main', 'contrib', ...).

    Instances compare equal/unequal against plain strings by their
    component_name; any other comparison defers to the default machinery.
    """

    def __init__(self, component_name = None):
        self.component_name = component_name

    def __eq__(self, val):
        # Allow direct comparison against a plain component name.
        if not isinstance(val, str):
            # Signal Python to fall back to the normal comparison operator.
            return NotImplemented
        return self.component_name == val

    def __ne__(self, val):
        if not isinstance(val, str):
            # Signal Python to fall back to the normal comparison operator.
            return NotImplemented
        return self.component_name != val

    def properties(self):
        # First entry is the primary property used by ORMObject.__repr__().
        return ['component_name', 'component_id', 'description',
                'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        return ['component_name']
694 __all__.append('Component')
697 def get_component(component, session=None):
699 Returns database id for given C{component}.
701 @type component: string
702 @param component: The name of the override type
705 @return: the database id for the given component
708 component = component.lower()
710 q = session.query(Component).filter_by(component_name=component)
714 except NoResultFound:
717 __all__.append('get_component')
720 def get_mapped_component(component_name, session=None):
721 """get component after mappings
723 Evaluate component mappings from ComponentMappings in dak.conf for the
724 given component name.
726 @todo: ansgar wants to get rid of this. It's currently only used for
729 @type component_name: str
730 @param component_name: component name
732 @param session: database session
734 @rtype: L{daklib.dbconn.Component} or C{None}
735 @return: component after applying maps or C{None}
738 for m in cnf.value_list("ComponentMappings"):
739 (src, dst) = m.split()
740 if component_name == src:
742 component = session.query(Component).filter_by(component_name=component_name).first()
745 __all__.append('get_mapped_component')
748 def get_component_names(session=None):
750 Returns list of strings of component names.
753 @return: list of strings of component names
756 return [ x.component_name for x in session.query(Component).all() ]
758 __all__.append('get_component_names')
760 ################################################################################
762 class DBConfig(object):
763 def __init__(self, *args, **kwargs):
767 return '<DBConfig %s>' % self.name
769 __all__.append('DBConfig')
771 ################################################################################
774 def get_or_set_contents_file_id(filename, session=None):
776 Returns database id for given filename.
778 If no matching file is found, a row is inserted.
780 @type filename: string
781 @param filename: The filename
782 @type session: SQLAlchemy
783 @param session: Optional SQL session object (a temporary one will be
784 generated if not supplied). If not passed, a commit will be performed at
785 the end of the function, otherwise the caller is responsible for committing.
788 @return: the database id for the given component
791 q = session.query(ContentFilename).filter_by(filename=filename)
794 ret = q.one().cafilename_id
795 except NoResultFound:
796 cf = ContentFilename()
797 cf.filename = filename
799 session.commit_or_flush()
800 ret = cf.cafilename_id
804 __all__.append('get_or_set_contents_file_id')
807 def get_contents(suite, overridetype, section=None, session=None):
809 Returns contents for a suite / overridetype combination, limiting
810 to a section if not None.
813 @param suite: Suite object
815 @type overridetype: OverrideType
816 @param overridetype: OverrideType object
818 @type section: Section
819 @param section: Optional section object to limit results to
821 @type session: SQLAlchemy
822 @param session: Optional SQL session object (a temporary one will be
823 generated if not supplied)
826 @return: ResultsProxy object set up to return tuples of (filename, section,
830 # find me all of the contents for a given suite
831 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
835 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
836 JOIN content_file_names n ON (c.filename=n.id)
837 JOIN binaries b ON (b.id=c.binary_pkg)
838 JOIN override o ON (o.package=b.package)
839 JOIN section s ON (s.id=o.section)
840 WHERE o.suite = :suiteid AND o.type = :overridetypeid
841 AND b.type=:overridetypename"""
843 vals = {'suiteid': suite.suite_id,
844 'overridetypeid': overridetype.overridetype_id,
845 'overridetypename': overridetype.overridetype}
847 if section is not None:
848 contents_q += " AND s.id = :sectionid"
849 vals['sectionid'] = section.section_id
851 contents_q += " ORDER BY fn"
853 return session.execute(contents_q, vals)
855 __all__.append('get_contents')
857 ################################################################################
859 class ContentFilepath(object):
860 def __init__(self, *args, **kwargs):
864 return '<ContentFilepath %s>' % self.filepath
866 __all__.append('ContentFilepath')
869 def get_or_set_contents_path_id(filepath, session=None):
871 Returns database id for given path.
873 If no matching file is found, a row is inserted.
875 @type filepath: string
876 @param filepath: The filepath
878 @type session: SQLAlchemy
879 @param session: Optional SQL session object (a temporary one will be
880 generated if not supplied). If not passed, a commit will be performed at
881 the end of the function, otherwise the caller is responsible for committing.
884 @return: the database id for the given path
887 q = session.query(ContentFilepath).filter_by(filepath=filepath)
890 ret = q.one().cafilepath_id
891 except NoResultFound:
892 cf = ContentFilepath()
893 cf.filepath = filepath
895 session.commit_or_flush()
896 ret = cf.cafilepath_id
900 __all__.append('get_or_set_contents_path_id')
902 ################################################################################
904 class ContentAssociation(object):
905 def __init__(self, *args, **kwargs):
909 return '<ContentAssociation %s>' % self.ca_id
911 __all__.append('ContentAssociation')
913 def insert_content_paths(binary_id, fullpaths, session=None):
915 Make sure given path is associated with given binary id
918 @param binary_id: the id of the binary
919 @type fullpaths: list
920 @param fullpaths: the list of paths of the file being associated with the binary
921 @type session: SQLAlchemy session
922 @param session: Optional SQLAlchemy session. If this is passed, the caller
923 is responsible for ensuring a transaction has begun and committing the
924 results or rolling back based on the result code. If not passed, a commit
925 will be performed at the end of the function, otherwise the caller is
926 responsible for committing.
928 @return: True upon success
933 session = DBConn().session()
938 def generate_path_dicts():
939 for fullpath in fullpaths:
940 if fullpath.startswith( './' ):
941 fullpath = fullpath[2:]
943 yield {'filename':fullpath, 'id': binary_id }
945 for d in generate_path_dicts():
946 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
955 traceback.print_exc()
957 # Only rollback if we set up the session ourself
964 __all__.append('insert_content_paths')
966 ################################################################################
968 class DSCFile(object):
969 def __init__(self, *args, **kwargs):
973 return '<DSCFile %s>' % self.dscfile_id
975 __all__.append('DSCFile')
978 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
980 Returns a list of DSCFiles which may be empty
982 @type dscfile_id: int (optional)
983 @param dscfile_id: the dscfile_id of the DSCFiles to find
985 @type source_id: int (optional)
986 @param source_id: the source id related to the DSCFiles to find
988 @type poolfile_id: int (optional)
989 @param poolfile_id: the poolfile id related to the DSCFiles to find
992 @return: Possibly empty list of DSCFiles
995 q = session.query(DSCFile)
997 if dscfile_id is not None:
998 q = q.filter_by(dscfile_id=dscfile_id)
1000 if source_id is not None:
1001 q = q.filter_by(source_id=source_id)
1003 if poolfile_id is not None:
1004 q = q.filter_by(poolfile_id=poolfile_id)
1008 __all__.append('get_dscfiles')
1010 ################################################################################
1012 class ExternalOverride(ORMObject):
1013 def __init__(self, *args, **kwargs):
1017 return '<ExternalOverride %s = %s: %s>' % (self.package, self.key, self.value)
1019 __all__.append('ExternalOverride')
1021 ################################################################################
1023 class PoolFile(ORMObject):
1024 def __init__(self, filename = None, filesize = -1, \
1026 self.filename = filename
1027 self.filesize = filesize
1028 self.md5sum = md5sum
1032 session = DBConn().session().object_session(self)
1033 af = session.query(ArchiveFile).join(Archive) \
1034 .filter(ArchiveFile.file == self) \
1035 .order_by(Archive.tainted.desc()).first()
1039 def component(self):
1040 session = DBConn().session().object_session(self)
1041 component_id = session.query(ArchiveFile.component_id).filter(ArchiveFile.file == self) \
1042 .group_by(ArchiveFile.component_id).one()
1043 return session.query(Component).get(component_id)
1047 return os.path.basename(self.filename)
1049 def is_valid(self, filesize = -1, md5sum = None):
1050 return self.filesize == long(filesize) and self.md5sum == md5sum
1052 def properties(self):
1053 return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
1054 'sha256sum', 'source', 'binary', 'last_used']
1056 def not_null_constraints(self):
1057 return ['filename', 'md5sum']
1059 def identical_to(self, filename):
1061 compare size and hash with the given file
1064 @return: true if the given file has the same size and hash as this object; false otherwise
1066 st = os.stat(filename)
1067 if self.filesize != st.st_size:
1070 f = open(filename, "r")
1071 sha256sum = apt_pkg.sha256sum(f)
1072 if sha256sum != self.sha256sum:
1077 __all__.append('PoolFile')
1080 def get_poolfile_like_name(filename, session=None):
1082 Returns an array of PoolFile objects which are like the given name
1084 @type filename: string
1085 @param filename: the filename of the file to check against the DB
1088 @return: array of PoolFile objects
1091 # TODO: There must be a way of properly using bind parameters with %FOO%
1092 q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1096 __all__.append('get_poolfile_like_name')
1098 ################################################################################
1100 class Fingerprint(ORMObject):
1101 def __init__(self, fingerprint = None):
1102 self.fingerprint = fingerprint
1104 def properties(self):
1105 return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
1108 def not_null_constraints(self):
1109 return ['fingerprint']
1111 __all__.append('Fingerprint')
1114 def get_fingerprint(fpr, session=None):
1116 Returns Fingerprint object for given fpr.
1119 @param fpr: The fpr to find / add
1121 @type session: SQLAlchemy
1122 @param session: Optional SQL session object (a temporary one will be
1123 generated if not supplied).
1126 @return: the Fingerprint object for the given fpr or None
1129 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1133 except NoResultFound:
1138 __all__.append('get_fingerprint')
1141 def get_or_set_fingerprint(fpr, session=None):
1143 Returns Fingerprint object for given fpr.
1145 If no matching fpr is found, a row is inserted.
1148 @param fpr: The fpr to find / add
1150 @type session: SQLAlchemy
1151 @param session: Optional SQL session object (a temporary one will be
1152 generated if not supplied). If not passed, a commit will be performed at
1153 the end of the function, otherwise the caller is responsible for commiting.
1154 A flush will be performed either way.
1157 @return: the Fingerprint object for the given fpr
1160 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1164 except NoResultFound:
1165 fingerprint = Fingerprint()
1166 fingerprint.fingerprint = fpr
1167 session.add(fingerprint)
1168 session.commit_or_flush()
1173 __all__.append('get_or_set_fingerprint')
1175 ################################################################################
1177 # Helper routine for Keyring class
1178 def get_ldap_name(entry):
1180 for k in ["cn", "mn", "sn"]:
1182 if ret and ret[0] != "" and ret[0] != "-":
1184 return " ".join(name)
1186 ################################################################################
class Keyring(object):
    """A GPG keyring known to the database.

    Provides helpers to parse the keyring via gpg's machine-readable
    (--with-colons) output and to map keys to uids, either via LDAP or
    directly from the keyring contents.
    """
    # Command used to list keys; %s is the keyring path.  --with-colons makes
    # the output machine parseable (pub:/sub:/uid:/fpr: records).
    gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
                     " --with-colons --fingerprint --fingerprint"

    def __init__(self, *args, **kwargs):
        # NOTE(review): this return presumably belongs to a __repr__ whose
        # def line is not shown in this view
        return '<Keyring %s>' % self.keyring_name

    def de_escape_gpg_str(self, txt):
        """Undo gpg's \\xNN escaping in C{txt} and return the plain string."""
        esclist = re.split(r'(\\x..)', txt)
        # every odd element of the split is a \xNN escape; replace it with
        # the character it encodes
        for x in range(1,len(esclist),2):
            esclist[x] = "%c" % (int(esclist[x][2:],16))
        return "".join(esclist)

    def parse_address(self, uid):
        """parses uid and returns a tuple of real name and email address"""
        (name, address) = email.Utils.parseaddr(uid)
        # strip any parenthesised comment from the name part
        name = re.sub(r"\s*[(].*[)]", "", name)
        name = self.de_escape_gpg_str(name)
        return (name, address)

    def load_keys(self, keyring):
        """Parse C{keyring} (a path) via gpg and populate self.keys and
        self.fpr_lookup (fingerprint -> key id)."""
        if not self.keyring_id:
            raise Exception('Must be initialized with database information')

        # NOTE(review): os.popen is deprecated; the per-line loop around the
        # parsing below is not shown in this view
        k = os.popen(self.gpg_invocation % keyring, "r")

        field = line.split(":")
        if field[0] == "pub":
            (name, addr) = self.parse_address(field[9])
            self.keys[key]["email"] = addr
            self.keys[key]["name"] = name
            self.keys[key]["fingerprints"] = []
        elif key and field[0] == "sub" and len(field) >= 12:
            # subkey record: remember whether it has the signing ("s") capability
            signingkey = ("s" in field[11])
        elif key and field[0] == "uid":
            (name, addr) = self.parse_address(field[9])
            # only take the first uid that carries an email address
            if "email" not in self.keys[key] and "@" in addr:
                self.keys[key]["email"] = addr
                self.keys[key]["name"] = name
        elif signingkey and field[0] == "fpr":
            # fingerprint belonging to the current signing-capable key
            self.keys[key]["fingerprints"].append(field[9])
            self.fpr_lookup[field[9]] = key

    def import_users_from_ldap(self, session):
        """Look up uid/name information for the loaded keys in LDAP and
        return a (byname, byuid) pair of mappings."""
        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
        ca_cert_file = cnf.get('Import-LDAP-Fingerprints::CACertFile')

        l = ldap.open(LDAPServer)

        # TODO: This should request a new context and use
        # connection-specific options (i.e. "l.set_option(...)")

        # Request a new TLS context. If there was already one, libldap
        # would not change the TLS options (like which CAs to trust).
        #l.set_option(ldap.OPT_X_TLS_NEWCTX, True)
        ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_HARD)
        #ldap.set_option(ldap.OPT_X_TLS_CACERTDIR, None)
        ldap.set_option(ldap.OPT_X_TLS_CACERTFILE, ca_cert_file)

        # anonymous bind; only developers with a valid GID are selected
        l.simple_bind_s("","")
        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
               ["uid", "keyfingerprint", "cn", "mn", "sn"])

        ldap_fin_uid_id = {}

        # NOTE(review): the loop over the LDAP search results is not shown in
        # this view
        uid = entry["uid"][0]
        name = get_ldap_name(entry)
        fingerprints = entry["keyFingerPrint"]
        for f in fingerprints:
            key = self.fpr_lookup.get(f, None)
            if key not in self.keys:
            self.keys[key]["uid"] = uid

            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, name)
            byname[uid] = (keyid, name)

        return (byname, byuid)

    def generate_users_from_keyring(self, format, session):
        """Generate uid entries from the keyring's own email addresses,
        using C{format} ("... %s ...") as the uid template; return a
        (byname, byuid) pair of mappings."""
        for x in self.keys.keys():
            if "email" not in self.keys[x]:
                # key without any email address: mark as invalid
                self.keys[x]["uid"] = format % "invalid-uid"
            uid = format % self.keys[x]["email"]
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, self.keys[x]["name"])
            byname[uid] = (keyid, self.keys[x]["name"])
            self.keys[x]["uid"] = uid

        # NOTE(review): this tail presumably handles the invalid-uid case
        uid = format % "invalid-uid"
        keyid = get_or_set_uid(uid, session).uid_id
        byuid[keyid] = (uid, "ungeneratable user id")
        byname[uid] = (keyid, "ungeneratable user id")

        return (byname, byuid)
1322 __all__.append('Keyring')
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @rtype: Keyring
    @return: the Keyring object for this keyring
    """
    q = session.query(Keyring).filter_by(keyring_name=keyring)
    # NOTE(review): the try/return wrapped around the query result is not
    # shown in this view
    except NoResultFound:
1344 __all__.append('get_keyring')
def get_active_keyring_paths(session=None):
    """
    @rtype: list of strings
    @return: list of active keyring paths, ordered by descending priority
    """
    return [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).order_by(desc(Keyring.priority)).all() ]
1354 __all__.append('get_active_keyring_paths')
def get_primary_keyring_path(session=None):
    """
    Get the full path to the highest priority active keyring

    @rtype: str or None
    @return: path to the active keyring with the highest priority or None if no
             keyring is configured
    """
    # get_active_keyring_paths() returns paths ordered by descending priority
    keyrings = get_active_keyring_paths()

    if len(keyrings) > 0:
1372 __all__.append('get_primary_keyring_path')
1374 ################################################################################
class DBChange(object):
    """Database representation of an uploaded .changes file."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): this return presumably belongs to a __repr__ whose
        # def line is not shown in this view
        return '<DBChange %s>' % self.changesname
1383 __all__.append('DBChange')
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return: DBChange object for the given filename (C{None} if not present)
    """
    q = session.query(DBChange).filter_by(changesname=filename)
    except NoResultFound:
1408 __all__.append('get_dbchange')
1410 ################################################################################
class Maintainer(ORMObject):
    """A package maintainer entry (name and email in one string)."""
    def __init__(self, name = None):

    def properties(self):
        return ['name', 'maintainer_id']

    def not_null_constraints(self):

    def get_split_maintainer(self):
        """Return self.name split via fix_maintainer(); a tuple of four
        empty strings when no name is set."""
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())
1428 __all__.append('Maintainer')
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """
    q = session.query(Maintainer).filter_by(name=name)
    # NOTE(review): the try/return around the query result is not shown here
    except NoResultFound:
        # not found: insert a new row and persist it
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()
1462 __all__.append('get_or_set_maintainer')
def get_maintainer(maintainer_id, session=None):
    """
    Return the name of the maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer
    @return: the Maintainer with this C{maintainer_id}
    """
    return session.query(Maintainer).get(maintainer_id)
1479 __all__.append('get_maintainer')
1481 ################################################################################
class NewComment(object):
    """A reviewer comment attached to a package/version in a policy queue."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): this return presumably belongs to a __repr__ whose
        # def line is not shown in this view
        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
1490 __all__.append('NewComment')
def has_new_comment(policy_queue, package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """
    q = session.query(NewComment).filter_by(policy_queue=policy_queue)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    # bool() is redundant (the comparison is already boolean) but harmless
    return bool(q.count() > 0)
1517 __all__.append('has_new_comment')
def get_new_comments(policy_queue, package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """
    q = session.query(NewComment).filter_by(policy_queue=policy_queue)
    # all filters below are optional; apply only the ones that were given
    if package is not None: q = q.filter_by(package=package)
    if version is not None: q = q.filter_by(version=version)
    if comment_id is not None: q = q.filter_by(comment_id=comment_id)
1549 __all__.append('get_new_comments')
1551 ################################################################################
class Override(ORMObject):
    """An override entry: section/priority assignment for a package in a
    suite and component."""
    def __init__(self, package = None, suite = None, component = None, overridetype = None, \
        section = None, priority = None):
        self.package = package
        # NOTE(review): the assignment of self.suite is not shown in this view
        self.component = component
        self.overridetype = overridetype
        self.section = section
        self.priority = priority

    def properties(self):
        return ['package', 'suite', 'component', 'overridetype', 'section', \

    def not_null_constraints(self):
        return ['package', 'suite', 'component', 'overridetype', 'section']
1570 __all__.append('Override')
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to.  If
                  None, don't limit.  Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
                      limit to.  If None, don't limit.  Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
                         limit to.  If None, don't limit.  Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """
    q = session.query(Override)
    q = q.filter_by(package=package)

    # each optional filter accepts a scalar or a list; normalise to a list
    if suite is not None:
        if not isinstance(suite, list): suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list): component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list): overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))
1617 __all__.append('get_override')
1620 ################################################################################
class OverrideType(ORMObject):
    """A named type of override entry."""

    def __init__(self, overridetype = None):
        """Create an OverrideType; C{overridetype} is its name (may be None)."""
        self.overridetype = overridetype

    def properties(self):
        """Attribute names exposed through the ORMObject machinery."""
        exposed = ['overridetype', 'overridetype_id', 'overrides_count']
        return exposed

    def not_null_constraints(self):
        """Attributes which must never be NULL."""
        required = ['overridetype']
        return required
1632 __all__.append('OverrideType')
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: OverrideType
    @return: the database id for the given override type
    """
    q = session.query(OverrideType).filter_by(overridetype=override_type)
    except NoResultFound:
1657 __all__.append('get_override_type')
1659 ################################################################################
class PolicyQueue(object):
    """A policy queue that uploads can be held in."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): this return presumably belongs to a __repr__ whose
        # def line is not shown in this view
        return '<PolicyQueue %s>' % self.queue_name
1668 __all__.append('PolicyQueue')
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(queue_name=queuename)
    except NoResultFound:
1693 __all__.append('get_policy_queue')
1695 ################################################################################
class PolicyQueueUpload(object):
    """An upload held in a policy queue."""
    def __cmp__(self, other):
        """Order uploads: by source name, then version, then whether they
        include source, then .changes file name."""
        ret = cmp(self.changes.source, other.changes.source)
        ret = apt_pkg.version_compare(self.changes.version, other.changes.version)
        # distinguish source-including from binary-only uploads at equal
        # versions (the branch bodies are not shown in this view)
        if self.source is not None and other.source is None:
        elif self.source is None and other.source is not None:
        ret = cmp(self.changes.changesname, other.changes.changesname)
1711 __all__.append('PolicyQueueUpload')
1713 ################################################################################
class PolicyQueueByhandFile(object):
    """ORM class for the policy_queue_byhand_file table."""
1718 __all__.append('PolicyQueueByhandFile')
1720 ################################################################################
class Priority(ORMObject):
    """A package priority (name plus numeric level)."""
    def __init__(self, priority = None, level = None):
        self.priority = priority
        # NOTE(review): the assignment of self.level is not shown in this view

    def properties(self):
        return ['priority', 'priority_id', 'level', 'overrides_count']

    def not_null_constraints(self):
        return ['priority', 'level']

    def __eq__(self, val):
        """Allow comparing a Priority directly against its name string."""
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented
1745 __all__.append('Priority')
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority
    """
    q = session.query(Priority).filter_by(priority=priority)
    except NoResultFound:
1770 __all__.append('get_priority')
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """
    q = session.query(Priority)
    # NOTE(review): the loop over q filling `ret` is not shown in this view
    ret[x.priority] = x.priority_id
1792 __all__.append('get_priorities')
1794 ################################################################################
class Section(ORMObject):
    """An archive section (e.g. as referenced from overrides)."""
    def __init__(self, section = None):
        self.section = section

    def properties(self):
        return ['section', 'section_id', 'overrides_count']

    def not_null_constraints(self):

    def __eq__(self, val):
        """Allow comparing a Section directly against its name string."""
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented
1818 __all__.append('Section')
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name
    """
    q = session.query(Section).filter_by(section=section)
    except NoResultFound:
1843 __all__.append('get_section')
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """
    q = session.query(Section)
    # NOTE(review): the loop over q filling `ret` is not shown in this view
    ret[x.section] = x.section_id
1865 __all__.append('get_sections')
1867 ################################################################################
class SignatureHistory(ORMObject):
    """Record of a signature seen before: fingerprint, timestamp and a
    SHA1 of the signed contents."""
    def from_signed_file(cls, signed_file):
        """signature history entry from signed file

        @type signed_file: L{daklib.gpg.SignedFile}
        @param signed_file: signed file

        @rtype: L{SignatureHistory}
        """
        # NOTE(review): presumably a @classmethod; the creation of `self`
        # and the final return are not shown in this view
        self.fingerprint = signed_file.primary_fingerprint
        self.signature_timestamp = signed_file.signature_timestamp
        self.contents_sha1 = signed_file.contents_sha1()
1885 __all__.append('SignatureHistory')
1887 ################################################################################
class SrcContents(ORMObject):
    """A pairing of a file name with a source package (contents listing)."""
    def __init__(self, file = None, source = None):
        # NOTE(review): the assignment of self.file is not shown in this view
        self.source = source

    def properties(self):
        return ['file', 'source']
1897 __all__.append('SrcContents')
1899 ################################################################################
1901 from debian.debfile import Deb822
1903 # Temporary Deb822 subclass to fix bugs with : handling; see #597249
class Dak822(Deb822):
    """Deb822 subclass with a fixed RFC822-paragraph parser (see #597249)."""
    def _internal_parser(self, sequence, fields=None):
        """Parse C{sequence} into self, keeping only C{fields} if given."""
        # The key is non-whitespace, non-colon characters before any colon.
        key_part = r"^(?P<key>[^: \t\n\r\f\v]+)\s*:\s*"
        # "Key: value" on one line
        single = re.compile(key_part + r"(?P<data>\S.*?)\s*$")
        # "Key:" with the value on continuation lines
        multi = re.compile(key_part + r"$")
        # a continuation line (starts with whitespace)
        multidata = re.compile(r"^\s(?P<data>.+?)\s*$")

        wanted_field = lambda f: fields is None or f in fields

        if isinstance(sequence, basestring):
            sequence = sequence.splitlines()

        for line in self.gpg_stripped_paragraph(sequence):
            # NOTE(review): several control-flow lines of this state machine
            # (continue/else branches) are not shown in this view
            m = single.match(line)
            self[curkey] = content
            if not wanted_field(m.group('key')):
            curkey = m.group('key')
            content = m.group('data')
            m = multi.match(line)
            self[curkey] = content
            if not wanted_field(m.group('key')):
            curkey = m.group('key')
            m = multidata.match(line)
            content += '\n' + line # XXX not m.group('data')?
        # flush the final field
        self[curkey] = content
class DBSource(ORMObject):
    """A source package as stored in the database."""
    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None, fingerprint = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date
        self.fingerprint = fingerprint

    # NOTE(review): this return presumably belongs to an id/property
    # accessor whose def line is not shown in this view
        return self.source_id

    def properties(self):
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \
            'install_date', 'binaries_count', 'uploaders_count']

    def not_null_constraints(self):
        return ['source', 'version', 'install_date', 'maintainer', \
            'changedby', 'poolfile']

    def read_control_fields(self):
        """
        Reads the control information from a dsc

        @rtype: tuple
        @return: fields is the dsc information in a dictionary form
        """
        fullpath = self.poolfile.fullpath
        fields = Dak822(open(self.poolfile.fullpath, 'r'))

    # key/value access to the source's metadata rows
    metadata = association_proxy('key', 'value')

    def scan_contents(self):
        """
        Returns a set of names for non directories. The path names are
        normalized after converting them from either utf-8 or iso8859-1
        encoding.
        """
        fullpath = self.poolfile.fullpath
        from daklib.contents import UnpackedSource
        unpacked = UnpackedSource(fullpath)
        for name in unpacked.get_all_filenames():
            # enforce proper utf-8 encoding
            # NOTE(review): the try: line around the decode is not shown here
            name.decode('utf-8')
            except UnicodeDecodeError:
                name = name.decode('iso8859-1').encode('utf-8')
2011 __all__.append('DBSource')
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
      1. exact match                      => 1.0-3
      2. bin-only NMU                     => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: list
    @param suites: list of suites to check in, default I{any}
    (NOTE(review): mutable default argument; appears to be read-only here)

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: returns 1 if a source with expected version is found, otherwise 0
    """
    from daklib.regexes import re_bin_only_nmu
    # strip a binNMU suffix to recover the original source version
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))

        # source must exist in 'suite' or a suite that is enhanced by 'suite'
        s = get_suite(suite, session)
        enhances_vcs = session.query(VersionCheck).filter(VersionCheck.suite==s).filter_by(check='Enhances')
        considered_suites = [ vc.reference for vc in enhances_vcs ]
        considered_suites.append(s)

        q = q.filter(DBSource.suites.any(Suite.suite_id.in_([s.suite_id for s in considered_suites])))

    # No source found so return not ok
2066 __all__.append('source_exists')
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """
    return session.query(Suite).filter(Suite.sources.any(source=source)).all()
2082 __all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @type source: str
    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect.  If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBSource objects for the given name (may be empty)
    """
    q = session.query(DBSource).filter_by(source=source)

    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)
2117 __all__.append('get_sources_from_name')
# FIXME: This function fails badly if it finds more than 1 source package and
# its implementation is trivial enough to be inlined.
def get_source_in_suite(source, suite, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite: string
    @param suite: the suite name

    @rtype: string
    @return: the version for I{source} in I{suite}
    """
    q = get_suite(suite, session).get_sources(source)
    except NoResultFound:
2146 __all__.append('get_source_in_suite')
def import_metadata_into_db(obj, session=None):
    """
    This routine works on either DBBinary or DBSource objects and imports
    their metadata into the database
    """
    fields = obj.read_control_fields()
    for k in fields.keys():
        # store values as plain strings, trying str(), then utf-8, then
        # iso8859-1 (the try: lines are not shown in this view)
        val = str(fields[k])
        except UnicodeEncodeError:
            # Fall back to UTF-8
            val = fields[k].encode('utf-8')
            except UnicodeEncodeError:
                # Finally try iso8859-1
                val = fields[k].encode('iso8859-1')
            # Otherwise we allow the exception to percolate up and we cause
            # a reject as someone is playing silly buggers

        obj.metadata[get_or_set_metadatakey(k, session)] = val

    session.commit_or_flush()
2173 __all__.append('import_metadata_into_db')
2175 ################################################################################
class SrcFormat(object):
    """A source package format name."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): this return presumably belongs to a __repr__ whose
        # def line is not shown in this view
        return '<SrcFormat %s>' % (self.format_name)
2184 __all__.append('SrcFormat')
2186 ################################################################################
# (display label, Suite attribute name) pairs used to render a textual
# description of a suite (see the SUITE_FIELDS loop in class Suite).
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
# Why the heck don't we have any UNIQUE constraints in table suite?
# TODO: Add UNIQUE constraints for appropriate columns.
class Suite(ORMObject):
    """A suite as stored in the database."""
    def __init__(self, suite_name = None, version = None):
        self.suite_name = suite_name
        self.version = version

    def properties(self):
        return ['suite_name', 'version', 'sources_count', 'binaries_count', \

    def not_null_constraints(self):
        return ['suite_name']

    def __eq__(self, val):
        """Allow comparing a Suite directly against a suite name string."""
        if isinstance(val, str):
            return (self.suite_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.suite_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    # NOTE(review): the following lines presumably form the body of a
    # details-style method whose def line is not shown in this view; it
    # renders "Label: value" lines for each SUITE_FIELDS entry.
        for disp, field in SUITE_FIELDS:
            val = getattr(self, field, None)
            ret.append("%s: %s" % (disp, val))
        return "\n".join(ret)

    def get_architectures(self, skipsrc=False, skipall=False):
        """
        Returns list of Architecture objects

        @type skipsrc: boolean
        @param skipsrc: Whether to skip returning the 'source' architecture entry

        @type skipall: boolean
        @param skipall: Whether to skip returning the 'all' architecture entry

        @rtype: list
        @return: list of Architecture objects for the given name (may be empty)
        """
        q = object_session(self).query(Architecture).with_parent(self)
        # NOTE(review): the skipsrc/skipall guards around these filters are
        # not shown in this view
        q = q.filter(Architecture.arch_string != 'source')
        q = q.filter(Architecture.arch_string != 'all')
        return q.order_by(Architecture.arch_string).all()

    def get_sources(self, source):
        """
        Returns a query object representing DBSource that is part of C{suite}.

          - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}

        @type source: string
        @param source: source package name

        @rtype: sqlalchemy.orm.query.Query
        @return: a query of DBSource
        """
        session = object_session(self)
        return session.query(DBSource).filter_by(source = source). \

    def get_overridesuite(self):
        """Return the Suite named by self.overridesuite; when it is None,
        presumably return self (that line is not shown in this view)."""
        if self.overridesuite is None:
        return object_session(self).query(Suite).filter_by(suite_name=self.overridesuite).one()

    # NOTE(review): this return presumably belongs to a path property whose
    # def line is not shown in this view
        return os.path.join(self.archive.path, 'dists', self.suite_name)
2290 __all__.append('Suite')
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Suite
    @return: Suite object for the requested suite name (None if not present)
    """
    q = session.query(Suite).filter_by(suite_name=suite)
    except NoResultFound:
2315 __all__.append('get_suite')
2317 ################################################################################
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name. The list is
    empty if suite does not exist.

    @type suite: str
    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Architecture objects for the given name (may be empty)
    """
    # delegate to Suite.get_architectures; an unknown suite yields None and
    # therefore an AttributeError, handled below
    return get_suite(suite, session).get_architectures(skipsrc, skipall)
    except AttributeError:
2349 __all__.append('get_suite_architectures')
2351 ################################################################################
class Uid(ORMObject):
    """A user id (uid string plus name) that fingerprints can map to."""
    def __init__(self, uid = None, name = None):

    def __eq__(self, val):
        """Allow comparing a Uid directly against a plain string."""
        if isinstance(val, str):
            return (self.uid == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.uid != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        return ['uid', 'name', 'fingerprint']

    def not_null_constraints(self):
2376 __all__.append('Uid')
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: Uid
    @return: the uid object for the given uidname
    """
    q = session.query(Uid).filter_by(uid=uidname)
    # NOTE(review): the insert of the new row is not fully shown in this view
    except NoResultFound:
        session.commit_or_flush()
2410 __all__.append('get_or_set_uid')
def get_uid_from_fingerprint(fpr, session=None):
    """Return the Uid joined to fingerprint C{fpr} (the NoResultFound
    handler below presumably yields None for unknown fingerprints)."""
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)
    except NoResultFound:
2422 __all__.append('get_uid_from_fingerprint')
2424 ################################################################################
class MetadataKey(ORMObject):
    """A metadata field name, shared by binary and source metadata rows."""
    def __init__(self, key = None):

    def properties(self):

    def not_null_constraints(self):
2436 __all__.append('MetadataKey')
def get_or_set_metadatakey(keyname, session=None):
    """
    Returns MetadataKey object for given C{keyname}.

    If no matching keyname is found, a row is inserted.

    @type keyname: string
    @param keyname: The keyname to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: MetadataKey
    @return: the metadatakey object for the given keyname
    """
    q = session.query(MetadataKey).filter_by(key=keyname)
    except NoResultFound:
        ret = MetadataKey(keyname)
        session.commit_or_flush()
2468 __all__.append('get_or_set_metadatakey')
2470 ################################################################################
class BinaryMetadata(ORMObject):
    """A (key, value) metadata row attached to a binary package."""
    def __init__(self, key = None, value = None, binary = None):
        # NOTE(review): the assignments of self.key/self.value are not shown
        self.binary = binary

    def properties(self):
        return ['binary', 'key', 'value']

    def not_null_constraints(self):
2484 __all__.append('BinaryMetadata')
2486 ################################################################################
class SourceMetadata(ORMObject):
    """A (key, value) metadata row attached to a source package."""
    def __init__(self, key = None, value = None, source = None):
        # NOTE(review): the assignments of self.key/self.value are not shown
        self.source = source

    def properties(self):
        return ['source', 'key', 'value']

    def not_null_constraints(self):
2500 __all__.append('SourceMetadata')
2502 ################################################################################
class VersionCheck(ORMObject):
    """A version-check rule linking a suite to a reference suite (e.g. the
    'Enhances' check used by source_exists)."""
    def __init__(self, *args, **kwargs):

    def properties(self):
        #return ['suite_id', 'check', 'reference_id']

    def not_null_constraints(self):
        return ['suite', 'check', 'reference']
2515 __all__.append('VersionCheck')
def get_version_checks(suite_name, check = None, session = None):
    """Return VersionCheck rows for C{suite_name}, optionally restricted to
    one C{check} kind."""
    suite = get_suite(suite_name, session)

    # Make sure that what we return is iterable so that list comprehensions
    # involving this don't cause a traceback
    q = session.query(VersionCheck).filter_by(suite=suite)
    # NOTE(review): the guard around this optional filter is not shown here
    q = q.filter_by(check=check)
2529 __all__.append('get_version_checks')
2531 ################################################################################
class DBConn(object):
    """
    database module init.

    Every instance shares the same __dict__ (Borg pattern, via
    __shared_state), so the engine, metadata, table reflection and mapper
    setup are performed only once per process.
    """
    def __init__(self, *args, **kwargs):
        # Borg pattern: every instance shares one state dictionary.
        self.__dict__ = self.__shared_state

        # Only the first instantiation in this process does the real setup.
        if not getattr(self, 'initialised', False):
            self.initialised = True
            # Python 2 idiom; dict.has_key() no longer exists on Python 3 —
            # would be `'debug' in kwargs` there.
            self.debug = kwargs.has_key('debug')
            # NOTE(review): the call that actually opens the connection
            # (presumably self.__createconn()) is elided in this excerpt.

    def __setuptables(self):
        # Names of real tables reflected from the live database; each becomes
        # an attribute tbl_<name>.  NOTE(review): the `tables = (` opener and
        # many list entries are elided in this excerpt.
        'acl_architecture_map',
        'acl_fingerprint_map',
        'binaries_metadata',
        'external_overrides',
        'extra_src_references',
        'files_archive_map',
        # TODO: the maintainer column in table override should be removed.
        'policy_queue_upload',
        'policy_queue_upload_binaries_map',
        'policy_queue_byhand_file',
        'signature_history',
        'suite_architectures',
        'suite_build_queue_copy',
        'suite_src_formats',

        # Names of database views, reflected read-only; each becomes an
        # attribute view_<name>.  NOTE(review): the `views = (` opener and
        # some entries are elided in this excerpt.
        'almost_obsolete_all_associations',
        'almost_obsolete_src_associations',
        'any_associations_source',
        'bin_associations_binaries',
        'binaries_suite_arch',
        'newest_all_associations',
        'newest_any_associations',
        'newest_src_association',
        'obsolete_all_associations',
        'obsolete_any_associations',
        'obsolete_any_by_all_associations',
        'obsolete_src_associations',
        'src_associations_bin',
        'src_associations_src',
        'suite_arch_by_name',

        for table_name in tables:
            # autoload reflects the schema from the database; useexisting
            # tolerates repeated setup against the same MetaData.
            table = Table(table_name, self.db_meta, \
                autoload=True, useexisting=True)
            setattr(self, 'tbl_%s' % table_name, table)

        for view_name in views:
            view = Table(view_name, self.db_meta, autoload=True)
            setattr(self, 'view_%s' % view_name, view)

    def __setupmappers(self):
        # Classical SQLAlchemy mapping: bind each ORM class to its reflected
        # table.  `extension = validator` hooks pre-flush validation into a
        # mapper (validator is defined elsewhere in this file).
        # NOTE(review): several `properties = dict(` wrapper lines and their
        # matching closing parentheses are elided in this excerpt.

        # Architecture <-> Suite many-to-many via suite_architectures.
        mapper(Architecture, self.tbl_architecture,
            properties = dict(arch_id = self.tbl_architecture.c.id,
                suites = relation(Suite, secondary=self.tbl_suite_architectures,
                    order_by=self.tbl_suite.c.suite_name,
                    backref=backref('architectures', order_by=self.tbl_architecture.c.arch_string))),
            extension = validator)

        mapper(ACL, self.tbl_acl,
            architectures = relation(Architecture, secondary=self.tbl_acl_architecture_map, collection_class=set),
            fingerprints = relation(Fingerprint, secondary=self.tbl_acl_fingerprint_map, collection_class=set),
            match_keyring = relation(Keyring, primaryjoin=(self.tbl_acl.c.match_keyring_id == self.tbl_keyrings.c.id)),
            per_source = relation(ACLPerSource, collection_class=set),

        # Two distinct joins onto fingerprint: the ACL holder and its creator.
        mapper(ACLPerSource, self.tbl_acl_per_source,
            acl = relation(ACL),
            fingerprint = relation(Fingerprint, primaryjoin=(self.tbl_acl_per_source.c.fingerprint_id == self.tbl_fingerprint.c.id)),
            created_by = relation(Fingerprint, primaryjoin=(self.tbl_acl_per_source.c.created_by_id == self.tbl_fingerprint.c.id)),

        mapper(Archive, self.tbl_archive,
            properties = dict(archive_id = self.tbl_archive.c.id,
                archive_name = self.tbl_archive.c.name))

        mapper(ArchiveFile, self.tbl_files_archive_map,
            properties = dict(archive = relation(Archive, backref='files'),
                component = relation(Component),
                file = relation(PoolFile, backref='archives')))

        mapper(BuildQueue, self.tbl_build_queue,
            properties = dict(queue_id = self.tbl_build_queue.c.id,
                suite = relation(Suite, primaryjoin=(self.tbl_build_queue.c.suite_id==self.tbl_suite.c.id))))

        # Binary packages: metadata rows are exposed as a dict keyed by the
        # metadata key name (attribute_mapped_collection).
        mapper(DBBinary, self.tbl_binaries,
            properties = dict(binary_id = self.tbl_binaries.c.id,
                package = self.tbl_binaries.c.package,
                version = self.tbl_binaries.c.version,
                maintainer_id = self.tbl_binaries.c.maintainer,
                maintainer = relation(Maintainer),
                source_id = self.tbl_binaries.c.source,
                source = relation(DBSource, backref='binaries'),
                arch_id = self.tbl_binaries.c.architecture,
                architecture = relation(Architecture),
                poolfile_id = self.tbl_binaries.c.file,
                poolfile = relation(PoolFile),
                binarytype = self.tbl_binaries.c.type,
                fingerprint_id = self.tbl_binaries.c.sig_fpr,
                fingerprint = relation(Fingerprint),
                install_date = self.tbl_binaries.c.install_date,
                suites = relation(Suite, secondary=self.tbl_bin_associations,
                    backref=backref('binaries', lazy='dynamic')),
                extra_sources = relation(DBSource, secondary=self.tbl_extra_src_references,
                    backref=backref('extra_binary_references', lazy='dynamic')),
                key = relation(BinaryMetadata, cascade='all',
                    collection_class=attribute_mapped_collection('key'))),
            extension = validator)

        mapper(Component, self.tbl_component,
            properties = dict(component_id = self.tbl_component.c.id,
                component_name = self.tbl_component.c.name),
            extension = validator)

        mapper(DBConfig, self.tbl_config,
            properties = dict(config_id = self.tbl_config.c.id))

        mapper(DSCFile, self.tbl_dsc_files,
            properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
                source_id = self.tbl_dsc_files.c.source,
                source = relation(DBSource),
                poolfile_id = self.tbl_dsc_files.c.file,
                poolfile = relation(PoolFile)))

        mapper(ExternalOverride, self.tbl_external_overrides,
            suite_id = self.tbl_external_overrides.c.suite,
            suite = relation(Suite),
            component_id = self.tbl_external_overrides.c.component,
            component = relation(Component)))

        mapper(PoolFile, self.tbl_files,
            properties = dict(file_id = self.tbl_files.c.id,
                filesize = self.tbl_files.c.size),
            extension = validator)

        mapper(Fingerprint, self.tbl_fingerprint,
            properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
                uid_id = self.tbl_fingerprint.c.uid,
                uid = relation(Uid),
                keyring_id = self.tbl_fingerprint.c.keyring,
                keyring = relation(Keyring),
                acl = relation(ACL)),
            extension = validator)

        # NOTE(review): this statement ends with a stray trailing comma,
        # turning it into a one-element tuple expression.  Harmless at
        # runtime (the mapper call still executes), but likely unintended.
        mapper(Keyring, self.tbl_keyrings,
            properties = dict(keyring_name = self.tbl_keyrings.c.name,
                keyring_id = self.tbl_keyrings.c.id,
                acl = relation(ACL, primaryjoin=(self.tbl_keyrings.c.acl_id == self.tbl_acl.c.id)))),

        mapper(DBChange, self.tbl_changes,
            properties = dict(change_id = self.tbl_changes.c.id,
                seen = self.tbl_changes.c.seen,
                source = self.tbl_changes.c.source,
                binaries = self.tbl_changes.c.binaries,
                architecture = self.tbl_changes.c.architecture,
                distribution = self.tbl_changes.c.distribution,
                urgency = self.tbl_changes.c.urgency,
                maintainer = self.tbl_changes.c.maintainer,
                changedby = self.tbl_changes.c.changedby,
                date = self.tbl_changes.c.date,
                version = self.tbl_changes.c.version))

        # A maintainer relates to sources twice: as Maintainer and Changed-By.
        mapper(Maintainer, self.tbl_maintainer,
            properties = dict(maintainer_id = self.tbl_maintainer.c.id,
                maintains_sources = relation(DBSource, backref='maintainer',
                    primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
                changed_sources = relation(DBSource, backref='changedby',
                    primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
            extension = validator)

        mapper(NewComment, self.tbl_new_comments,
            properties = dict(comment_id = self.tbl_new_comments.c.id,
                policy_queue = relation(PolicyQueue)))

        # Overrides hang off suite/component/priority/section/type, each with
        # a lazy 'overrides' backref collection.
        mapper(Override, self.tbl_override,
            properties = dict(suite_id = self.tbl_override.c.suite,
                suite = relation(Suite, \
                    backref=backref('overrides', lazy='dynamic')),
                package = self.tbl_override.c.package,
                component_id = self.tbl_override.c.component,
                component = relation(Component, \
                    backref=backref('overrides', lazy='dynamic')),
                priority_id = self.tbl_override.c.priority,
                priority = relation(Priority, \
                    backref=backref('overrides', lazy='dynamic')),
                section_id = self.tbl_override.c.section,
                section = relation(Section, \
                    backref=backref('overrides', lazy='dynamic')),
                overridetype_id = self.tbl_override.c.type,
                overridetype = relation(OverrideType, \
                    backref=backref('overrides', lazy='dynamic'))))

        mapper(OverrideType, self.tbl_override_type,
            properties = dict(overridetype = self.tbl_override_type.c.type,
                overridetype_id = self.tbl_override_type.c.id))

        mapper(PolicyQueue, self.tbl_policy_queue,
            properties = dict(policy_queue_id = self.tbl_policy_queue.c.id,
                suite = relation(Suite, primaryjoin=(self.tbl_policy_queue.c.suite_id == self.tbl_suite.c.id))))

        mapper(PolicyQueueUpload, self.tbl_policy_queue_upload,
            changes = relation(DBChange),
            policy_queue = relation(PolicyQueue, backref='uploads'),
            target_suite = relation(Suite),
            source = relation(DBSource),
            binaries = relation(DBBinary, secondary=self.tbl_policy_queue_upload_binaries_map),

        mapper(PolicyQueueByhandFile, self.tbl_policy_queue_byhand_file,
            upload = relation(PolicyQueueUpload, backref='byhand'),

        mapper(Priority, self.tbl_priority,
            properties = dict(priority_id = self.tbl_priority.c.id))

        mapper(Section, self.tbl_section,
            properties = dict(section_id = self.tbl_section.c.id,
                section=self.tbl_section.c.section))

        # No extra properties: a plain one-to-one mapping onto the table.
        mapper(SignatureHistory, self.tbl_signature_history)

        # Source packages, mirroring the DBBinary mapping above; metadata is
        # again a dict keyed by metadata key name.
        mapper(DBSource, self.tbl_source,
            properties = dict(source_id = self.tbl_source.c.id,
                version = self.tbl_source.c.version,
                maintainer_id = self.tbl_source.c.maintainer,
                poolfile_id = self.tbl_source.c.file,
                poolfile = relation(PoolFile),
                fingerprint_id = self.tbl_source.c.sig_fpr,
                fingerprint = relation(Fingerprint),
                changedby_id = self.tbl_source.c.changedby,
                srcfiles = relation(DSCFile,
                    primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
                suites = relation(Suite, secondary=self.tbl_src_associations,
                    backref=backref('sources', lazy='dynamic')),
                uploaders = relation(Maintainer,
                    secondary=self.tbl_src_uploaders),
                key = relation(SourceMetadata, cascade='all',
                    collection_class=attribute_mapped_collection('key'))),
            extension = validator)

        mapper(SrcFormat, self.tbl_src_format,
            properties = dict(src_format_id = self.tbl_src_format.c.id,
                format_name = self.tbl_src_format.c.format_name))

        # Suites join PolicyQueue twice: the regular policy queue and the
        # NEW queue; explicit primaryjoins disambiguate the two FKs.
        mapper(Suite, self.tbl_suite,
            properties = dict(suite_id = self.tbl_suite.c.id,
                policy_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.policy_queue_id == self.tbl_policy_queue.c.id)),
                new_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.new_queue_id == self.tbl_policy_queue.c.id)),
                copy_queues = relation(BuildQueue,
                    secondary=self.tbl_suite_build_queue_copy),
                srcformats = relation(SrcFormat, secondary=self.tbl_suite_src_formats,
                    backref=backref('suites', lazy='dynamic')),
                archive = relation(Archive, backref='suites'),
                acls = relation(ACL, secondary=self.tbl_suite_acl_map, collection_class=set)),
            extension = validator)

        mapper(Uid, self.tbl_uid,
            properties = dict(uid_id = self.tbl_uid.c.id,
                fingerprint = relation(Fingerprint)),
            extension = validator)

        # Contents listings cascade with their owning binary/source and are
        # exposed lazily ('dynamic') since they can be very large.
        mapper(BinContents, self.tbl_bin_contents,
            binary = relation(DBBinary,
                backref=backref('contents', lazy='dynamic', cascade='all')),
            file = self.tbl_bin_contents.c.file))

        mapper(SrcContents, self.tbl_src_contents,
            source = relation(DBSource,
                backref=backref('contents', lazy='dynamic', cascade='all')),
            file = self.tbl_src_contents.c.file))

        mapper(MetadataKey, self.tbl_metadata_keys,
            key_id = self.tbl_metadata_keys.c.key_id,
            key = self.tbl_metadata_keys.c.key))

        mapper(BinaryMetadata, self.tbl_binaries_metadata,
            binary_id = self.tbl_binaries_metadata.c.bin_id,
            binary = relation(DBBinary),
            key_id = self.tbl_binaries_metadata.c.key_id,
            key = relation(MetadataKey),
            value = self.tbl_binaries_metadata.c.value))

        mapper(SourceMetadata, self.tbl_source_metadata,
            source_id = self.tbl_source_metadata.c.src_id,
            source = relation(DBSource),
            key_id = self.tbl_source_metadata.c.key_id,
            key = relation(MetadataKey),
            value = self.tbl_source_metadata.c.value))

        # VersionCheck joins the suite table twice (suite vs. reference); the
        # reference suite is eagerly loaded (lazy='joined').
        mapper(VersionCheck, self.tbl_version_check,
            suite_id = self.tbl_version_check.c.suite,
            suite = relation(Suite, primaryjoin=self.tbl_version_check.c.suite==self.tbl_suite.c.id),
            reference_id = self.tbl_version_check.c.reference,
            reference = relation(Suite, primaryjoin=self.tbl_version_check.c.reference==self.tbl_suite.c.id, lazy='joined')))

    ## Connection functions
    def __createconn(self):
        # Build a PostgreSQL connection string from dak's configuration and
        # set up engine, metadata, reflected tables and mappers.
        from config import Config
        # NOTE(review): the `cnf = Config()` assignment is elided here.
        if cnf.has_key("DB::Service"):
            # pg_service.conf service name; requires the monkey-patched
            # dialect installed below to be understood by psycopg2.
            connstr = "postgresql://service=%s" % cnf["DB::Service"]
        elif cnf.has_key("DB::Host"):
            # TCP/IP connection to a (potentially remote) host.
            connstr = "postgresql://%s" % cnf["DB::Host"]
            if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
                connstr += ":%s" % cnf["DB::Port"]
            connstr += "/%s" % cnf["DB::Name"]
        # NOTE(review): the `else:` header is elided here — this branch is the
        # local unix-socket connection case.
            connstr = "postgresql:///%s" % cnf["DB::Name"]
            if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
                connstr += "?port=%s" % cnf["DB::Port"]

        # Optional engine tuning from configuration.
        engine_args = { 'echo': self.debug }
        if cnf.has_key('DB::PoolSize'):
            engine_args['pool_size'] = int(cnf['DB::PoolSize'])
        if cnf.has_key('DB::MaxOverflow'):
            engine_args['max_overflow'] = int(cnf['DB::MaxOverflow'])
        # use_native_unicode only exists on SQLAlchemy newer than 0.5.
        if sa_major_version != '0.5' and cnf.has_key('DB::Unicode') and \
            cnf['DB::Unicode'] == 'false':
            engine_args['use_native_unicode'] = False

        # Monkey patch a new dialect in in order to support service= syntax
        # NOTE(review): this replaces the dialect process-wide, affecting any
        # other postgresql:// engine created in this process.
        import sqlalchemy.dialects.postgresql
        from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
        class PGDialect_psycopg2_dak(PGDialect_psycopg2):
            def create_connect_args(self, url):
                if str(url).startswith('postgresql://service='):
                    # Strip the 21-character 'postgresql://service=' prefix
                    # and hand psycopg2 a bare libpq "service=<name>" DSN.
                    servicename = str(url)[21:]
                    return (['service=%s' % servicename], {})
                # Anything else: stock psycopg2 behaviour.
                return PGDialect_psycopg2.create_connect_args(self, url)

        sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak

        # NOTE(review): the `try:` header guarding engine creation is elided
        # here (see the matching `except OperationalError` below).
        self.db_pg = create_engine(connstr, **engine_args)
        self.db_meta = MetaData()
        self.db_meta.bind = self.db_pg
        self.db_smaker = sessionmaker(bind=self.db_pg,
        # NOTE(review): remaining sessionmaker keyword arguments are elided
        # in this excerpt.

        self.__setuptables()
        self.__setupmappers()

        except OperationalError as e:
            # Database unreachable: abort the whole dak invocation.
            utils.fubar("Cannot connect to database (%s)" % str(e))

        # Remember which process created the connection so session() can
        # detect use after fork().
        self.pid = os.getpid()

    def session(self, work_mem = 0):
        """
        Returns a new session object. If a work_mem parameter is provided a new
        transaction is started and the work_mem parameter is set for this
        transaction. The work_mem parameter is measured in MB. A default value
        will be used if the parameter is not set.
        """
        # reinitialize DBConn in new processes
        if self.pid != os.getpid():
            # NOTE(review): the reconnect logic for forked children is elided
            # in this excerpt.

        session = self.db_smaker()
        # NOTE(review): presumably guarded by `if work_mem > 0:` in the full
        # source (elided here); SET LOCAL only affects the open transaction.
        session.execute("SET LOCAL work_mem TO '%d MB'" % work_mem)

__all__.append('DBConn')