5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
38 from os.path import normpath
50 import simplejson as json
52 from datetime import datetime, timedelta
53 from errno import ENOENT
54 from tempfile import mkstemp, mkdtemp
55 from subprocess import Popen, PIPE
56 from tarfile import TarFile
58 from inspect import getargspec
61 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
63 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
64 backref, MapperExtension, EXT_CONTINUE, object_mapper, clear_mappers
65 from sqlalchemy import types as sqltypes
66 from sqlalchemy.orm.collections import attribute_mapped_collection
67 from sqlalchemy.ext.associationproxy import association_proxy
69 # Don't remove this, we re-export the exceptions to scripts which import us
70 from sqlalchemy.exc import *
71 from sqlalchemy.orm.exc import NoResultFound
73 # Only import Config until Queue stuff is changed to store its config
75 from config import Config
76 from textutils import fix_maintainer
77 from dak_exceptions import DBUpdateError, NoSourceFieldError, FileExistsError
79 # suppress some deprecation warnings in squeeze related to sqlalchemy
81 warnings.filterwarnings('ignore', \
82 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
84 warnings.filterwarnings('ignore', \
85 "Predicate of partial index .* ignored during reflection", \
89 ################################################################################
91 # Patch in support for the debversion field type so that it works during
95 # that is for sqlalchemy 0.6
96 UserDefinedType = sqltypes.UserDefinedType
98 # this one for sqlalchemy 0.5
99 UserDefinedType = sqltypes.TypeEngine
101 class DebVersion(UserDefinedType):
102 def get_col_spec(self):
105 def bind_processor(self, dialect):
108 # ' = None' is needed for sqlalchemy 0.5:
109 def result_processor(self, dialect, coltype = None):
112 sa_major_version = sqlalchemy.__version__[0:3]
113 if sa_major_version in ["0.5", "0.6", "0.7", "0.8"]:
114 from sqlalchemy.databases import postgres
115 postgres.ischema_names['debversion'] = DebVersion
117 raise Exception("dak only ported to SQLA versions 0.5 to 0.8. See daklib/dbconn.py")
119 ################################################################################
121 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
123 ################################################################################
125 def session_wrapper(fn):
127 Wrapper around common ".., session=None):" handling. If the wrapped
128 function is called without passing 'session', we create a local one
129 and destroy it when the function ends.
131 Also attaches a commit_or_flush method to the session; if we created a
132 local session, this is a synonym for session.commit(), otherwise it is a
133 synonym for session.flush().
136 def wrapped(*args, **kwargs):
137 private_transaction = False
139 # Find the session object
140 session = kwargs.get('session')
143 if len(args) <= len(getargspec(fn)[0]) - 1:
144 # No session specified as last argument or in kwargs
145 private_transaction = True
146 session = kwargs['session'] = DBConn().session()
148 # Session is last argument in args
152 session = args[-1] = DBConn().session()
153 private_transaction = True
155 if private_transaction:
156 session.commit_or_flush = session.commit
158 session.commit_or_flush = session.flush
161 return fn(*args, **kwargs)
163 if private_transaction:
164 # We created a session; close it.
167 wrapped.__doc__ = fn.__doc__
168 wrapped.func_name = fn.func_name
172 __all__.append('session_wrapper')
174 ################################################################################
176 class ORMObject(object):
178 ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
179 derived classes must implement the properties() method.
182 def properties(self):
184 This method should be implemented by all derived classes and returns a
185 list of the important properties. The properties 'created' and
186 'modified' will be added automatically. A suffix '_count' should be
187 added to properties that are lists or query objects. The most important
188 property name should be returned as the first element in the list
189 because it is used by repr().
195 Returns a JSON representation of the object based on the properties
196 returned from the properties() method.
199 # add created and modified
200 all_properties = self.properties() + ['created', 'modified']
201 for property in all_properties:
202 # check for list or query
203 if property[-6:] == '_count':
204 real_property = property[:-6]
205 if not hasattr(self, real_property):
207 value = getattr(self, real_property)
208 if hasattr(value, '__len__'):
211 elif hasattr(value, 'count'):
212 # query (but not during validation)
213 if self.in_validation:
215 value = value.count()
217 raise KeyError('Do not understand property %s.' % property)
219 if not hasattr(self, property):
222 value = getattr(self, property)
226 elif isinstance(value, ORMObject):
227 # use repr() for ORMObject types
230 # we want a string for all other types because json cannot
233 data[property] = value
234 return json.dumps(data)
238 Returns the name of the class.
240 return type(self).__name__
244 Returns a short string representation of the object using the first
245 element from the properties() method.
247 primary_property = self.properties()[0]
248 value = getattr(self, primary_property)
249 return '<%s %s>' % (self.classname(), str(value))
253 Returns a human readable form of the object using the properties()
256 return '<%s %s>' % (self.classname(), self.json())
258 def not_null_constraints(self):
260 Returns a list of properties that must be not NULL. Derived classes
261 should override this method if needed.
265 validation_message = \
266 "Validation failed because property '%s' must not be empty in object\n%s"
268 in_validation = False
272 This function validates the not NULL constraints as returned by
273 not_null_constraints(). It raises the DBUpdateError exception if
276 for property in self.not_null_constraints():
277 # TODO: It is a bit awkward that the mapper configuration allow
278 # directly setting the numeric _id columns. We should get rid of it
280 if hasattr(self, property + '_id') and \
281 getattr(self, property + '_id') is not None:
283 if not hasattr(self, property) or getattr(self, property) is None:
284 # str() might lead to races due to a 2nd flush
285 self.in_validation = True
286 message = self.validation_message % (property, str(self))
287 self.in_validation = False
288 raise DBUpdateError(message)
292 def get(cls, primary_key, session = None):
294 This is a support function that allows getting an object by its primary
297 Architecture.get(3[, session])
299 instead of the more verbose
301 session.query(Architecture).get(3)
303 return session.query(cls).get(primary_key)
305 def session(self, replace = False):
307 Returns the current session that is associated with the object. May
308 return None if the object is in a detached state.
311 return object_session(self)
313 def clone(self, session = None):
315 Clones the current object in a new session and returns the new clone. A
316 fresh session is created if the optional session parameter is not
317 provided. The function will fail if a session is provided and has
320 RATIONALE: SQLAlchemy's session is not thread safe. This method clones
321 an existing object to allow several threads to work with their own
322 instances of an ORMObject.
324 WARNING: Only persistent (committed) objects can be cloned. Changes
325 made to the original object that are not committed yet will get lost.
326 The session of the new object will always be rolled back to avoid
330 if self.session() is None:
331 raise RuntimeError( \
332 'Method clone() failed for detached object:\n%s' % self)
333 self.session().flush()
334 mapper = object_mapper(self)
335 primary_key = mapper.primary_key_from_instance(self)
336 object_class = self.__class__
338 session = DBConn().session()
339 elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
340 raise RuntimeError( \
341 'Method clone() failed due to unflushed changes in session.')
342 new_object = session.query(object_class).get(primary_key)
344 if new_object is None:
345 raise RuntimeError( \
346 'Method clone() failed for non-persistent object:\n%s' % self)
349 __all__.append('ORMObject')
351 ################################################################################
353 class Validator(MapperExtension):
355 This class calls the validate() method for each instance for the
356 'before_update' and 'before_insert' events. A global object validator is
357 used for configuring the individual mappers.
360 def before_update(self, mapper, connection, instance):
364 def before_insert(self, mapper, connection, instance):
368 validator = Validator()
370 ################################################################################
372 class ACL(ORMObject):
374 return "<ACL {0}>".format(self.name)
376 __all__.append('ACL')
378 class ACLPerSource(ORMObject):
380 return "<ACLPerSource acl={0} fingerprint={1} source={2} reason={3}>".format(self.acl.name, self.fingerprint.fingerprint, self.source, self.reason)
382 __all__.append('ACLPerSource')
384 ################################################################################
class Architecture(ORMObject):
    """ORM class for the architecture table; comparable with plain strings."""

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Allow comparing an Architecture directly against an architecture
        # name given as a plain string.
        if not isinstance(val, str):
            # Tell Python to fall back to the normal comparison operator.
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        # Mirror of __eq__ for string operands.
        if not isinstance(val, str):
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # 'arch_string' first: it is the primary property used by repr().
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']
409 __all__.append('Architecture')
412 def get_architecture(architecture, session=None):
414 Returns database id for given C{architecture}.
416 @type architecture: string
417 @param architecture: The name of the architecture
419 @type session: Session
420 @param session: Optional SQLA session object (a temporary one will be
421 generated if not supplied)
424 @return: Architecture object for the given arch (None if not present)
427 q = session.query(Architecture).filter_by(arch_string=architecture)
431 except NoResultFound:
434 __all__.append('get_architecture')
436 # TODO: should be removed because the implementation is too trivial
438 def get_architecture_suites(architecture, session=None):
440 Returns list of Suite objects for given C{architecture} name
442 @type architecture: str
443 @param architecture: Architecture name to search for
445 @type session: Session
446 @param session: Optional SQL session object (a temporary one will be
447 generated if not supplied)
450 @return: list of Suite objects for the given name (may be empty)
453 return get_architecture(architecture, session).suites
455 __all__.append('get_architecture_suites')
457 ################################################################################
459 class Archive(object):
460 def __init__(self, *args, **kwargs):
464 return '<Archive %s>' % self.archive_name
466 __all__.append('Archive')
469 def get_archive(archive, session=None):
471 returns database id for given C{archive}.
473 @type archive: string
474 @param archive: the name of the archive
476 @type session: Session
477 @param session: Optional SQLA session object (a temporary one will be
478 generated if not supplied)
481 @return: Archive object for the given name (None if not present)
484 archive = archive.lower()
486 q = session.query(Archive).filter_by(archive_name=archive)
490 except NoResultFound:
493 __all__.append('get_archive')
495 ################################################################################
497 class ArchiveFile(object):
498 def __init__(self, archive=None, component=None, file=None):
499 self.archive = archive
500 self.component = component
504 return os.path.join(self.archive.path, 'pool', self.component.component_name, self.file.filename)
506 __all__.append('ArchiveFile')
508 ################################################################################
510 class BinContents(ORMObject):
511 def __init__(self, file = None, binary = None):
515 def properties(self):
516 return ['file', 'binary']
518 __all__.append('BinContents')
520 ################################################################################
def subprocess_setup():
    # Restore the default SIGPIPE disposition.  The Python runtime installs
    # its own SIGPIPE handling at startup, which is usually not what
    # non-Python child processes expect.
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
527 class DBBinary(ORMObject):
528 def __init__(self, package = None, source = None, version = None, \
529 maintainer = None, architecture = None, poolfile = None, \
530 binarytype = 'deb', fingerprint=None):
531 self.package = package
533 self.version = version
534 self.maintainer = maintainer
535 self.architecture = architecture
536 self.poolfile = poolfile
537 self.binarytype = binarytype
538 self.fingerprint = fingerprint
542 return self.binary_id
544 def properties(self):
545 return ['package', 'version', 'maintainer', 'source', 'architecture', \
546 'poolfile', 'binarytype', 'fingerprint', 'install_date', \
547 'suites_count', 'binary_id', 'contents_count', 'extra_sources']
549 def not_null_constraints(self):
550 return ['package', 'version', 'maintainer', 'source', 'poolfile', \
553 metadata = association_proxy('key', 'value')
555 def scan_contents(self):
557 Yields the contents of the package. Only regular files are yielded and
558 the path names are normalized after converting them from either utf-8
559 or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the
560 package does not contain any regular file.
562 fullpath = self.poolfile.fullpath
563 dpkg = Popen(['dpkg-deb', '--fsys-tarfile', fullpath], stdout = PIPE,
564 preexec_fn = subprocess_setup)
565 tar = TarFile.open(fileobj = dpkg.stdout, mode = 'r|')
566 for member in tar.getmembers():
567 if not member.isdir():
568 name = normpath(member.name)
569 # enforce proper utf-8 encoding
572 except UnicodeDecodeError:
573 name = name.decode('iso8859-1').encode('utf-8')
579 def read_control(self):
581 Reads the control information from a binary.
584 @return: stanza text of the control section.
587 fullpath = self.poolfile.fullpath
588 deb_file = open(fullpath, 'r')
589 stanza = utils.deb_extract_control(deb_file)
594 def read_control_fields(self):
596 Reads the control information from a binary and return
600 @return: fields of the control section as a dictionary.
603 stanza = self.read_control()
604 return apt_pkg.TagSection(stanza)
606 __all__.append('DBBinary')
609 def get_suites_binary_in(package, session=None):
611 Returns list of Suite objects which given C{package} name is in
614 @param package: DBBinary package name to search for
617 @return: list of Suite objects for the given package
620 return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()
622 __all__.append('get_suites_binary_in')
625 def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
627 Returns the component name of the newest binary package in suite_list or
628 None if no package is found. The result can be optionally filtered by a list
629 of architecture names.
632 @param package: DBBinary package name to search for
634 @type suite_list: list of str
635 @param suite_list: list of suite_name items
637 @type arch_list: list of str
638 @param arch_list: optional list of arch_string items that defaults to []
640 @rtype: str or NoneType
641 @return: name of component or None
644 q = session.query(DBBinary).filter_by(package = package). \
645 join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
646 if len(arch_list) > 0:
647 q = q.join(DBBinary.architecture). \
648 filter(Architecture.arch_string.in_(arch_list))
649 binary = q.order_by(desc(DBBinary.version)).first()
653 return binary.poolfile.component.component_name
655 __all__.append('get_component_by_package_suite')
657 ################################################################################
659 class BuildQueue(object):
660 def __init__(self, *args, **kwargs):
664 return '<BuildQueue %s>' % self.queue_name
666 __all__.append('BuildQueue')
668 ################################################################################
class Component(ORMObject):
    """ORM class for the component table; comparable with plain strings."""

    def __init__(self, component_name = None):
        self.component_name = component_name

    def __eq__(self, val):
        # Allow comparing a Component directly against a component name
        # given as a plain string.
        if not isinstance(val, str):
            # Tell Python to fall back to the normal comparison operator.
            return NotImplemented
        return self.component_name == val

    def __ne__(self, val):
        # Mirror of __eq__ for string operands.
        if not isinstance(val, str):
            return NotImplemented
        return self.component_name != val

    def properties(self):
        # 'component_name' first: it is the primary property used by repr().
        return ['component_name', 'component_id', 'description',
                'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        return ['component_name']
694 __all__.append('Component')
697 def get_component(component, session=None):
699 Returns database id for given C{component}.
701 @type component: string
702 @param component: The name of the component
705 @return: the database id for the given component
708 component = component.lower()
710 q = session.query(Component).filter_by(component_name=component)
714 except NoResultFound:
717 __all__.append('get_component')
720 def get_mapped_component(component_name, session=None):
721 """get component after mappings
723 Evaluate component mappings from ComponentMappings in dak.conf for the
724 given component name.
726 @todo: ansgar wants to get rid of this. It's currently only used for
729 @type component_name: str
730 @param component_name: component name
732 @param session: database session
734 @rtype: L{daklib.dbconn.Component} or C{None}
735 @return: component after applying maps or C{None}
738 for m in cnf.value_list("ComponentMappings"):
739 (src, dst) = m.split()
740 if component_name == src:
742 component = session.query(Component).filter_by(component_name=component_name).first()
745 __all__.append('get_mapped_component')
748 def get_component_names(session=None):
750 Returns list of strings of component names.
753 @return: list of strings of component names
756 return [ x.component_name for x in session.query(Component).all() ]
758 __all__.append('get_component_names')
760 ################################################################################
762 class DBConfig(object):
763 def __init__(self, *args, **kwargs):
767 return '<DBConfig %s>' % self.name
769 __all__.append('DBConfig')
771 ################################################################################
774 def get_or_set_contents_file_id(filename, session=None):
776 Returns database id for given filename.
778 If no matching file is found, a row is inserted.
780 @type filename: string
781 @param filename: The filename
782 @type session: SQLAlchemy
783 @param session: Optional SQL session object (a temporary one will be
784 generated if not supplied). If not passed, a commit will be performed at
785 the end of the function, otherwise the caller is responsible for committing.
788 @return: the database id for the given filename
791 q = session.query(ContentFilename).filter_by(filename=filename)
794 ret = q.one().cafilename_id
795 except NoResultFound:
796 cf = ContentFilename()
797 cf.filename = filename
799 session.commit_or_flush()
800 ret = cf.cafilename_id
804 __all__.append('get_or_set_contents_file_id')
807 def get_contents(suite, overridetype, section=None, session=None):
809 Returns contents for a suite / overridetype combination, limiting
810 to a section if not None.
813 @param suite: Suite object
815 @type overridetype: OverrideType
816 @param overridetype: OverrideType object
818 @type section: Section
819 @param section: Optional section object to limit results to
821 @type session: SQLAlchemy
822 @param session: Optional SQL session object (a temporary one will be
823 generated if not supplied)
826 @return: ResultsProxy object set up to return tuples of (filename, section,
830 # find me all of the contents for a given suite
831 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
835 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
836 JOIN content_file_names n ON (c.filename=n.id)
837 JOIN binaries b ON (b.id=c.binary_pkg)
838 JOIN override o ON (o.package=b.package)
839 JOIN section s ON (s.id=o.section)
840 WHERE o.suite = :suiteid AND o.type = :overridetypeid
841 AND b.type=:overridetypename"""
843 vals = {'suiteid': suite.suite_id,
844 'overridetypeid': overridetype.overridetype_id,
845 'overridetypename': overridetype.overridetype}
847 if section is not None:
848 contents_q += " AND s.id = :sectionid"
849 vals['sectionid'] = section.section_id
851 contents_q += " ORDER BY fn"
853 return session.execute(contents_q, vals)
855 __all__.append('get_contents')
857 ################################################################################
859 class ContentFilepath(object):
860 def __init__(self, *args, **kwargs):
864 return '<ContentFilepath %s>' % self.filepath
866 __all__.append('ContentFilepath')
869 def get_or_set_contents_path_id(filepath, session=None):
871 Returns database id for given path.
873 If no matching file is found, a row is inserted.
875 @type filepath: string
876 @param filepath: The filepath
878 @type session: SQLAlchemy
879 @param session: Optional SQL session object (a temporary one will be
880 generated if not supplied). If not passed, a commit will be performed at
881 the end of the function, otherwise the caller is responsible for committing.
884 @return: the database id for the given path
887 q = session.query(ContentFilepath).filter_by(filepath=filepath)
890 ret = q.one().cafilepath_id
891 except NoResultFound:
892 cf = ContentFilepath()
893 cf.filepath = filepath
895 session.commit_or_flush()
896 ret = cf.cafilepath_id
900 __all__.append('get_or_set_contents_path_id')
902 ################################################################################
904 class ContentAssociation(object):
905 def __init__(self, *args, **kwargs):
909 return '<ContentAssociation %s>' % self.ca_id
911 __all__.append('ContentAssociation')
913 def insert_content_paths(binary_id, fullpaths, session=None):
915 Make sure given path is associated with given binary id
918 @param binary_id: the id of the binary
919 @type fullpaths: list
920 @param fullpaths: the list of paths of the file being associated with the binary
921 @type session: SQLAlchemy session
922 @param session: Optional SQLAlchemy session. If this is passed, the caller
923 is responsible for ensuring a transaction has begun and committing the
924 results or rolling back based on the result code. If not passed, a commit
925 will be performed at the end of the function, otherwise the caller is
926 responsible for committing.
928 @return: True upon success
933 session = DBConn().session()
938 def generate_path_dicts():
939 for fullpath in fullpaths:
940 if fullpath.startswith( './' ):
941 fullpath = fullpath[2:]
943 yield {'filename':fullpath, 'id': binary_id }
945 for d in generate_path_dicts():
946 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
955 traceback.print_exc()
957 # Only rollback if we set up the session ourself
964 __all__.append('insert_content_paths')
966 ################################################################################
968 class DSCFile(object):
969 def __init__(self, *args, **kwargs):
973 return '<DSCFile %s>' % self.dscfile_id
975 __all__.append('DSCFile')
978 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
980 Returns a list of DSCFiles which may be empty
982 @type dscfile_id: int (optional)
983 @param dscfile_id: the dscfile_id of the DSCFiles to find
985 @type source_id: int (optional)
986 @param source_id: the source id related to the DSCFiles to find
988 @type poolfile_id: int (optional)
989 @param poolfile_id: the poolfile id related to the DSCFiles to find
992 @return: Possibly empty list of DSCFiles
995 q = session.query(DSCFile)
997 if dscfile_id is not None:
998 q = q.filter_by(dscfile_id=dscfile_id)
1000 if source_id is not None:
1001 q = q.filter_by(source_id=source_id)
1003 if poolfile_id is not None:
1004 q = q.filter_by(poolfile_id=poolfile_id)
1008 __all__.append('get_dscfiles')
1010 ################################################################################
1012 class ExternalOverride(ORMObject):
1013 def __init__(self, *args, **kwargs):
1017 return '<ExternalOverride %s = %s: %s>' % (self.package, self.key, self.value)
1019 __all__.append('ExternalOverride')
1021 ################################################################################
1023 class PoolFile(ORMObject):
1024 def __init__(self, filename = None, filesize = -1, \
1026 self.filename = filename
1027 self.filesize = filesize
1028 self.md5sum = md5sum
1032 session = DBConn().session().object_session(self)
1033 af = session.query(ArchiveFile).join(Archive) \
1034 .filter(ArchiveFile.file == self) \
1035 .order_by(Archive.tainted.desc()).first()
1039 def component(self):
1040 session = DBConn().session().object_session(self)
1041 component_id = session.query(ArchiveFile.component_id).filter(ArchiveFile.file == self) \
1042 .group_by(ArchiveFile.component_id).one()
1043 return session.query(Component).get(component_id)
1047 return os.path.basename(self.filename)
1049 def is_valid(self, filesize = -1, md5sum = None):
1050 return self.filesize == long(filesize) and self.md5sum == md5sum
1052 def properties(self):
1053 return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
1054 'sha256sum', 'source', 'binary', 'last_used']
1056 def not_null_constraints(self):
1057 return ['filename', 'md5sum']
1059 def identical_to(self, filename):
1061 compare size and hash with the given file
1064 @return: true if the given file has the same size and hash as this object; false otherwise
1066 st = os.stat(filename)
1067 if self.filesize != st.st_size:
1070 f = open(filename, "r")
1071 sha256sum = apt_pkg.sha256sum(f)
1072 if sha256sum != self.sha256sum:
1077 __all__.append('PoolFile')
1080 def get_poolfile_like_name(filename, session=None):
1082 Returns an array of PoolFile objects which are like the given name
1084 @type filename: string
1085 @param filename: the filename of the file to check against the DB
1088 @return: array of PoolFile objects
1091 # TODO: There must be a way of properly using bind parameters with %FOO%
1092 q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1096 __all__.append('get_poolfile_like_name')
1098 ################################################################################
1100 class Fingerprint(ORMObject):
1101 def __init__(self, fingerprint = None):
1102 self.fingerprint = fingerprint
1104 def properties(self):
1105 return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
1108 def not_null_constraints(self):
1109 return ['fingerprint']
1111 __all__.append('Fingerprint')
1114 def get_fingerprint(fpr, session=None):
1116 Returns Fingerprint object for given fpr.
1119 @param fpr: The fpr to find / add
1121 @type session: SQLAlchemy
1122 @param session: Optional SQL session object (a temporary one will be
1123 generated if not supplied).
1126 @return: the Fingerprint object for the given fpr or None
1129 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1133 except NoResultFound:
1138 __all__.append('get_fingerprint')
1141 def get_or_set_fingerprint(fpr, session=None):
1143 Returns Fingerprint object for given fpr.
1145 If no matching fpr is found, a row is inserted.
1148 @param fpr: The fpr to find / add
1150 @type session: SQLAlchemy
1151 @param session: Optional SQL session object (a temporary one will be
1152 generated if not supplied). If not passed, a commit will be performed at
1153 the end of the function, otherwise the caller is responsible for commiting.
1154 A flush will be performed either way.
1157 @return: the Fingerprint object for the given fpr
1160 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1164 except NoResultFound:
1165 fingerprint = Fingerprint()
1166 fingerprint.fingerprint = fpr
1167 session.add(fingerprint)
1168 session.commit_or_flush()
1173 __all__.append('get_or_set_fingerprint')
1175 ################################################################################
1177 # Helper routine for Keyring class
1178 def get_ldap_name(entry):
1180 for k in ["cn", "mn", "sn"]:
1182 if ret and ret[0] != "" and ret[0] != "-":
1184 return " ".join(name)
1186 ################################################################################
1188 class Keyring(object):
1189 gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
1190 " --with-colons --fingerprint --fingerprint"
1195 def __init__(self, *args, **kwargs):
1199 return '<Keyring %s>' % self.keyring_name
1201 def de_escape_gpg_str(self, txt):
1202 esclist = re.split(r'(\\x..)', txt)
1203 for x in range(1,len(esclist),2):
1204 esclist[x] = "%c" % (int(esclist[x][2:],16))
1205 return "".join(esclist)
1207 def parse_address(self, uid):
1208 """parses uid and returns a tuple of real name and email address"""
1210 (name, address) = email.Utils.parseaddr(uid)
1211 name = re.sub(r"\s*[(].*[)]", "", name)
1212 name = self.de_escape_gpg_str(name)
1215 return (name, address)
1217 def load_keys(self, keyring):
1218 if not self.keyring_id:
1219 raise Exception('Must be initialized with database information')
1221 k = os.popen(self.gpg_invocation % keyring, "r")
1223 need_fingerprint = False
1226 field = line.split(":")
1227 if field[0] == "pub":
1230 (name, addr) = self.parse_address(field[9])
1232 self.keys[key]["email"] = addr
1233 self.keys[key]["name"] = name
1234 need_fingerprint = True
1235 elif key and field[0] == "uid":
1236 (name, addr) = self.parse_address(field[9])
1237 if "email" not in self.keys[key] and "@" in addr:
1238 self.keys[key]["email"] = addr
1239 self.keys[key]["name"] = name
1240 elif need_fingerprint and field[0] == "fpr":
1241 self.keys[key]["fingerprints"] = [field[9]]
1242 self.fpr_lookup[field[9]] = key
1243 need_fingerprint = False
    def import_users_from_ldap(self, session):
        """Map keyring fingerprints to LDAP accounts.

        Returns a tuple (byname, byuid) of dictionaries keyed on uid name
        and database uid id respectively.

        NOTE(review): several lines are elided in this excerpt (the
        byname/byuid initialisation and the loop over the LDAP search
        results that binds 'entry') -- confirm against the full source.
        """
        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
        ca_cert_file = cnf.get('Import-LDAP-Fingerprints::CACertFile')

        l = ldap.open(LDAPServer)

        # TODO: This should request a new context and use
        # connection-specific options (i.e. "l.set_option(...)")

        # Request a new TLS context. If there was already one, libldap
        # would not change the TLS options (like which CAs to trust).
        #l.set_option(ldap.OPT_X_TLS_NEWCTX, True)
        ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_HARD)
        #ldap.set_option(ldap.OPT_X_TLS_CACERTDIR, None)
        ldap.set_option(ldap.OPT_X_TLS_CACERTFILE, ca_cert_file)

        # anonymous bind; the search below only needs public attributes
        l.simple_bind_s("","")
        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
               ["uid", "keyfingerprint", "cn", "mn", "sn"])

        ldap_fin_uid_id = {}

        uid = entry["uid"][0]
        name = get_ldap_name(entry)
        fingerprints = entry["keyFingerPrint"]
        for f in fingerprints:
            key = self.fpr_lookup.get(f, None)
            # NOTE(review): the branch body here is elided; the assignment
            # below presumably runs only for keys present in self.keys
            if key not in self.keys:
            self.keys[key]["uid"] = uid

            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, name)
            byname[uid] = (keyid, name)

        return (byname, byuid)
    def generate_users_from_keyring(self, format, session):
        """Create uid entries for all keys, deriving the uid from C{format}.

        Returns a tuple (byname, byuid) like import_users_from_ldap.

        NOTE(review): several lines are elided in this excerpt (the
        byname/byuid initialisation, the else branch pairing, and the
        guard around the trailing invalid-uid block).
        """
        for x in self.keys.keys():
            if "email" not in self.keys[x]:
                # key has no usable email address: give it the fallback uid
                self.keys[x]["uid"] = format % "invalid-uid"
            uid = format % self.keys[x]["email"]
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, self.keys[x]["name"])
            byname[uid] = (keyid, self.keys[x]["name"])
            self.keys[x]["uid"] = uid

        # register the shared fallback uid used for address-less keys
        uid = format % "invalid-uid"
        keyid = get_or_set_uid(uid, session).uid_id
        byuid[keyid] = (uid, "ungeneratable user id")
        byname[uid] = (keyid, "ungeneratable user id")

        return (byname, byuid)

__all__.append('Keyring')
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @rtype: Keyring
    @return: the Keyring object for this keyring
    """
    q = session.query(Keyring).filter_by(keyring_name=keyring)

    # NOTE(review): try/return lines elided in this excerpt
    except NoResultFound:

__all__.append('get_keyring')
def get_active_keyring_paths(session=None):
    """
    @rtype: list of strings
    @return: list of active keyring paths

    NOTE(review): a decorator line (presumably @session_wrapper, which
    supplies C{session}) appears to be elided above the def -- confirm.
    """
    # active keyrings, highest priority first
    return [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).order_by(desc(Keyring.priority)).all() ]

__all__.append('get_active_keyring_paths')
def get_primary_keyring_path(session=None):
    """
    Get the full path to the highest priority active keyring

    @rtype: str or None
    @return: path to the active keyring with the highest priority or None if no
             keyring is configured
    """
    keyrings = get_active_keyring_paths()

    # NOTE(review): the return lines (first element / None) are elided
    if len(keyrings) > 0:

__all__.append('get_primary_keyring_path')
1372 ################################################################################
class DBChange(object):
    """ORM mapping target for rows of the 'changes' table."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): __init__ body and the __repr__ def line are elided
        return '<DBChange %s>' % self.changesname

__all__.append('DBChange')


def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return: DBChange object for the given filename (C{None} if not present)
    """
    q = session.query(DBChange).filter_by(changesname=filename)

    # NOTE(review): try/return lines elided in this excerpt
    except NoResultFound:

__all__.append('get_dbchange')
1408 ################################################################################
class Maintainer(ORMObject):
    """ORM mapping target for rows of the 'maintainer' table."""
    def __init__(self, name = None):
        # NOTE(review): body elided in this excerpt

    def properties(self):
        return ['name', 'maintainer_id']

    def not_null_constraints(self):
        # NOTE(review): return line elided in this excerpt

    def get_split_maintainer(self):
        # split "Name <email>" via fix_maintainer; empty 4-tuple when unset
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())

__all__.append('Maintainer')


def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """
    q = session.query(Maintainer).filter_by(name=name)
    # NOTE(review): the try/one()/return lines are elided in this excerpt
    except NoResultFound:
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()

__all__.append('get_or_set_maintainer')


def get_maintainer(maintainer_id, session=None):
    """
    Return the name of the maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer
    @return: the Maintainer with this C{maintainer_id}
    """
    return session.query(Maintainer).get(maintainer_id)

__all__.append('get_maintainer')
1479 ################################################################################
class NewComment(object):
    """ORM mapping target for rows of the 'new_comments' table."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): __init__ body and the __repr__ def line are elided
        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)

__all__.append('NewComment')


def has_new_comment(policy_queue, package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """
    q = session.query(NewComment).filter_by(policy_queue=policy_queue)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    return bool(q.count() > 0)

__all__.append('has_new_comment')


def get_new_comments(policy_queue, package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """
    q = session.query(NewComment).filter_by(policy_queue=policy_queue)
    if package is not None: q = q.filter_by(package=package)
    if version is not None: q = q.filter_by(version=version)
    if comment_id is not None: q = q.filter_by(comment_id=comment_id)
    # NOTE(review): the return q.all() line is elided in this excerpt

__all__.append('get_new_comments')
1549 ################################################################################
class Override(ORMObject):
    """ORM mapping target for rows of the 'override' table."""
    def __init__(self, package = None, suite = None, component = None, overridetype = None, \
        section = None, priority = None):
        self.package = package
        # NOTE(review): the self.suite assignment line is elided here
        self.component = component
        self.overridetype = overridetype
        self.section = section
        self.priority = priority

    def properties(self):
        return ['package', 'suite', 'component', 'overridetype', 'section', \
        # NOTE(review): continuation line of this list is elided

    def not_null_constraints(self):
        return ['package', 'suite', 'component', 'overridetype', 'section']

__all__.append('Override')


def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to.  If
                  None, don't limit.  Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
                      limit to.  If None, don't limit.  Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
                         limit to.  If None, don't limit.  Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """
    q = session.query(Override)
    q = q.filter_by(package=package)

    # each scalar filter argument is normalised to a list for in_()
    if suite is not None:
        if not isinstance(suite, list): suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list): component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list): overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))
    # NOTE(review): the return q.all() line is elided in this excerpt

__all__.append('get_override')
1618 ################################################################################
class OverrideType(ORMObject):
    """ORM mapping target for rows of the 'override_type' table."""
    def __init__(self, overridetype = None):
        self.overridetype = overridetype

    def properties(self):
        return ['overridetype', 'overridetype_id', 'overrides_count']

    def not_null_constraints(self):
        return ['overridetype']

__all__.append('OverrideType')


def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: OverrideType
    @return: the database id for the given override type
    """
    q = session.query(OverrideType).filter_by(overridetype=override_type)

    # NOTE(review): try/return lines elided in this excerpt
    except NoResultFound:

__all__.append('get_override_type')
1657 ################################################################################
class PolicyQueue(object):
    """ORM mapping target for rows of the 'policy_queue' table."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): __init__ body and the __repr__ def line are elided
        return '<PolicyQueue %s>' % self.queue_name

__all__.append('PolicyQueue')


def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(queue_name=queuename)

    # NOTE(review): try/return lines elided in this excerpt
    except NoResultFound:

__all__.append('get_policy_queue')
1693 ################################################################################
class PolicyQueueUpload(object):
    def __cmp__(self, other):
        # Sort order: source name, then version, then source uploads
        # before/after binary-only ones, then changes file name.
        # (Python 2 comparison protocol via __cmp__.)
        ret = cmp(self.changes.source, other.changes.source)
        ret = apt_pkg.version_compare(self.changes.version, other.changes.version)
        if self.source is not None and other.source is None:
        elif self.source is None and other.source is not None:
        ret = cmp(self.changes.changesname, other.changes.changesname)
        # NOTE(review): the "if ret == 0:" guards and the return
        # statements between these comparisons are elided in this excerpt

__all__.append('PolicyQueueUpload')
1711 ################################################################################
class PolicyQueueByhandFile(object):
    """ORM mapping target for rows of the policy_queue_byhand_file table.

    NOTE(review): class body elided in this excerpt.
    """

__all__.append('PolicyQueueByhandFile')
1718 ################################################################################
class Priority(ORMObject):
    """ORM mapping target for rows of the 'priority' table."""
    def __init__(self, priority = None, level = None):
        self.priority = priority
        # NOTE(review): the self.level assignment line is elided here

    def properties(self):
        return ['priority', 'priority_id', 'level', 'overrides_count']

    def not_null_constraints(self):
        return ['priority', 'level']

    def __eq__(self, val):
        # allow comparing a Priority directly against its name string
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented

__all__.append('Priority')


def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority
    """
    q = session.query(Priority).filter_by(priority=priority)

    # NOTE(review): try/return lines elided in this excerpt
    except NoResultFound:

__all__.append('get_priority')


def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """
    q = session.query(Priority)
    # NOTE(review): the ret initialisation, loop header and return are elided
    ret[x.priority] = x.priority_id

__all__.append('get_priorities')
1792 ################################################################################
class Section(ORMObject):
    """ORM mapping target for rows of the 'section' table."""
    def __init__(self, section = None):
        self.section = section

    def properties(self):
        return ['section', 'section_id', 'overrides_count']

    def not_null_constraints(self):
        # NOTE(review): return line elided in this excerpt

    def __eq__(self, val):
        # allow comparing a Section directly against its name string
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented

__all__.append('Section')


def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name
    """
    q = session.query(Section).filter_by(section=section)

    # NOTE(review): try/return lines elided in this excerpt
    except NoResultFound:

__all__.append('get_section')


def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """
    q = session.query(Section)
    # NOTE(review): the ret initialisation, loop header and return are elided
    ret[x.section] = x.section_id

__all__.append('get_sections')
1865 ################################################################################
class SignatureHistory(ORMObject):
    # NOTE(review): a @classmethod decorator, the "self = cls()" line and
    # the trailing "return self" appear to be elided in this excerpt.
    def from_signed_file(cls, signed_file):
        """signature history entry from signed file

        @type  signed_file: L{daklib.gpg.SignedFile}
        @param signed_file: signed file

        @rtype: L{SignatureHistory}
        """
        self.fingerprint = signed_file.primary_fingerprint
        self.signature_timestamp = signed_file.signature_timestamp
        self.contents_sha1 = signed_file.contents_sha1()

__all__.append('SignatureHistory')
1885 ################################################################################
class SrcContents(ORMObject):
    """ORM mapping target for rows of the 'src_contents' table."""
    def __init__(self, file = None, source = None):
        # NOTE(review): the self.file assignment line is elided here
        self.source = source

    def properties(self):
        return ['file', 'source']

__all__.append('SrcContents')
1897 ################################################################################
1899 from debian.debfile import Deb822
# Temporary Deb822 subclass to fix bugs with : handling; see #597249
class Dak822(Deb822):
    def _internal_parser(self, sequence, fields=None):
        """RFC822-style parser feeding self like a dict.

        NOTE(review): many lines of this method are elided in this
        excerpt (the curkey/content initialisation, the continue
        statements and the branch closing each paragraph); the visible
        lines only sketch the three-regex structure.
        """
        # The key is non-whitespace, non-colon characters before any colon.
        key_part = r"^(?P<key>[^: \t\n\r\f\v]+)\s*:\s*"
        single = re.compile(key_part + r"(?P<data>\S.*?)\s*$")
        multi = re.compile(key_part + r"$")
        multidata = re.compile(r"^\s(?P<data>.+?)\s*$")

        wanted_field = lambda f: fields is None or f in fields

        if isinstance(sequence, basestring):
            sequence = sequence.splitlines()

        for line in self.gpg_stripped_paragraph(sequence):
            # "Key: value" on a single line
            m = single.match(line)
            self[curkey] = content
            if not wanted_field(m.group('key')):
            curkey = m.group('key')
            content = m.group('data')

            # "Key:" opening a multi-line value
            m = multi.match(line)
            self[curkey] = content
            if not wanted_field(m.group('key')):
            curkey = m.group('key')

            # continuation line of a multi-line value
            m = multidata.match(line)
            content += '\n' + line # XXX not m.group('data')?

        self[curkey] = content
class DBSource(ORMObject):
    """ORM mapping target for rows of the 'source' table."""
    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None, fingerprint = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date
        self.fingerprint = fingerprint

    # NOTE(review): the enclosing def/property line for this return is elided
        return self.source_id

    def properties(self):
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \
            'install_date', 'binaries_count', 'uploaders_count']

    def not_null_constraints(self):
        return ['source', 'version', 'install_date', 'maintainer', \
            'changedby', 'poolfile']

    def read_control_fields(self):
        """
        Reads the control information from a dsc

        @rtype: tuple
        @return: fields is the dsc information in a dictionary form
        """
        fullpath = self.poolfile.fullpath
        fields = Dak822(open(self.poolfile.fullpath, 'r'))
        # NOTE(review): the return statement is elided in this excerpt

    metadata = association_proxy('key', 'value')

    def scan_contents(self):
        """
        Returns a set of names for non directories. The path names are
        normalized after converting them from either utf-8 or iso8859-1
        encoding.
        """
        fullpath = self.poolfile.fullpath
        from daklib.contents import UnpackedSource
        unpacked = UnpackedSource(fullpath)
        # NOTE(review): the accumulator initialisation and the final
        # return are elided in this excerpt
        for name in unpacked.get_all_filenames():
            # enforce proper utf-8 encoding
            try:
                name.decode('utf-8')
            except UnicodeDecodeError:
                name = name.decode('iso8859-1').encode('utf-8')

__all__.append('DBSource')
# NOTE(review): the default "suites = ["any"]" is a mutable default
# argument; it appears to be read-only here, but confirm it is never
# mutated in the elided lines.
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
      1. exact match     => 1.0-3
      2. bin-only NMU    => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: list
    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: returns 1 if a source with expected version is found, otherwise 0
    """
    # strip a binNMU suffix to also match the original source version
    from daklib.regexes import re_bin_only_nmu
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))
        # source must exist in 'suite' or a suite that is enhanced by 'suite'
        s = get_suite(suite, session)
        enhances_vcs = session.query(VersionCheck).filter(VersionCheck.suite==s).filter_by(check='Enhances')
        considered_suites = [ vc.reference for vc in enhances_vcs ]
        considered_suites.append(s)

        q = q.filter(DBSource.suites.any(Suite.suite_id.in_([s.suite_id for s in considered_suites])))
        # NOTE(review): the count check and early return are elided here

    # No source found so return not ok
    # NOTE(review): the final return statement is elided in this excerpt

__all__.append('source_exists')
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """
    return session.query(Suite).filter(Suite.sources.any(source=source)).all()

__all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @type source: str
    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect.  If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBSource objects for the given name (may be empty)
    """
    q = session.query(DBSource).filter_by(source=source)

    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)
    # NOTE(review): the return q.all() line is elided in this excerpt

__all__.append('get_sources_from_name')
# FIXME: This function fails badly if it finds more than 1 source package and
# its implementation is trivial enough to be inlined.
def get_source_in_suite(source, suite, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite: string
    @param suite: the suite name

    @rtype: string
    @return: the version for I{source} in I{suite}
    """
    q = get_suite(suite, session).get_sources(source)
    # NOTE(review): try/return lines elided in this excerpt
    except NoResultFound:

__all__.append('get_source_in_suite')
def import_metadata_into_db(obj, session=None):
    """
    This routine works on either DBBinary or DBSource objects and imports
    their metadata into the database
    """
    fields = obj.read_control_fields()
    for k in fields.keys():
        # try progressively laxer encodings for the value
        try:
            val = str(fields[k])
        except UnicodeEncodeError:
            # Fall back to UTF-8
            try:
                val = fields[k].encode('utf-8')
            except UnicodeEncodeError:
                # Finally try iso8859-1
                val = fields[k].encode('iso8859-1')
                # Otherwise we allow the exception to percolate up and we cause
                # a reject as someone is playing silly buggers

        obj.metadata[get_or_set_metadatakey(k, session)] = val

    session.commit_or_flush()

__all__.append('import_metadata_into_db')
2173 ################################################################################
class SrcFormat(object):
    """ORM mapping target for rows of the 'src_format' table."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): __init__ body and the __repr__ def line are elided
        return '<SrcFormat %s>' % (self.format_name)

__all__.append('SrcFormat')
2184 ################################################################################
# (display name, Suite attribute) pairs used when printing suite details.
# NOTE(review): one entry line (between 'Origin' and 'Description') is
# elided in this excerpt.
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
# Why the heck don't we have any UNIQUE constraints in table suite?
# TODO: Add UNIQUE constraints for appropriate columns.
class Suite(ORMObject):
    """ORM mapping target for rows of the 'suite' table."""
    def __init__(self, suite_name = None, version = None):
        self.suite_name = suite_name
        self.version = version

    def properties(self):
        return ['suite_name', 'version', 'sources_count', 'binaries_count', \
        # NOTE(review): continuation line of this list is elided

    def not_null_constraints(self):
        return ['suite_name']

    def __eq__(self, val):
        # allow comparing a Suite directly against its name string
        if isinstance(val, str):
            return (self.suite_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.suite_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    # NOTE(review): the def line and 'ret' initialisation of the details
    # method these lines belong to are elided in this excerpt
        for disp, field in SUITE_FIELDS:
            val = getattr(self, field, None)
            ret.append("%s: %s" % (disp, val))

        return "\n".join(ret)

    def get_architectures(self, skipsrc=False, skipall=False):
        """
        Returns list of Architecture objects

        @type skipsrc: boolean
        @param skipsrc: Whether to skip returning the 'source' architecture entry
        (Default False)

        @type skipall: boolean
        @param skipall: Whether to skip returning the 'all' architecture entry
        (Default False)

        @rtype: list
        @return: list of Architecture objects for the given name (may be empty)
        """
        q = object_session(self).query(Architecture).with_parent(self)
        # NOTE(review): the "if skipsrc:" / "if skipall:" guard lines are elided
            q = q.filter(Architecture.arch_string != 'source')
            q = q.filter(Architecture.arch_string != 'all')
        return q.order_by(Architecture.arch_string).all()

    def get_sources(self, source):
        """
        Returns a query object representing DBSource that is part of C{suite}.

          - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}

        @type source: string
        @param source: source package name

        @rtype: sqlalchemy.orm.query.Query
        @return: a query of DBSource
        """
        session = object_session(self)
        return session.query(DBSource).filter_by(source = source). \
        # NOTE(review): continuation line of this query chain is elided

    def get_overridesuite(self):
        # fall through to self when no override suite is configured
        if self.overridesuite is None:
        # NOTE(review): the "return self" line is elided here
        return object_session(self).query(Suite).filter_by(suite_name=self.overridesuite).one()

    # NOTE(review): the @property/def line for 'path' is elided here
        return os.path.join(self.archive.path, 'dists', self.suite_name)

__all__.append('Suite')
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Suite
    @return: Suite object for the requested suite name (None if not present)
    """
    q = session.query(Suite).filter_by(suite_name=suite)

    # NOTE(review): try/return lines elided in this excerpt
    except NoResultFound:

__all__.append('get_suite')
2315 ################################################################################
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name. The list is
    empty if suite does not exist.

    @type suite: str
    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry
    (Default False)

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry
    (Default False)

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Architecture objects for the given name (may be empty)
    """
    # NOTE(review): the try: line is elided; AttributeError covers
    # get_suite() returning None for an unknown suite
        return get_suite(suite, session).get_architectures(skipsrc, skipall)
    except AttributeError:

__all__.append('get_suite_architectures')
2349 ################################################################################
class Uid(ORMObject):
    """ORM mapping target for rows of the 'uid' table."""
    def __init__(self, uid = None, name = None):
        # NOTE(review): the attribute assignments are elided in this excerpt

    def __eq__(self, val):
        # allow comparing a Uid directly against its uid string
        if isinstance(val, str):
            return (self.uid == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.uid != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        return ['uid', 'name', 'fingerprint']

    def not_null_constraints(self):
        # NOTE(review): return line elided in this excerpt

__all__.append('Uid')


def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: Uid
    @return: the uid object for the given uidname
    """
    q = session.query(Uid).filter_by(uid=uidname)

    # NOTE(review): try/one()/insert lines elided in this excerpt
    except NoResultFound:
        session.commit_or_flush()

__all__.append('get_or_set_uid')


def get_uid_from_fingerprint(fpr, session=None):
    # Look up the Uid owning the fingerprint C{fpr}; the try/return
    # lines (and presumably a @session_wrapper decorator) are elided.
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)

    except NoResultFound:

__all__.append('get_uid_from_fingerprint')
2422 ################################################################################
class MetadataKey(ORMObject):
    """ORM mapping target for rows of the 'metadata_keys' table."""
    def __init__(self, key = None):
        # NOTE(review): body elided in this excerpt

    def properties(self):
        # NOTE(review): return line elided in this excerpt

    def not_null_constraints(self):
        # NOTE(review): return line elided in this excerpt

__all__.append('MetadataKey')


def get_or_set_metadatakey(keyname, session=None):
    """
    Returns MetadataKey object for given keyname.

    If no matching keyname is found, a row is inserted.

    @type keyname: string
    @param keyname: The keyname to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: MetadataKey
    @return: the metadatakey object for the given keyname
    """
    q = session.query(MetadataKey).filter_by(key=keyname)

    # NOTE(review): try/one()/session.add lines elided in this excerpt
    except NoResultFound:
        ret = MetadataKey(keyname)
        session.commit_or_flush()

__all__.append('get_or_set_metadatakey')
2468 ################################################################################
class BinaryMetadata(ORMObject):
    """ORM mapping target for rows of the 'binaries_metadata' table."""
    def __init__(self, key = None, value = None, binary = None):
        # NOTE(review): the key/value assignments are elided here
        self.binary = binary

    def properties(self):
        return ['binary', 'key', 'value']

    def not_null_constraints(self):
        # NOTE(review): return line elided in this excerpt

__all__.append('BinaryMetadata')
2484 ################################################################################
class SourceMetadata(ORMObject):
    """ORM mapping target for rows of the 'source_metadata' table."""
    def __init__(self, key = None, value = None, source = None):
        # NOTE(review): the key/value assignments are elided here
        self.source = source

    def properties(self):
        return ['source', 'key', 'value']

    def not_null_constraints(self):
        # NOTE(review): return line elided in this excerpt

__all__.append('SourceMetadata')
2500 ################################################################################
class VersionCheck(ORMObject):
    """ORM mapping target for rows of the 'version_check' table."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): body elided in this excerpt

    def properties(self):
        #return ['suite_id', 'check', 'reference_id']
        # NOTE(review): the active return line is elided in this excerpt

    def not_null_constraints(self):
        return ['suite', 'check', 'reference']

__all__.append('VersionCheck')


def get_version_checks(suite_name, check = None, session = None):
    # Return VersionCheck rows for C{suite_name}, optionally limited to
    # one check type ('Enhances', 'MustBeNewerThan', ...).
    suite = get_suite(suite_name, session)
    # NOTE(review): the None-suite early return is elided here

    # Make sure that what we return is iterable so that list comprehensions
    # involving this don't cause a traceback
    q = session.query(VersionCheck).filter_by(suite=suite)
    # NOTE(review): the "if check:" guard and final return are elided
        q = q.filter_by(check=check)

__all__.append('get_version_checks')
2529 ################################################################################
# DBConn: single shared wrapper around the SQLAlchemy engine, reflected
# table metadata and classic-style ORM mapper configuration for the dak
# PostgreSQL database.
# NOTE(review): this listing is non-contiguous -- original line numbers are
# embedded at the start of each line and many lines are elided (the
# tables/views name lists, the try: headers, several call argument lines),
# so read the annotations below as grounded only in what is visible.
2531 class DBConn(object):
2533 database module init.
# Borg/shared-state pattern: every DBConn() instance aliases the same
# __dict__ (the __shared_state class attribute is declared on an elided
# line -- confirm), so engine/metadata setup happens exactly once, guarded
# by the 'initialised' flag.  kwargs.has_key is Python 2 only.
2537 def __init__(self, *args, **kwargs):
2538 self.__dict__ = self.__shared_state
2540 if not getattr(self, 'initialised', False):
2541 self.initialised = True
2542 self.debug = kwargs.has_key('debug')
# Reflect (autoload) each named database table and view into
# self.tbl_<name> / self.view_<name> Table objects on self.db_meta.
# The 'tables' and 'views' list literals are only partially visible here.
2545 def __setuptables(self):
2548 'acl_architecture_map',
2549 'acl_fingerprint_map',
2556 'binaries_metadata',
2563 'external_overrides',
2564 'extra_src_references',
2566 'files_archive_map',
2572 # TODO: the maintainer column in table override should be removed.
2576 'policy_queue_upload',
2577 'policy_queue_upload_binaries_map',
2578 'policy_queue_byhand_file',
2581 'signature_history',
2590 'suite_architectures',
2591 'suite_build_queue_copy',
2592 'suite_src_formats',
2598 'almost_obsolete_all_associations',
2599 'almost_obsolete_src_associations',
2600 'any_associations_source',
2601 'bin_associations_binaries',
2602 'binaries_suite_arch',
2605 'newest_all_associations',
2606 'newest_any_associations',
2608 'newest_src_association',
2609 'obsolete_all_associations',
2610 'obsolete_any_associations',
2611 'obsolete_any_by_all_associations',
2612 'obsolete_src_associations',
2614 'src_associations_bin',
2615 'src_associations_src',
2616 'suite_arch_by_name',
# Reflect tables (useexisting tolerates re-reflection) then views.
2619 for table_name in tables:
2620 table = Table(table_name, self.db_meta, \
2621 autoload=True, useexisting=True)
2622 setattr(self, 'tbl_%s' % table_name, table)
2624 for view_name in views:
2625 view = Table(view_name, self.db_meta, autoload=True)
2626 setattr(self, 'view_%s' % view_name, view)
# Wire the ORM classes declared earlier in this module to the reflected
# tables, using the classic SQLAlchemy 0.x mapper()/relation()/backref()
# API.  'extension = validator' attaches the module's MapperExtension-based
# validation hook (declared on an elided line -- confirm).
2628 def __setupmappers(self):
2629 mapper(Architecture, self.tbl_architecture,
2630 properties = dict(arch_id = self.tbl_architecture.c.id,
2631 suites = relation(Suite, secondary=self.tbl_suite_architectures,
2632 order_by=self.tbl_suite.c.suite_name,
2633 backref=backref('architectures', order_by=self.tbl_architecture.c.arch_string))),
2634 extension = validator)
2636 mapper(ACL, self.tbl_acl,
2638 architectures = relation(Architecture, secondary=self.tbl_acl_architecture_map, collection_class=set),
2639 fingerprints = relation(Fingerprint, secondary=self.tbl_acl_fingerprint_map, collection_class=set),
2640 match_keyring = relation(Keyring, primaryjoin=(self.tbl_acl.c.match_keyring_id == self.tbl_keyrings.c.id)),
2641 per_source = relation(ACLPerSource, collection_class=set),
# Two relations to Fingerprint from the same table need explicit
# primaryjoins to disambiguate the foreign key used by each.
2644 mapper(ACLPerSource, self.tbl_acl_per_source,
2646 acl = relation(ACL),
2647 fingerprint = relation(Fingerprint, primaryjoin=(self.tbl_acl_per_source.c.fingerprint_id == self.tbl_fingerprint.c.id)),
2648 created_by = relation(Fingerprint, primaryjoin=(self.tbl_acl_per_source.c.created_by_id == self.tbl_fingerprint.c.id)),
2651 mapper(Archive, self.tbl_archive,
2652 properties = dict(archive_id = self.tbl_archive.c.id,
2653 archive_name = self.tbl_archive.c.name))
2655 mapper(ArchiveFile, self.tbl_files_archive_map,
2656 properties = dict(archive = relation(Archive, backref='files'),
2657 component = relation(Component),
2658 file = relation(PoolFile, backref='archives')))
2660 mapper(BuildQueue, self.tbl_build_queue,
2661 properties = dict(queue_id = self.tbl_build_queue.c.id,
2662 suite = relation(Suite, primaryjoin=(self.tbl_build_queue.c.suite_id==self.tbl_suite.c.id))))
# DBBinary: 'key' maps binaries_metadata rows as a dict keyed on the
# metadata key (attribute_mapped_collection), cascading deletes.
2664 mapper(DBBinary, self.tbl_binaries,
2665 properties = dict(binary_id = self.tbl_binaries.c.id,
2666 package = self.tbl_binaries.c.package,
2667 version = self.tbl_binaries.c.version,
2668 maintainer_id = self.tbl_binaries.c.maintainer,
2669 maintainer = relation(Maintainer),
2670 source_id = self.tbl_binaries.c.source,
2671 source = relation(DBSource, backref='binaries'),
2672 arch_id = self.tbl_binaries.c.architecture,
2673 architecture = relation(Architecture),
2674 poolfile_id = self.tbl_binaries.c.file,
2675 poolfile = relation(PoolFile),
2676 binarytype = self.tbl_binaries.c.type,
2677 fingerprint_id = self.tbl_binaries.c.sig_fpr,
2678 fingerprint = relation(Fingerprint),
2679 install_date = self.tbl_binaries.c.install_date,
2680 suites = relation(Suite, secondary=self.tbl_bin_associations,
2681 backref=backref('binaries', lazy='dynamic')),
2682 extra_sources = relation(DBSource, secondary=self.tbl_extra_src_references,
2683 backref=backref('extra_binary_references', lazy='dynamic')),
2684 key = relation(BinaryMetadata, cascade='all',
2685 collection_class=attribute_mapped_collection('key'))),
2686 extension = validator)
2688 mapper(Component, self.tbl_component,
2689 properties = dict(component_id = self.tbl_component.c.id,
2690 component_name = self.tbl_component.c.name),
2691 extension = validator)
2693 mapper(DBConfig, self.tbl_config,
2694 properties = dict(config_id = self.tbl_config.c.id))
2696 mapper(DSCFile, self.tbl_dsc_files,
2697 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
2698 source_id = self.tbl_dsc_files.c.source,
2699 source = relation(DBSource),
2700 poolfile_id = self.tbl_dsc_files.c.file,
2701 poolfile = relation(PoolFile)))
2703 mapper(ExternalOverride, self.tbl_external_overrides,
2705 suite_id = self.tbl_external_overrides.c.suite,
2706 suite = relation(Suite),
2707 component_id = self.tbl_external_overrides.c.component,
2708 component = relation(Component)))
2710 mapper(PoolFile, self.tbl_files,
2711 properties = dict(file_id = self.tbl_files.c.id,
2712 filesize = self.tbl_files.c.size),
2713 extension = validator)
2715 mapper(Fingerprint, self.tbl_fingerprint,
2716 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
2717 uid_id = self.tbl_fingerprint.c.uid,
2718 uid = relation(Uid),
2719 keyring_id = self.tbl_fingerprint.c.keyring,
2720 keyring = relation(Keyring),
2721 acl = relation(ACL)),
2722 extension = validator)
2724 mapper(Keyring, self.tbl_keyrings,
2725 properties = dict(keyring_name = self.tbl_keyrings.c.name,
2726 keyring_id = self.tbl_keyrings.c.id,
2727 acl = relation(ACL, primaryjoin=(self.tbl_keyrings.c.acl_id == self.tbl_acl.c.id)))),
2729 mapper(DBChange, self.tbl_changes,
2730 properties = dict(change_id = self.tbl_changes.c.id,
2731 seen = self.tbl_changes.c.seen,
2732 source = self.tbl_changes.c.source,
2733 binaries = self.tbl_changes.c.binaries,
2734 architecture = self.tbl_changes.c.architecture,
2735 distribution = self.tbl_changes.c.distribution,
2736 urgency = self.tbl_changes.c.urgency,
2737 maintainer = self.tbl_changes.c.maintainer,
2738 changedby = self.tbl_changes.c.changedby,
2739 date = self.tbl_changes.c.date,
2740 version = self.tbl_changes.c.version))
# Maintainer relates to DBSource twice (Maintainer: and Changed-By:),
# hence the two explicit primaryjoins onto different source columns.
2742 mapper(Maintainer, self.tbl_maintainer,
2743 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
2744 maintains_sources = relation(DBSource, backref='maintainer',
2745 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
2746 changed_sources = relation(DBSource, backref='changedby',
2747 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
2748 extension = validator)
2750 mapper(NewComment, self.tbl_new_comments,
2751 properties = dict(comment_id = self.tbl_new_comments.c.id,
2752 policy_queue = relation(PolicyQueue)))
2754 mapper(Override, self.tbl_override,
2755 properties = dict(suite_id = self.tbl_override.c.suite,
2756 suite = relation(Suite, \
2757 backref=backref('overrides', lazy='dynamic')),
2758 package = self.tbl_override.c.package,
2759 component_id = self.tbl_override.c.component,
2760 component = relation(Component, \
2761 backref=backref('overrides', lazy='dynamic')),
2762 priority_id = self.tbl_override.c.priority,
2763 priority = relation(Priority, \
2764 backref=backref('overrides', lazy='dynamic')),
2765 section_id = self.tbl_override.c.section,
2766 section = relation(Section, \
2767 backref=backref('overrides', lazy='dynamic')),
2768 overridetype_id = self.tbl_override.c.type,
2769 overridetype = relation(OverrideType, \
2770 backref=backref('overrides', lazy='dynamic'))))
2772 mapper(OverrideType, self.tbl_override_type,
2773 properties = dict(overridetype = self.tbl_override_type.c.type,
2774 overridetype_id = self.tbl_override_type.c.id))
2776 mapper(PolicyQueue, self.tbl_policy_queue,
2777 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id,
2778 suite = relation(Suite, primaryjoin=(self.tbl_policy_queue.c.suite_id == self.tbl_suite.c.id))))
2780 mapper(PolicyQueueUpload, self.tbl_policy_queue_upload,
2782 changes = relation(DBChange),
2783 policy_queue = relation(PolicyQueue, backref='uploads'),
2784 target_suite = relation(Suite),
2785 source = relation(DBSource),
2786 binaries = relation(DBBinary, secondary=self.tbl_policy_queue_upload_binaries_map),
2789 mapper(PolicyQueueByhandFile, self.tbl_policy_queue_byhand_file,
2791 upload = relation(PolicyQueueUpload, backref='byhand'),
2795 mapper(Priority, self.tbl_priority,
2796 properties = dict(priority_id = self.tbl_priority.c.id))
2798 mapper(Section, self.tbl_section,
2799 properties = dict(section_id = self.tbl_section.c.id,
2800 section=self.tbl_section.c.section))
2802 mapper(SignatureHistory, self.tbl_signature_history)
2804 mapper(DBSource, self.tbl_source,
2805 properties = dict(source_id = self.tbl_source.c.id,
2806 version = self.tbl_source.c.version,
2807 maintainer_id = self.tbl_source.c.maintainer,
2808 poolfile_id = self.tbl_source.c.file,
2809 poolfile = relation(PoolFile),
2810 fingerprint_id = self.tbl_source.c.sig_fpr,
2811 fingerprint = relation(Fingerprint),
2812 changedby_id = self.tbl_source.c.changedby,
2813 srcfiles = relation(DSCFile,
2814 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
2815 suites = relation(Suite, secondary=self.tbl_src_associations,
2816 backref=backref('sources', lazy='dynamic')),
2817 uploaders = relation(Maintainer,
2818 secondary=self.tbl_src_uploaders),
2819 key = relation(SourceMetadata, cascade='all',
2820 collection_class=attribute_mapped_collection('key'))),
2821 extension = validator)
2823 mapper(SrcFormat, self.tbl_src_format,
2824 properties = dict(src_format_id = self.tbl_src_format.c.id,
2825 format_name = self.tbl_src_format.c.format_name))
2827 mapper(Suite, self.tbl_suite,
2828 properties = dict(suite_id = self.tbl_suite.c.id,
2829 policy_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.policy_queue_id == self.tbl_policy_queue.c.id)),
2830 new_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.new_queue_id == self.tbl_policy_queue.c.id)),
2831 copy_queues = relation(BuildQueue,
2832 secondary=self.tbl_suite_build_queue_copy),
2833 srcformats = relation(SrcFormat, secondary=self.tbl_suite_src_formats,
2834 backref=backref('suites', lazy='dynamic')),
2835 archive = relation(Archive, backref='suites'),
2836 acls = relation(ACL, secondary=self.tbl_suite_acl_map, collection_class=set)),
2837 extension = validator)
2839 mapper(Uid, self.tbl_uid,
2840 properties = dict(uid_id = self.tbl_uid.c.id,
2841 fingerprint = relation(Fingerprint)),
2842 extension = validator)
2844 mapper(BinContents, self.tbl_bin_contents,
2846 binary = relation(DBBinary,
2847 backref=backref('contents', lazy='dynamic', cascade='all')),
2848 file = self.tbl_bin_contents.c.file))
2850 mapper(SrcContents, self.tbl_src_contents,
2852 source = relation(DBSource,
2853 backref=backref('contents', lazy='dynamic', cascade='all')),
2854 file = self.tbl_src_contents.c.file))
2856 mapper(MetadataKey, self.tbl_metadata_keys,
2858 key_id = self.tbl_metadata_keys.c.key_id,
2859 key = self.tbl_metadata_keys.c.key))
2861 mapper(BinaryMetadata, self.tbl_binaries_metadata,
2863 binary_id = self.tbl_binaries_metadata.c.bin_id,
2864 binary = relation(DBBinary),
2865 key_id = self.tbl_binaries_metadata.c.key_id,
2866 key = relation(MetadataKey),
2867 value = self.tbl_binaries_metadata.c.value))
2869 mapper(SourceMetadata, self.tbl_source_metadata,
2871 source_id = self.tbl_source_metadata.c.src_id,
2872 source = relation(DBSource),
2873 key_id = self.tbl_source_metadata.c.key_id,
2874 key = relation(MetadataKey),
2875 value = self.tbl_source_metadata.c.value))
# version_check joins Suite twice (suite under test, reference suite);
# the reference relation is eagerly loaded (lazy='joined').
2877 mapper(VersionCheck, self.tbl_version_check,
2879 suite_id = self.tbl_version_check.c.suite,
2880 suite = relation(Suite, primaryjoin=self.tbl_version_check.c.suite==self.tbl_suite.c.id),
2881 reference_id = self.tbl_version_check.c.reference,
2882 reference = relation(Suite, primaryjoin=self.tbl_version_check.c.reference==self.tbl_suite.c.id, lazy='joined')))
2884 ## Connection functions
# Build a PostgreSQL connection string from dak's Config (DB::Service,
# or DB::Host[:Port]/Name, or a local socket with optional ?port=),
# create the engine/MetaData/sessionmaker, then reflect tables and set up
# mappers.  Failures surface via utils.fubar (fatal exit) on
# OperationalError.  The `cnf = Config()` assignment and the try: headers
# are on elided lines.
2885 def __createconn(self):
2886 from config import Config
2888 if cnf.has_key("DB::Service"):
2889 connstr = "postgresql://service=%s" % cnf["DB::Service"]
2890 elif cnf.has_key("DB::Host"):
2892 connstr = "postgresql://%s" % cnf["DB::Host"]
2893 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
2894 connstr += ":%s" % cnf["DB::Port"]
2895 connstr += "/%s" % cnf["DB::Name"]
# else branch (elided header): no host configured, connect via the local
# Unix socket; a non-default port goes in the query string instead.
2898 connstr = "postgresql:///%s" % cnf["DB::Name"]
2899 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
2900 connstr += "?port=%s" % cnf["DB::Port"]
# Engine tuning knobs from config; 'echo' mirrors the debug flag set in
# __init__.  use_native_unicode only exists on SQLAlchemy > 0.5.
2902 engine_args = { 'echo': self.debug }
2903 if cnf.has_key('DB::PoolSize'):
2904 engine_args['pool_size'] = int(cnf['DB::PoolSize'])
2905 if cnf.has_key('DB::MaxOverflow'):
2906 engine_args['max_overflow'] = int(cnf['DB::MaxOverflow'])
2907 if sa_major_version != '0.5' and cnf.has_key('DB::Unicode') and \
2908 cnf['DB::Unicode'] == 'false':
2909 engine_args['use_native_unicode'] = False
2911 # Monkey patch a new dialect in in order to support service= syntax
2912 import sqlalchemy.dialects.postgresql
2913 from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
2914 class PGDialect_psycopg2_dak(PGDialect_psycopg2):
2915 def create_connect_args(self, url):
# Strip the 'postgresql://' scheme (21 chars including 'service=') and
# hand psycopg2 a libpq 'service=<name>' DSN instead of host/port args.
2916 if str(url).startswith('postgresql://service='):
2918 servicename = str(url)[21:]
2919 return (['service=%s' % servicename], {})
2921 return PGDialect_psycopg2.create_connect_args(self, url)
2923 sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak
2926 self.db_pg = create_engine(connstr, **engine_args)
2927 self.db_meta = MetaData()
2928 self.db_meta.bind = self.db_pg
2929 self.db_smaker = sessionmaker(bind=self.db_pg,
2933 self.__setuptables()
2934 self.__setupmappers()
2936 except OperationalError as e:
2938 utils.fubar("Cannot connect to database (%s)" % str(e))
# Remember the creating PID so forked children can detect the stale
# connection and reinitialise (see session()).
2940 self.pid = os.getpid()
2942 def session(self, work_mem = 0):
2944 Returns a new session object. If a work_mem parameter is provided a new
2945 transaction is started and the work_mem parameter is set for this
2946 transaction. The work_mem parameter is measured in MB. A default value
2947 will be used if the parameter is not set.
2949 # reinitialize DBConn in new processes
2950 if self.pid != os.getpid():
# The reconnect call (presumably __createconn) is on an elided line.
2953 session = self.db_smaker()
# SET LOCAL scopes the work_mem override to the current transaction only;
# the guard (`if work_mem > 0:`) is on an elided line -- confirm.
2955 session.execute("SET LOCAL work_mem TO '%d MB'" % work_mem)
2958 __all__.append('DBConn')