5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
38 from os.path import normpath
50 import simplejson as json
52 from datetime import datetime, timedelta
53 from errno import ENOENT
54 from tempfile import mkstemp, mkdtemp
55 from subprocess import Popen, PIPE
56 from tarfile import TarFile
58 from inspect import getargspec
61 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
63 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
64 backref, MapperExtension, EXT_CONTINUE, object_mapper, clear_mappers
65 from sqlalchemy import types as sqltypes
66 from sqlalchemy.orm.collections import attribute_mapped_collection
67 from sqlalchemy.ext.associationproxy import association_proxy
69 # Don't remove this, we re-export the exceptions to scripts which import us
70 from sqlalchemy.exc import *
71 from sqlalchemy.orm.exc import NoResultFound
73 # Only import Config until Queue stuff is changed to store its config
75 from config import Config
76 from textutils import fix_maintainer
77 from dak_exceptions import DBUpdateError, NoSourceFieldError, FileExistsError
79 # suppress some deprecation warnings in squeeze related to sqlalchemy
81 warnings.filterwarnings('ignore', \
82 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
84 warnings.filterwarnings('ignore', \
85 "Predicate of partial index .* ignored during reflection", \
89 ################################################################################
91 # Patch in support for the debversion field type so that it works during
95 # that is for sqlalchemy 0.6
96 UserDefinedType = sqltypes.UserDefinedType
98 # this one for sqlalchemy 0.5
99 UserDefinedType = sqltypes.TypeEngine
101 class DebVersion(UserDefinedType):
102 def get_col_spec(self):
105 def bind_processor(self, dialect):
108 # ' = None' is needed for sqlalchemy 0.5:
109 def result_processor(self, dialect, coltype = None):
112 sa_major_version = sqlalchemy.__version__[0:3]
113 if sa_major_version in ["0.5", "0.6", "0.7"]:
114 from sqlalchemy.databases import postgres
115 postgres.ischema_names['debversion'] = DebVersion
117 raise Exception("dak only ported to SQLA versions 0.5 to 0.7. See daklib/dbconn.py")
119 ################################################################################
121 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
123 ################################################################################
125 def session_wrapper(fn):
127 Wrapper around common ".., session=None):" handling. If the wrapped
128 function is called without passing 'session', we create a local one
129 and destroy it when the function ends.
131 Also attaches a commit_or_flush method to the session; if we created a
132 local session, this is a synonym for session.commit(), otherwise it is a
133 synonym for session.flush().
136 def wrapped(*args, **kwargs):
137 private_transaction = False
139 # Find the session object
140 session = kwargs.get('session')
143 if len(args) <= len(getargspec(fn)[0]) - 1:
144 # No session specified as last argument or in kwargs
145 private_transaction = True
146 session = kwargs['session'] = DBConn().session()
148 # Session is last argument in args
152 session = args[-1] = DBConn().session()
153 private_transaction = True
155 if private_transaction:
156 session.commit_or_flush = session.commit
158 session.commit_or_flush = session.flush
161 return fn(*args, **kwargs)
163 if private_transaction:
164 # We created a session; close it.
167 wrapped.__doc__ = fn.__doc__
168 wrapped.func_name = fn.func_name
172 __all__.append('session_wrapper')
174 ################################################################################
176 class ORMObject(object):
178 ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
179 derived classes must implement the properties() method.
182 def properties(self):
184 This method should be implemented by all derived classes and returns a
185 list of the important properties. The properties 'created' and
186 'modified' will be added automatically. A suffix '_count' should be
187 added to properties that are lists or query objects. The most important
188 property name should be returned as the first element in the list
189 because it is used by repr().
195 Returns a JSON representation of the object based on the properties
196 returned from the properties() method.
199 # add created and modified
200 all_properties = self.properties() + ['created', 'modified']
201 for property in all_properties:
202 # check for list or query
203 if property[-6:] == '_count':
204 real_property = property[:-6]
205 if not hasattr(self, real_property):
207 value = getattr(self, real_property)
208 if hasattr(value, '__len__'):
211 elif hasattr(value, 'count'):
212 # query (but not during validation)
213 if self.in_validation:
215 value = value.count()
217 raise KeyError('Do not understand property %s.' % property)
219 if not hasattr(self, property):
222 value = getattr(self, property)
226 elif isinstance(value, ORMObject):
227 # use repr() for ORMObject types
230 # we want a string for all other types because json cannot
233 data[property] = value
234 return json.dumps(data)
238 Returns the name of the class.
240 return type(self).__name__
244 Returns a short string representation of the object using the first
245 element from the properties() method.
247 primary_property = self.properties()[0]
248 value = getattr(self, primary_property)
249 return '<%s %s>' % (self.classname(), str(value))
253 Returns a human readable form of the object using the properties()
256 return '<%s %s>' % (self.classname(), self.json())
258 def not_null_constraints(self):
260 Returns a list of properties that must be not NULL. Derived classes
261 should override this method if needed.
265 validation_message = \
266 "Validation failed because property '%s' must not be empty in object\n%s"
268 in_validation = False
272 This function validates the not NULL constraints as returned by
273 not_null_constraints(). It raises the DBUpdateError exception if
276 for property in self.not_null_constraints():
277 # TODO: It is a bit awkward that the mapper configuration allow
278 # directly setting the numeric _id columns. We should get rid of it
280 if hasattr(self, property + '_id') and \
281 getattr(self, property + '_id') is not None:
283 if not hasattr(self, property) or getattr(self, property) is None:
284 # str() might lead to races due to a 2nd flush
285 self.in_validation = True
286 message = self.validation_message % (property, str(self))
287 self.in_validation = False
288 raise DBUpdateError(message)
292 def get(cls, primary_key, session = None):
294 This is a support function that allows getting an object by its primary
297 Architecture.get(3[, session])
299 instead of the more verbose
301 session.query(Architecture).get(3)
303 return session.query(cls).get(primary_key)
305 def session(self, replace = False):
307 Returns the current session that is associated with the object. May
308 return None if the object is in detached state.
311 return object_session(self)
313 def clone(self, session = None):
315 Clones the current object in a new session and returns the new clone. A
316 fresh session is created if the optional session parameter is not
317 provided. The function will fail if a session is provided and has
320 RATIONALE: SQLAlchemy's session is not thread safe. This method clones
321 an existing object to allow several threads to work with their own
322 instances of an ORMObject.
324 WARNING: Only persistent (committed) objects can be cloned. Changes
325 made to the original object that are not committed yet will get lost.
326 The session of the new object will always be rolled back to avoid
330 if self.session() is None:
331 raise RuntimeError( \
332 'Method clone() failed for detached object:\n%s' % self)
333 self.session().flush()
334 mapper = object_mapper(self)
335 primary_key = mapper.primary_key_from_instance(self)
336 object_class = self.__class__
338 session = DBConn().session()
339 elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
340 raise RuntimeError( \
341 'Method clone() failed due to unflushed changes in session.')
342 new_object = session.query(object_class).get(primary_key)
344 if new_object is None:
345 raise RuntimeError( \
346 'Method clone() failed for non-persistent object:\n%s' % self)
349 __all__.append('ORMObject')
351 ################################################################################
353 class Validator(MapperExtension):
355 This class calls the validate() method for each instance for the
356 'before_update' and 'before_insert' events. A global object validator is
357 used for configuring the individual mappers.
360 def before_update(self, mapper, connection, instance):
364 def before_insert(self, mapper, connection, instance):
368 validator = Validator()
370 ################################################################################
372 class ACL(ORMObject):
374 return "<ACL {0}>".format(self.name)
376 __all__.append('ACL')
378 class ACLPerSource(ORMObject):
380 return "<ACLPerSource acl={0} fingerprint={1} source={2} reason={3}>".format(self.acl.name, self.fingerprint.fingerprint, self.source, self.reason)
382 __all__.append('ACLPerSource')
384 ################################################################################
class Architecture(ORMObject):
    """A single Debian architecture (e.g. 'amd64') mapped from the database."""

    def __init__(self, arch_string=None, description=None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, other):
        # Allow comparing an Architecture directly against a plain
        # architecture-name string.
        if not isinstance(other, str):
            # Signal Python to fall back to the normal comparison machinery.
            return NotImplemented
        return self.arch_string == other

    def __ne__(self, other):
        # Mirror __eq__ for string operands; defer everything else.
        if not isinstance(other, str):
            return NotImplemented
        return self.arch_string != other

    def properties(self):
        """Important properties used by ORMObject's json()/repr() support."""
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        """Properties validated as NOT NULL before insert/update."""
        return ['arch_string']
409 __all__.append('Architecture')
412 def get_architecture(architecture, session=None):
414 Returns database id for given C{architecture}.
416 @type architecture: string
417 @param architecture: The name of the architecture
419 @type session: Session
420 @param session: Optional SQLA session object (a temporary one will be
421 generated if not supplied)
424 @return: Architecture object for the given arch (None if not present)
427 q = session.query(Architecture).filter_by(arch_string=architecture)
431 except NoResultFound:
434 __all__.append('get_architecture')
436 # TODO: should be removed because the implementation is too trivial
438 def get_architecture_suites(architecture, session=None):
440 Returns list of Suite objects for given C{architecture} name
442 @type architecture: str
443 @param architecture: Architecture name to search for
445 @type session: Session
446 @param session: Optional SQL session object (a temporary one will be
447 generated if not supplied)
450 @return: list of Suite objects for the given name (may be empty)
453 return get_architecture(architecture, session).suites
455 __all__.append('get_architecture_suites')
457 ################################################################################
459 class Archive(object):
460 def __init__(self, *args, **kwargs):
464 return '<Archive %s>' % self.archive_name
466 __all__.append('Archive')
469 def get_archive(archive, session=None):
471 returns database id for given C{archive}.
473 @type archive: string
474 @param archive: the name of the archive
476 @type session: Session
477 @param session: Optional SQLA session object (a temporary one will be
478 generated if not supplied)
481 @return: Archive object for the given name (None if not present)
484 archive = archive.lower()
486 q = session.query(Archive).filter_by(archive_name=archive)
490 except NoResultFound:
493 __all__.append('get_archive')
495 ################################################################################
497 class ArchiveFile(object):
498 def __init__(self, archive=None, component=None, file=None):
499 self.archive = archive
500 self.component = component
504 return os.path.join(self.archive.path, 'pool', self.component.component_name, self.file.filename)
506 __all__.append('ArchiveFile')
508 ################################################################################
510 class BinContents(ORMObject):
511 def __init__(self, file = None, binary = None):
515 def properties(self):
516 return ['file', 'binary']
518 __all__.append('BinContents')
520 ################################################################################
def subprocess_setup():
    """Child-process hook: restore the default SIGPIPE disposition.

    Python installs its own SIGPIPE handler by default, which is usually
    not what non-Python subprocesses expect, so reset it before exec.
    """
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
527 class DBBinary(ORMObject):
528 def __init__(self, package = None, source = None, version = None, \
529 maintainer = None, architecture = None, poolfile = None, \
530 binarytype = 'deb', fingerprint=None):
531 self.package = package
533 self.version = version
534 self.maintainer = maintainer
535 self.architecture = architecture
536 self.poolfile = poolfile
537 self.binarytype = binarytype
538 self.fingerprint = fingerprint
542 return self.binary_id
544 def properties(self):
545 return ['package', 'version', 'maintainer', 'source', 'architecture', \
546 'poolfile', 'binarytype', 'fingerprint', 'install_date', \
547 'suites_count', 'binary_id', 'contents_count', 'extra_sources']
549 def not_null_constraints(self):
550 return ['package', 'version', 'maintainer', 'source', 'poolfile', \
553 metadata = association_proxy('key', 'value')
555 def scan_contents(self):
557 Yields the contents of the package. Only regular files are yielded and
558 the path names are normalized after converting them from either utf-8
559 or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the
560 package does not contain any regular file.
562 fullpath = self.poolfile.fullpath
563 dpkg = Popen(['dpkg-deb', '--fsys-tarfile', fullpath], stdout = PIPE,
564 preexec_fn = subprocess_setup)
565 tar = TarFile.open(fileobj = dpkg.stdout, mode = 'r|')
566 for member in tar.getmembers():
567 if not member.isdir():
568 name = normpath(member.name)
569 # enforce proper utf-8 encoding
572 except UnicodeDecodeError:
573 name = name.decode('iso8859-1').encode('utf-8')
579 def read_control(self):
581 Reads the control information from a binary.
584 @return: stanza text of the control section.
587 fullpath = self.poolfile.fullpath
588 deb_file = open(fullpath, 'r')
589 stanza = utils.deb_extract_control(deb_file)
594 def read_control_fields(self):
596 Reads the control information from a binary and return
600 @return: fields of the control section as a dictionary.
603 stanza = self.read_control()
604 return apt_pkg.TagSection(stanza)
606 __all__.append('DBBinary')
609 def get_suites_binary_in(package, session=None):
611 Returns list of Suite objects which given C{package} name is in
614 @param package: DBBinary package name to search for
617 @return: list of Suite objects for the given package
620 return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()
622 __all__.append('get_suites_binary_in')
625 def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
627 Returns the component name of the newest binary package in suite_list or
628 None if no package is found. The result can be optionally filtered by a list
629 of architecture names.
632 @param package: DBBinary package name to search for
634 @type suite_list: list of str
635 @param suite_list: list of suite_name items
637 @type arch_list: list of str
638 @param arch_list: optional list of arch_string items that defaults to []
640 @rtype: str or NoneType
641 @return: name of component or None
644 q = session.query(DBBinary).filter_by(package = package). \
645 join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
646 if len(arch_list) > 0:
647 q = q.join(DBBinary.architecture). \
648 filter(Architecture.arch_string.in_(arch_list))
649 binary = q.order_by(desc(DBBinary.version)).first()
653 return binary.poolfile.component.component_name
655 __all__.append('get_component_by_package_suite')
657 ################################################################################
659 class BuildQueue(object):
660 def __init__(self, *args, **kwargs):
664 return '<BuildQueue %s>' % self.queue_name
666 __all__.append('BuildQueue')
668 ################################################################################
class Component(ORMObject):
    """An archive component (e.g. 'main') mapped from the database."""

    def __init__(self, component_name=None):
        self.component_name = component_name

    def __eq__(self, other):
        # Allow comparing a Component directly against a plain
        # component-name string.
        if not isinstance(other, str):
            # Signal Python to fall back to the normal comparison machinery.
            return NotImplemented
        return self.component_name == other

    def __ne__(self, other):
        # Mirror __eq__ for string operands; defer everything else.
        if not isinstance(other, str):
            return NotImplemented
        return self.component_name != other

    def properties(self):
        """Important properties used by ORMObject's json()/repr() support."""
        return ['component_name', 'component_id', 'description',
                'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        """Properties validated as NOT NULL before insert/update."""
        return ['component_name']
694 __all__.append('Component')
697 def get_component(component, session=None):
699 Returns database id for given C{component}.
701 @type component: string
702 @param component: The name of the override type
705 @return: the database id for the given component
708 component = component.lower()
710 q = session.query(Component).filter_by(component_name=component)
714 except NoResultFound:
717 __all__.append('get_component')
720 def get_mapped_component(component_name, session=None):
721 """get component after mappings
723 Evaluate component mappings from ComponentMappings in dak.conf for the
724 given component name.
726 @todo: ansgar wants to get rid of this. It's currently only used for
729 @type component_name: str
730 @param component_name: component name
732 @param session: database session
734 @rtype: L{daklib.dbconn.Component} or C{None}
735 @return: component after applying maps or C{None}
738 for m in cnf.value_list("ComponentMappings"):
739 (src, dst) = m.split()
740 if component_name == src:
742 component = session.query(Component).filter_by(component_name=component_name).first()
745 __all__.append('get_mapped_component')
748 def get_component_names(session=None):
750 Returns list of strings of component names.
753 @return: list of strings of component names
756 return [ x.component_name for x in session.query(Component).all() ]
758 __all__.append('get_component_names')
760 ################################################################################
762 class DBConfig(object):
763 def __init__(self, *args, **kwargs):
767 return '<DBConfig %s>' % self.name
769 __all__.append('DBConfig')
771 ################################################################################
774 def get_or_set_contents_file_id(filename, session=None):
776 Returns database id for given filename.
778 If no matching file is found, a row is inserted.
780 @type filename: string
781 @param filename: The filename
782 @type session: SQLAlchemy
783 @param session: Optional SQL session object (a temporary one will be
784 generated if not supplied). If not passed, a commit will be performed at
785 the end of the function, otherwise the caller is responsible for committing.
788 @return: the database id for the given component
791 q = session.query(ContentFilename).filter_by(filename=filename)
794 ret = q.one().cafilename_id
795 except NoResultFound:
796 cf = ContentFilename()
797 cf.filename = filename
799 session.commit_or_flush()
800 ret = cf.cafilename_id
804 __all__.append('get_or_set_contents_file_id')
807 def get_contents(suite, overridetype, section=None, session=None):
809 Returns contents for a suite / overridetype combination, limiting
810 to a section if not None.
813 @param suite: Suite object
815 @type overridetype: OverrideType
816 @param overridetype: OverrideType object
818 @type section: Section
819 @param section: Optional section object to limit results to
821 @type session: SQLAlchemy
822 @param session: Optional SQL session object (a temporary one will be
823 generated if not supplied)
826 @return: ResultsProxy object set up to return tuples of (filename, section,
830 # find me all of the contents for a given suite
831 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
835 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
836 JOIN content_file_names n ON (c.filename=n.id)
837 JOIN binaries b ON (b.id=c.binary_pkg)
838 JOIN override o ON (o.package=b.package)
839 JOIN section s ON (s.id=o.section)
840 WHERE o.suite = :suiteid AND o.type = :overridetypeid
841 AND b.type=:overridetypename"""
843 vals = {'suiteid': suite.suite_id,
844 'overridetypeid': overridetype.overridetype_id,
845 'overridetypename': overridetype.overridetype}
847 if section is not None:
848 contents_q += " AND s.id = :sectionid"
849 vals['sectionid'] = section.section_id
851 contents_q += " ORDER BY fn"
853 return session.execute(contents_q, vals)
855 __all__.append('get_contents')
857 ################################################################################
859 class ContentFilepath(object):
860 def __init__(self, *args, **kwargs):
864 return '<ContentFilepath %s>' % self.filepath
866 __all__.append('ContentFilepath')
869 def get_or_set_contents_path_id(filepath, session=None):
871 Returns database id for given path.
873 If no matching file is found, a row is inserted.
875 @type filepath: string
876 @param filepath: The filepath
878 @type session: SQLAlchemy
879 @param session: Optional SQL session object (a temporary one will be
880 generated if not supplied). If not passed, a commit will be performed at
881 the end of the function, otherwise the caller is responsible for committing.
884 @return: the database id for the given path
887 q = session.query(ContentFilepath).filter_by(filepath=filepath)
890 ret = q.one().cafilepath_id
891 except NoResultFound:
892 cf = ContentFilepath()
893 cf.filepath = filepath
895 session.commit_or_flush()
896 ret = cf.cafilepath_id
900 __all__.append('get_or_set_contents_path_id')
902 ################################################################################
904 class ContentAssociation(object):
905 def __init__(self, *args, **kwargs):
909 return '<ContentAssociation %s>' % self.ca_id
911 __all__.append('ContentAssociation')
913 def insert_content_paths(binary_id, fullpaths, session=None):
915 Make sure given path is associated with given binary id
918 @param binary_id: the id of the binary
919 @type fullpaths: list
920 @param fullpaths: the list of paths of the file being associated with the binary
921 @type session: SQLAlchemy session
922 @param session: Optional SQLAlchemy session. If this is passed, the caller
923 is responsible for ensuring a transaction has begun and committing the
924 results or rolling back based on the result code. If not passed, a commit
925 will be performed at the end of the function, otherwise the caller is
926 responsible for committing.
928 @return: True upon success
933 session = DBConn().session()
938 def generate_path_dicts():
939 for fullpath in fullpaths:
940 if fullpath.startswith( './' ):
941 fullpath = fullpath[2:]
943 yield {'filename':fullpath, 'id': binary_id }
945 for d in generate_path_dicts():
946 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
955 traceback.print_exc()
957 # Only rollback if we set up the session ourself
964 __all__.append('insert_content_paths')
966 ################################################################################
968 class DSCFile(object):
969 def __init__(self, *args, **kwargs):
973 return '<DSCFile %s>' % self.dscfile_id
975 __all__.append('DSCFile')
978 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
980 Returns a list of DSCFiles which may be empty
982 @type dscfile_id: int (optional)
983 @param dscfile_id: the dscfile_id of the DSCFiles to find
985 @type source_id: int (optional)
986 @param source_id: the source id related to the DSCFiles to find
988 @type poolfile_id: int (optional)
989 @param poolfile_id: the poolfile id related to the DSCFiles to find
992 @return: Possibly empty list of DSCFiles
995 q = session.query(DSCFile)
997 if dscfile_id is not None:
998 q = q.filter_by(dscfile_id=dscfile_id)
1000 if source_id is not None:
1001 q = q.filter_by(source_id=source_id)
1003 if poolfile_id is not None:
1004 q = q.filter_by(poolfile_id=poolfile_id)
1008 __all__.append('get_dscfiles')
1010 ################################################################################
1012 class ExternalOverride(ORMObject):
1013 def __init__(self, *args, **kwargs):
1017 return '<ExternalOverride %s = %s: %s>' % (self.package, self.key, self.value)
1019 __all__.append('ExternalOverride')
1021 ################################################################################
1023 class PoolFile(ORMObject):
1024 def __init__(self, filename = None, filesize = -1, \
1026 self.filename = filename
1027 self.filesize = filesize
1028 self.md5sum = md5sum
1032 session = DBConn().session().object_session(self)
1033 af = session.query(ArchiveFile).join(Archive) \
1034 .filter(ArchiveFile.file == self) \
1035 .order_by(Archive.tainted.desc()).first()
1039 def component(self):
1040 session = DBConn().session().object_session(self)
1041 component_id = session.query(ArchiveFile.component_id).filter(ArchiveFile.file == self) \
1042 .group_by(ArchiveFile.component_id).one()
1043 return session.query(Component).get(component_id)
1047 return os.path.basename(self.filename)
1049 def is_valid(self, filesize = -1, md5sum = None):
1050 return self.filesize == long(filesize) and self.md5sum == md5sum
1052 def properties(self):
1053 return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
1054 'sha256sum', 'source', 'binary', 'last_used']
1056 def not_null_constraints(self):
1057 return ['filename', 'md5sum']
1059 def identical_to(self, filename):
1061 compare size and hash with the given file
1064 @return: true if the given file has the same size and hash as this object; false otherwise
1066 st = os.stat(filename)
1067 if self.filesize != st.st_size:
1070 f = open(filename, "r")
1071 sha256sum = apt_pkg.sha256sum(f)
1072 if sha256sum != self.sha256sum:
1077 __all__.append('PoolFile')
1080 def get_poolfile_like_name(filename, session=None):
1082 Returns an array of PoolFile objects which are like the given name
1084 @type filename: string
1085 @param filename: the filename of the file to check against the DB
1088 @return: array of PoolFile objects
1091 # TODO: There must be a way of properly using bind parameters with %FOO%
1092 q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1096 __all__.append('get_poolfile_like_name')
1098 ################################################################################
1100 class Fingerprint(ORMObject):
1101 def __init__(self, fingerprint = None):
1102 self.fingerprint = fingerprint
1104 def properties(self):
1105 return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
1108 def not_null_constraints(self):
1109 return ['fingerprint']
1111 __all__.append('Fingerprint')
1114 def get_fingerprint(fpr, session=None):
1116 Returns Fingerprint object for given fpr.
1119 @param fpr: The fpr to find / add
1121 @type session: SQLAlchemy
1122 @param session: Optional SQL session object (a temporary one will be
1123 generated if not supplied).
1126 @return: the Fingerprint object for the given fpr or None
1129 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1133 except NoResultFound:
1138 __all__.append('get_fingerprint')
1141 def get_or_set_fingerprint(fpr, session=None):
1143 Returns Fingerprint object for given fpr.
1145 If no matching fpr is found, a row is inserted.
1148 @param fpr: The fpr to find / add
1150 @type session: SQLAlchemy
1151 @param session: Optional SQL session object (a temporary one will be
1152 generated if not supplied). If not passed, a commit will be performed at
1153 the end of the function, otherwise the caller is responsible for committing.
1154 A flush will be performed either way.
1157 @return: the Fingerprint object for the given fpr
1160 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1164 except NoResultFound:
1165 fingerprint = Fingerprint()
1166 fingerprint.fingerprint = fpr
1167 session.add(fingerprint)
1168 session.commit_or_flush()
1173 __all__.append('get_or_set_fingerprint')
1175 ################################################################################
1177 # Helper routine for Keyring class
1178 def get_ldap_name(entry):
1180 for k in ["cn", "mn", "sn"]:
1182 if ret and ret[0] != "" and ret[0] != "-":
1184 return " ".join(name)
1186 ################################################################################
class Keyring(object):
    # gpg command template used by load_keys(); '%s' is substituted with the
    # keyring file path.
    gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
                     " --with-colons --fingerprint --fingerprint"

    def __init__(self, *args, **kwargs):

        return '<Keyring %s>' % self.keyring_name

    def de_escape_gpg_str(self, txt):
        """Decode \\xNN escape sequences produced by gpg --with-colons."""
        esclist = re.split(r'(\\x..)', txt)
        # Odd indices hold the '\xNN' escapes captured by the split pattern.
        for x in range(1, len(esclist), 2):
            esclist[x] = "%c" % (int(esclist[x][2:], 16))
        return "".join(esclist)

    def parse_address(self, uid):
        """parses uid and returns a tuple of real name and email address"""
        (name, address) = email.Utils.parseaddr(uid)
        # Strip parenthesised comments, then undo gpg's \xNN escaping.
        name = re.sub(r"\s*[(].*[)]", "", name)
        name = self.de_escape_gpg_str(name)
        return (name, address)

    def load_keys(self, keyring):
        """Parse gpg --with-colons output for C{keyring}, recording name,
        email and fingerprints per key into self.keys / self.fpr_lookup."""
        if not self.keyring_id:
            raise Exception('Must be initialized with database information')

        k = os.popen(self.gpg_invocation % keyring, "r")

        # NOTE(review): the per-line loop header is elided in this view.
        field = line.split(":")
        if field[0] == "pub":
            (name, addr) = self.parse_address(field[9])
            self.keys[key]["email"] = addr
            self.keys[key]["name"] = name
            self.keys[key]["fingerprints"] = []
        elif key and field[0] == "sub" and len(field) >= 12:
            # Subkey capability field: 's' means this subkey can sign.
            signingkey = ("s" in field[11])
        elif key and field[0] == "uid":
            (name, addr) = self.parse_address(field[9])
            # Keep the first uid that actually carries an email address.
            if "email" not in self.keys[key] and "@" in addr:
                self.keys[key]["email"] = addr
                self.keys[key]["name"] = name
        elif signingkey and field[0] == "fpr":
            self.keys[key]["fingerprints"].append(field[9])
            self.fpr_lookup[field[9]] = key

    def import_users_from_ldap(self, session):
        """Look up loaded key fingerprints in LDAP and build
        (byname, byuid) account mappings."""
        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
        ca_cert_file = cnf.get('Import-LDAP-Fingerprints::CACertFile')

        l = ldap.open(LDAPServer)

        # Request a new TLS context. If there was already one, libldap
        # would not change the TLS options (like which CAs to trust).
        l.set_option(ldap.OPT_X_TLS_NEWCTX, True)
        l.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_HARD)
        l.set_option(ldap.OPT_X_TLS_CACERTDIR, None)
        l.set_option(ldap.OPT_X_TLS_CACERTFILE, ca_cert_file)

        # Anonymous bind; the directory is world-readable for these attrs.
        l.simple_bind_s("", "")
        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
               ["uid", "keyfingerprint", "cn", "mn", "sn"])

        ldap_fin_uid_id = {}

        # NOTE(review): the loop over the LDAP results is elided in this view.
        uid = entry["uid"][0]
        name = get_ldap_name(entry)
        fingerprints = entry["keyFingerPrint"]
        for f in fingerprints:
            key = self.fpr_lookup.get(f, None)
            if key not in self.keys:
                self.keys[key]["uid"] = uid

        keyid = get_or_set_uid(uid, session).uid_id
        byuid[keyid] = (uid, name)
        byname[uid] = (keyid, name)

        return (byname, byuid)

    def generate_users_from_keyring(self, format, session):
        """Build (byname, byuid) mappings, generating a uid for each key by
        interpolating its email address into C{format}."""
        for x in self.keys.keys():
            if "email" not in self.keys[x]:
                # No usable email address on any uid of this key.
                self.keys[x]["uid"] = format % "invalid-uid"
            uid = format % self.keys[x]["email"]
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, self.keys[x]["name"])
            byname[uid] = (keyid, self.keys[x]["name"])
            self.keys[x]["uid"] = uid

        # Placeholder entry for keys without a usable email address.
        uid = format % "invalid-uid"
        keyid = get_or_set_uid(uid, session).uid_id
        byuid[keyid] = (uid, "ungeneratable user id")
        byname[uid] = (keyid, "ungeneratable user id")

        return (byname, byuid)

__all__.append('Keyring')
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @rtype: Keyring
    @return: the Keyring object for this keyring
    """
    q = session.query(Keyring).filter_by(keyring_name=keyring)

    except NoResultFound:

__all__.append('get_keyring')
def get_active_keyring_paths(session=None):
    """
    Get the paths of all active keyrings, highest priority first.

    @rtype: list of strings
    @return: list of active keyring paths
    """
    return [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).order_by(desc(Keyring.priority)).all() ]

__all__.append('get_active_keyring_paths')
def get_primary_keyring_path(session=None):
    """
    Get the full path to the highest priority active keyring

    @rtype: str or None
    @return: path to the active keyring with the highest priority or None if no
             keyring is configured
    """
    keyrings = get_active_keyring_paths()

    if len(keyrings) > 0:

__all__.append('get_primary_keyring_path')

################################################################################
class DBChange(object):
    """ORM object for a known .changes file (see get_dbchange)."""
    def __init__(self, *args, **kwargs):

        return '<DBChange %s>' % self.changesname

__all__.append('DBChange')
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return: DBChange object for the given filename (C{None} if not present)
    """
    q = session.query(DBChange).filter_by(changesname=filename)

    except NoResultFound:

__all__.append('get_dbchange')

################################################################################
class Maintainer(ORMObject):
    def __init__(self, name = None):
        """(assignment elided in this source view)"""

    def properties(self):
        return ['name', 'maintainer_id']

    def not_null_constraints(self):
        """(return value elided in this source view)"""

    def get_split_maintainer(self):
        """Return fix_maintainer()'s split of the stored name, or four empty
        strings when no name is set."""
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())

__all__.append('Maintainer')
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """
    q = session.query(Maintainer).filter_by(name=name)

    except NoResultFound:
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()

__all__.append('get_or_set_maintainer')
def get_maintainer(maintainer_id, session=None):
    """
    Return the name of the maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer
    @return: the Maintainer with this C{maintainer_id}
    """
    return session.query(Maintainer).get(maintainer_id)

__all__.append('get_maintainer')

################################################################################
class NewComment(object):
    """ORM object for a reviewer comment on a (package, version) in NEW."""
    def __init__(self, *args, **kwargs):

        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)

__all__.append('NewComment')
def has_new_comment(policy_queue, package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type policy_queue: PolicyQueue
    @param policy_queue: policy queue to look in

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """
    q = session.query(NewComment).filter_by(policy_queue=policy_queue)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    # The comparison already yields a bool; the former bool(...) wrapper
    # around it was redundant.
    return q.count() > 0

__all__.append('has_new_comment')
def get_new_comments(policy_queue, package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """
    q = session.query(NewComment).filter_by(policy_queue=policy_queue)
    if package is not None: q = q.filter_by(package=package)
    if version is not None: q = q.filter_by(version=version)
    if comment_id is not None: q = q.filter_by(comment_id=comment_id)

__all__.append('get_new_comments')

################################################################################
class Override(ORMObject):
    def __init__(self, package = None, suite = None, component = None, overridetype = None, \
        section = None, priority = None):
        # NOTE(review): the 'self.suite = suite' assignment is elided in
        # this view.
        self.package = package
        self.component = component
        self.overridetype = overridetype
        self.section = section
        self.priority = priority

    def properties(self):
        return ['package', 'suite', 'component', 'overridetype', 'section', \

    def not_null_constraints(self):
        return ['package', 'suite', 'component', 'overridetype', 'section']

__all__.append('Override')
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to.  If
                  None, don't limit.  Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
                      limit to.  If None, don't limit.  Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
                         limit to.  If None, don't limit.  Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """
    q = session.query(Override)
    q = q.filter_by(package=package)

    # Scalar arguments are normalised to one-element lists before in_().
    if suite is not None:
        if not isinstance(suite, list): suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list): component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list): overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

__all__.append('get_override')

################################################################################
class OverrideType(ORMObject):
    """ORM object wrapping a single override type name."""

    def __init__(self, overridetype=None):
        self.overridetype = overridetype

    def properties(self):
        # Attribute names exposed via ORMObject's generic helpers.
        return ['overridetype', 'overridetype_id', 'overrides_count']

    def not_null_constraints(self):
        # Attributes that may not be NULL.
        return ['overridetype']

__all__.append('OverrideType')
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: OverrideType
    @return: the database id for the given override type
    """
    q = session.query(OverrideType).filter_by(overridetype=override_type)

    except NoResultFound:

__all__.append('get_override_type')

################################################################################
class PolicyQueue(object):
    """ORM object for a policy queue (identified by queue_name)."""
    def __init__(self, *args, **kwargs):

        return '<PolicyQueue %s>' % self.queue_name

__all__.append('PolicyQueue')
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(queue_name=queuename)

    except NoResultFound:

__all__.append('get_policy_queue')

################################################################################
class PolicyQueueUpload(object):
    def __cmp__(self, other):
        # Sort order: source name, then version, then source-vs-binary-only,
        # and finally the .changes file name as a tie-breaker.
        # NOTE(review): the 'if ret == 0:' guards between the comparison
        # steps are elided in this view.
        ret = cmp(self.changes.source, other.changes.source)
        ret = apt_pkg.version_compare(self.changes.version, other.changes.version)
        if self.source is not None and other.source is None:
        elif self.source is None and other.source is not None:
        ret = cmp(self.changes.changesname, other.changes.changesname)

__all__.append('PolicyQueueUpload')

################################################################################
class PolicyQueueByhandFile(object):
    """ORM object for a byhand file in a policy queue (body elided in this view)."""

__all__.append('PolicyQueueByhandFile')

################################################################################
class Priority(ORMObject):
    def __init__(self, priority = None, level = None):
        # NOTE(review): the 'self.level = level' assignment is elided in
        # this view.
        self.priority = priority

    def properties(self):
        return ['priority', 'priority_id', 'level', 'overrides_count']

    def not_null_constraints(self):
        return ['priority', 'level']

    def __eq__(self, val):
        # Allow direct comparison against a plain priority-name string.
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented

__all__.append('Priority')
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority
    """
    q = session.query(Priority).filter_by(priority=priority)

    except NoResultFound:

__all__.append('get_priority')
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """
    q = session.query(Priority)

    ret[x.priority] = x.priority_id

__all__.append('get_priorities')

################################################################################
class Section(ORMObject):
    def __init__(self, section = None):
        self.section = section

    def properties(self):
        return ['section', 'section_id', 'overrides_count']

    def not_null_constraints(self):
        """(return value elided in this source view)"""

    def __eq__(self, val):
        # Allow direct comparison against a plain section-name string.
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented

__all__.append('Section')
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name
    """
    q = session.query(Section).filter_by(section=section)

    except NoResultFound:

__all__.append('get_section')
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """
    q = session.query(Section)

    ret[x.section] = x.section_id

__all__.append('get_sections')

################################################################################
class SignatureHistory(ORMObject):
    def from_signed_file(cls, signed_file):
        """signature history entry from signed file

        @type signed_file: L{daklib.gpg.SignedFile}
        @param signed_file: signed file

        @rtype: L{SignatureHistory}
        """
        # NOTE(review): 'self' is bound by an elided statement here
        # (presumably self = cls()) — confirm against the full source.
        self.fingerprint = signed_file.primary_fingerprint
        self.signature_timestamp = signed_file.signature_timestamp
        self.contents_sha1 = signed_file.contents_sha1()

__all__.append('SignatureHistory')

################################################################################
class SrcContents(ORMObject):
    def __init__(self, file = None, source = None):
        # NOTE(review): the 'self.file = file' assignment is elided in
        # this view.
        self.source = source

    def properties(self):
        return ['file', 'source']

__all__.append('SrcContents')

################################################################################
1898 from debian.debfile import Deb822
# Temporary Deb822 subclass to fix bugs with : handling; see #597249
class Dak822(Deb822):
    def _internal_parser(self, sequence, fields=None):
        """Parse an RFC822-style paragraph into self, keeping only keys
        accepted by C{fields} (all keys when C{fields} is None)."""
        # The key is non-whitespace, non-colon characters before any colon.
        key_part = r"^(?P<key>[^: \t\n\r\f\v]+)\s*:\s*"
        single = re.compile(key_part + r"(?P<data>\S.*?)\s*$")
        multi = re.compile(key_part + r"$")
        multidata = re.compile(r"^\s(?P<data>.+?)\s*$")

        wanted_field = lambda f: fields is None or f in fields

        if isinstance(sequence, basestring):
            sequence = sequence.splitlines()

        # NOTE(review): several control-flow lines of this parser are elided
        # in this view.
        for line in self.gpg_stripped_paragraph(sequence):
            m = single.match(line)
            self[curkey] = content

            if not wanted_field(m.group('key')):

            curkey = m.group('key')
            content = m.group('data')

            m = multi.match(line)
            self[curkey] = content

            if not wanted_field(m.group('key')):

            curkey = m.group('key')

            m = multidata.match(line)
            content += '\n' + line # XXX not m.group('data')?

        self[curkey] = content
class DBSource(ORMObject):
    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None, fingerprint = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date
        self.fingerprint = fingerprint

        return self.source_id

    def properties(self):
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \
            'install_date', 'binaries_count', 'uploaders_count']

    def not_null_constraints(self):
        return ['source', 'version', 'install_date', 'maintainer', \
            'changedby', 'poolfile']

    def read_control_fields(self):
        """
        Reads the control information from a dsc

        @rtype: dict-like (Dak822)
        @return: fields is the dsc information in a dictionary form
        """
        fullpath = self.poolfile.fullpath
        fields = Dak822(open(self.poolfile.fullpath, 'r'))

    # Convenient access to the source's metadata key/value pairs.
    metadata = association_proxy('key', 'value')

    def scan_contents(self):
        """
        Returns a set of names for non directories. The path names are
        normalized after converting them from either utf-8 or iso8859-1
        encoding.
        """
        fullpath = self.poolfile.fullpath
        from daklib.contents import UnpackedSource
        unpacked = UnpackedSource(fullpath)
        for name in unpacked.get_all_filenames():
            # enforce proper utf-8 encoding
            name.decode('utf-8')
            except UnicodeDecodeError:
                name = name.decode('iso8859-1').encode('utf-8')

__all__.append('DBSource')
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
      1. exact match                      => 1.0-3
      2. bin-only NMU                     => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: list
    @param suites: list of suites to check in, default I{any}
    (NOTE(review): mutable default argument — safe only while this function
    never mutates C{suites})

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: returns 1 if a source with expected version is found, otherwise 0
    """
    from daklib.regexes import re_bin_only_nmu
    # Strip the binNMU suffix to also match the original source version.
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))

        # source must exist in 'suite' or a suite that is enhanced by 'suite'
        s = get_suite(suite, session)
        enhances_vcs = session.query(VersionCheck).filter(VersionCheck.suite==s).filter_by(check='Enhances')
        considered_suites = [ vc.reference for vc in enhances_vcs ]
        considered_suites.append(s)

        q = q.filter(DBSource.suites.any(Suite.suite_id.in_([s.suite_id for s in considered_suites])))

    # No source found so return not ok

__all__.append('source_exists')
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """
    return session.query(Suite).filter(Suite.sources.any(source=source)).all()

__all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @type source: str
    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect.  If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBSource objects for the given name (may be empty)
    """
    q = session.query(DBSource).filter_by(source=source)

    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)

__all__.append('get_sources_from_name')
# FIXME: This function fails badly if it finds more than 1 source package and
# its implementation is trivial enough to be inlined.
def get_source_in_suite(source, suite, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite: string
    @param suite: the suite name

    @rtype: string
    @return: the version for I{source} in I{suite}
    """
    q = get_suite(suite, session).get_sources(source)

    except NoResultFound:

__all__.append('get_source_in_suite')
def import_metadata_into_db(obj, session=None):
    """
    This routine works on either DBBinary or DBSource objects and imports
    their metadata into the database

    @type obj: DBBinary or DBSource
    @param obj: object whose read_control_fields() output is stored

    @type session: Session
    @param session: SQLA session object
    """
    fields = obj.read_control_fields()
    for k in fields.keys():
        # Coerce each value to a plain string, falling back through UTF-8
        # and then iso8859-1 when the default str() conversion fails.
        # NOTE(review): the surrounding try: lines are elided in this view.
        val = str(fields[k])
        except UnicodeEncodeError:
            # Fall back to UTF-8
            val = fields[k].encode('utf-8')
            except UnicodeEncodeError:
                # Finally try iso8859-1
                val = fields[k].encode('iso8859-1')
                # Otherwise we allow the exception to percolate up and we cause
                # a reject as someone is playing silly buggers

        obj.metadata[get_or_set_metadatakey(k, session)] = val

    session.commit_or_flush()

__all__.append('import_metadata_into_db')

################################################################################
class SrcFormat(object):
    """ORM object for a source package format (identified by format_name)."""
    def __init__(self, *args, **kwargs):

        return '<SrcFormat %s>' % (self.format_name)

__all__.append('SrcFormat')

################################################################################
# (display name, Suite attribute) pairs used when rendering a suite's details
# (see the SUITE_FIELDS loop in class Suite below).
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
# Why the heck don't we have any UNIQUE constraints in table suite?
# TODO: Add UNIQUE constraints for appropriate columns.
class Suite(ORMObject):
    def __init__(self, suite_name = None, version = None):
        self.suite_name = suite_name
        self.version = version

    def properties(self):
        return ['suite_name', 'version', 'sources_count', 'binaries_count', \

    def not_null_constraints(self):
        return ['suite_name']

    def __eq__(self, val):
        # Allow direct comparison against a plain suite-name string.
        if isinstance(val, str):
            return (self.suite_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.suite_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

        # Render "Display: value" lines for every field in SUITE_FIELDS.
        # NOTE(review): the enclosing method definition is elided in this view.
        for disp, field in SUITE_FIELDS:
            val = getattr(self, field, None)
            ret.append("%s: %s" % (disp, val))

        return "\n".join(ret)

    def get_architectures(self, skipsrc=False, skipall=False):
        """
        Returns list of Architecture objects

        @type skipsrc: boolean
        @param skipsrc: Whether to skip returning the 'source' architecture entry

        @type skipall: boolean
        @param skipall: Whether to skip returning the 'all' architecture entry

        @rtype: list
        @return: list of Architecture objects for the given name (may be empty)
        """
        # NOTE(review): the 'if skipsrc:' / 'if skipall:' guards around the
        # two filters below are elided in this view.
        q = object_session(self).query(Architecture).with_parent(self)
        q = q.filter(Architecture.arch_string != 'source')
        q = q.filter(Architecture.arch_string != 'all')
        return q.order_by(Architecture.arch_string).all()

    def get_sources(self, source):
        """
        Returns a query object representing DBSource that is part of C{suite}.

          - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}

        @type source: string
        @param source: source package name

        @rtype: sqlalchemy.orm.query.Query
        @return: a query of DBSource
        """
        session = object_session(self)
        return session.query(DBSource).filter_by(source = source). \

    def get_overridesuite(self):
        """Return the Suite used for overrides (self when none configured)."""
        if self.overridesuite is None:

        return object_session(self).query(Suite).filter_by(suite_name=self.overridesuite).one()

        # Path of this suite's dists/ directory within its archive.
        return os.path.join(self.archive.path, 'dists', self.suite_name)

__all__.append('Suite')
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Suite
    @return: Suite object for the requested suite name (None if not present)
    """
    q = session.query(Suite).filter_by(suite_name=suite)

    except NoResultFound:

__all__.append('get_suite')

################################################################################
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name. The list is
    empty if suite does not exist.

    @type suite: str
    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Architecture objects for the given name (may be empty)
    """
    # get_suite() returns None for an unknown suite; the AttributeError from
    # calling a method on None is handled below.
    return get_suite(suite, session).get_architectures(skipsrc, skipall)
    except AttributeError:

__all__.append('get_suite_architectures')

################################################################################
class Uid(ORMObject):
    def __init__(self, uid = None, name = None):
        """(assignments elided in this source view)"""

    def __eq__(self, val):
        # Allow direct comparison against a plain uid string.
        if isinstance(val, str):
            return (self.uid == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.uid != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        return ['uid', 'name', 'fingerprint']

    def not_null_constraints(self):
        """(return value elided in this source view)"""

__all__.append('Uid')
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: Uid
    @return: the uid object for the given uidname
    """
    q = session.query(Uid).filter_by(uid=uidname)

    except NoResultFound:

        session.commit_or_flush()

__all__.append('get_or_set_uid')
def get_uid_from_fingerprint(fpr, session=None):
    """Return the Uid attached to key fingerprint C{fpr}, if any."""
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)

    except NoResultFound:

__all__.append('get_uid_from_fingerprint')

################################################################################
class MetadataKey(ORMObject):
    def __init__(self, key = None):
        """(assignment elided in this source view)"""

    def properties(self):
        """(return value elided in this source view)"""

    def not_null_constraints(self):
        """(return value elided in this source view)"""

__all__.append('MetadataKey')
def get_or_set_metadatakey(keyname, session=None):
    """
    Returns MetadataKey object for given C{keyname}.

    If no matching keyname is found, a row is inserted.

    @type keyname: string
    @param keyname: The keyname to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: MetadataKey
    @return: the metadatakey object for the given keyname
    """
    q = session.query(MetadataKey).filter_by(key=keyname)

    except NoResultFound:
        ret = MetadataKey(keyname)
        session.commit_or_flush()

__all__.append('get_or_set_metadatakey')

################################################################################
class BinaryMetadata(ORMObject):
    def __init__(self, key = None, value = None, binary = None):
        # NOTE(review): the 'self.key' / 'self.value' assignments are elided
        # in this view.
        self.binary = binary

    def properties(self):
        return ['binary', 'key', 'value']

    def not_null_constraints(self):
        """(return value elided in this source view)"""

__all__.append('BinaryMetadata')

################################################################################
class SourceMetadata(ORMObject):
    def __init__(self, key = None, value = None, source = None):
        # NOTE(review): the 'self.key' / 'self.value' assignments are elided
        # in this view.
        self.source = source

    def properties(self):
        return ['source', 'key', 'value']

    def not_null_constraints(self):
        """(return value elided in this source view)"""

__all__.append('SourceMetadata')

################################################################################
class VersionCheck(ORMObject):
    def __init__(self, *args, **kwargs):
        """(body elided in this source view)"""

    def properties(self):
        """(return value elided in this source view)"""
        #return ['suite_id', 'check', 'reference_id']

    def not_null_constraints(self):
        return ['suite', 'check', 'reference']

__all__.append('VersionCheck')
def get_version_checks(suite_name, check = None, session = None):
    """Return VersionCheck rows for C{suite_name}, optionally restricted to
    the given C{check} kind (e.g. 'Enhances')."""
    suite = get_suite(suite_name, session)

    # Make sure that what we return is iterable so that list comprehensions
    # involving this don't cause a traceback

    q = session.query(VersionCheck).filter_by(suite=suite)
    q = q.filter_by(check=check)

__all__.append('get_version_checks')

################################################################################
2530 class DBConn(object):
2532 database module init.
2536 def __init__(self, *args, **kwargs):
2537 self.__dict__ = self.__shared_state
2539 if not getattr(self, 'initialised', False):
2540 self.initialised = True
2541 self.debug = kwargs.has_key('debug')
# Reflect the projectb database schema: autoload each named table and view
# from the live database and expose them as self.tbl_<name> / self.view_<name>
# attributes for the mapper setup in __setupmappers.
# NOTE(review): the "tables = (" and "views = (" openers and many entries of
# both tuples are elided from this excerpt.
2544 def __setuptables(self):
2547 'acl_architecture_map',
2548 'acl_fingerprint_map',
2555 'binaries_metadata',
2562 'external_overrides',
2563 'extra_src_references',
2565 'files_archive_map',
2571 # TODO: the maintainer column in table override should be removed.
2575 'policy_queue_upload',
2576 'policy_queue_upload_binaries_map',
2577 'policy_queue_byhand_file',
2580 'signature_history',
2589 'suite_architectures',
2590 'suite_build_queue_copy',
2591 'suite_src_formats',
2597 'almost_obsolete_all_associations',
2598 'almost_obsolete_src_associations',
2599 'any_associations_source',
2600 'bin_associations_binaries',
2601 'binaries_suite_arch',
2604 'newest_all_associations',
2605 'newest_any_associations',
2607 'newest_src_association',
2608 'obsolete_all_associations',
2609 'obsolete_any_associations',
2610 'obsolete_any_by_all_associations',
2611 'obsolete_src_associations',
2613 'src_associations_bin',
2614 'src_associations_src',
2615 'suite_arch_by_name',
# Reflect each table from the database; useexisting tolerates re-running
# setup against the same MetaData (e.g. after a fork re-init).
2618 for table_name in tables:
2619 table = Table(table_name, self.db_meta, \
2620 autoload=True, useexisting=True)
2621 setattr(self, 'tbl_%s' % table_name, table)
# Views are reflected the same way but exposed under the view_ prefix.
2623 for view_name in views:
2624 view = Table(view_name, self.db_meta, autoload=True)
2625 setattr(self, 'view_%s' % view_name, view)
# Wire every ORM class to its reflected table via classical SQLAlchemy
# mappers. relation()/backref() define the inter-object links; mappers that
# pass "extension = validator" get attribute validation on assignment
# (validator is defined elsewhere in this file). lazy='dynamic' backrefs
# return query objects instead of loading full collections.
# NOTE(review): several "properties = dict(" openers and closing lines are
# elided from this excerpt (e.g. for ACL, ACLPerSource, PolicyQueueUpload,
# PolicyQueueByhandFile, BinContents, SrcContents, MetadataKey,
# BinaryMetadata, SourceMetadata, VersionCheck); the visible lines are only
# the keyword entries of those dicts.
2627 def __setupmappers(self):
2628 mapper(Architecture, self.tbl_architecture,
2629 properties = dict(arch_id = self.tbl_architecture.c.id,
2630 suites = relation(Suite, secondary=self.tbl_suite_architectures,
2631 order_by=self.tbl_suite.c.suite_name,
2632 backref=backref('architectures', order_by=self.tbl_architecture.c.arch_string))),
2633 extension = validator)
2635 mapper(ACL, self.tbl_acl,
2637 architectures = relation(Architecture, secondary=self.tbl_acl_architecture_map, collection_class=set),
2638 fingerprints = relation(Fingerprint, secondary=self.tbl_acl_fingerprint_map, collection_class=set),
2639 match_keyring = relation(Keyring, primaryjoin=(self.tbl_acl.c.match_keyring_id == self.tbl_keyrings.c.id)),
2640 per_source = relation(ACLPerSource, collection_class=set),
# ACLPerSource has two joins to fingerprint, so each relation needs an
# explicit primaryjoin to disambiguate.
2643 mapper(ACLPerSource, self.tbl_acl_per_source,
2645 acl = relation(ACL),
2646 fingerprint = relation(Fingerprint, primaryjoin=(self.tbl_acl_per_source.c.fingerprint_id == self.tbl_fingerprint.c.id)),
2647 created_by = relation(Fingerprint, primaryjoin=(self.tbl_acl_per_source.c.created_by_id == self.tbl_fingerprint.c.id)),
2650 mapper(Archive, self.tbl_archive,
2651 properties = dict(archive_id = self.tbl_archive.c.id,
2652 archive_name = self.tbl_archive.c.name))
2654 mapper(ArchiveFile, self.tbl_files_archive_map,
2655 properties = dict(archive = relation(Archive, backref='files'),
2656 component = relation(Component),
2657 file = relation(PoolFile, backref='archives')))
2659 mapper(BuildQueue, self.tbl_build_queue,
2660 properties = dict(queue_id = self.tbl_build_queue.c.id,
2661 suite = relation(Suite, primaryjoin=(self.tbl_build_queue.c.suite_id==self.tbl_suite.c.id))))
# DBBinary: exposes both the raw foreign-key columns (maintainer_id,
# source_id, ...) and the mapped relations for each.
2663 mapper(DBBinary, self.tbl_binaries,
2664 properties = dict(binary_id = self.tbl_binaries.c.id,
2665 package = self.tbl_binaries.c.package,
2666 version = self.tbl_binaries.c.version,
2667 maintainer_id = self.tbl_binaries.c.maintainer,
2668 maintainer = relation(Maintainer),
2669 source_id = self.tbl_binaries.c.source,
2670 source = relation(DBSource, backref='binaries'),
2671 arch_id = self.tbl_binaries.c.architecture,
2672 architecture = relation(Architecture),
2673 poolfile_id = self.tbl_binaries.c.file,
2674 poolfile = relation(PoolFile),
2675 binarytype = self.tbl_binaries.c.type,
2676 fingerprint_id = self.tbl_binaries.c.sig_fpr,
2677 fingerprint = relation(Fingerprint),
2678 install_date = self.tbl_binaries.c.install_date,
2679 suites = relation(Suite, secondary=self.tbl_bin_associations,
2680 backref=backref('binaries', lazy='dynamic')),
2681 extra_sources = relation(DBSource, secondary=self.tbl_extra_src_references,
2682 backref=backref('extra_binary_references', lazy='dynamic')),
# 'key' maps metadata rows as a dict keyed by metadata key; cascade='all'
# deletes metadata together with the binary.
2683 key = relation(BinaryMetadata, cascade='all',
2684 collection_class=attribute_mapped_collection('key'))),
2685 extension = validator)
2687 mapper(Component, self.tbl_component,
2688 properties = dict(component_id = self.tbl_component.c.id,
2689 component_name = self.tbl_component.c.name),
2690 extension = validator)
2692 mapper(DBConfig, self.tbl_config,
2693 properties = dict(config_id = self.tbl_config.c.id))
2695 mapper(DSCFile, self.tbl_dsc_files,
2696 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
2697 source_id = self.tbl_dsc_files.c.source,
2698 source = relation(DBSource),
2699 poolfile_id = self.tbl_dsc_files.c.file,
2700 poolfile = relation(PoolFile)))
2702 mapper(ExternalOverride, self.tbl_external_overrides,
2704 suite_id = self.tbl_external_overrides.c.suite,
2705 suite = relation(Suite),
2706 component_id = self.tbl_external_overrides.c.component,
2707 component = relation(Component)))
2709 mapper(PoolFile, self.tbl_files,
2710 properties = dict(file_id = self.tbl_files.c.id,
2711 filesize = self.tbl_files.c.size),
2712 extension = validator)
2714 mapper(Fingerprint, self.tbl_fingerprint,
2715 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
2716 uid_id = self.tbl_fingerprint.c.uid,
2717 uid = relation(Uid),
2718 keyring_id = self.tbl_fingerprint.c.keyring,
2719 keyring = relation(Keyring),
2720 acl = relation(ACL)),
2721 extension = validator)
# NOTE(review): the trailing comma after the closing parens on the next
# mapper call looks suspicious (tuple-forming?), but adjacent lines are
# elided from this excerpt — confirm against the full source before
# changing anything.
2723 mapper(Keyring, self.tbl_keyrings,
2724 properties = dict(keyring_name = self.tbl_keyrings.c.name,
2725 keyring_id = self.tbl_keyrings.c.id,
2726 acl = relation(ACL, primaryjoin=(self.tbl_keyrings.c.acl_id == self.tbl_acl.c.id)))),
2728 mapper(DBChange, self.tbl_changes,
2729 properties = dict(change_id = self.tbl_changes.c.id,
2730 seen = self.tbl_changes.c.seen,
2731 source = self.tbl_changes.c.source,
2732 binaries = self.tbl_changes.c.binaries,
2733 architecture = self.tbl_changes.c.architecture,
2734 distribution = self.tbl_changes.c.distribution,
2735 urgency = self.tbl_changes.c.urgency,
2736 maintainer = self.tbl_changes.c.maintainer,
2737 changedby = self.tbl_changes.c.changedby,
2738 date = self.tbl_changes.c.date,
2739 version = self.tbl_changes.c.version))
# Maintainer joins to source twice (maintainer vs. changedby), hence the
# explicit primaryjoins.
2741 mapper(Maintainer, self.tbl_maintainer,
2742 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
2743 maintains_sources = relation(DBSource, backref='maintainer',
2744 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
2745 changed_sources = relation(DBSource, backref='changedby',
2746 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
2747 extension = validator)
2749 mapper(NewComment, self.tbl_new_comments,
2750 properties = dict(comment_id = self.tbl_new_comments.c.id,
2751 policy_queue = relation(PolicyQueue)))
2753 mapper(Override, self.tbl_override,
2754 properties = dict(suite_id = self.tbl_override.c.suite,
2755 suite = relation(Suite, \
2756 backref=backref('overrides', lazy='dynamic')),
2757 package = self.tbl_override.c.package,
2758 component_id = self.tbl_override.c.component,
2759 component = relation(Component, \
2760 backref=backref('overrides', lazy='dynamic')),
2761 priority_id = self.tbl_override.c.priority,
2762 priority = relation(Priority, \
2763 backref=backref('overrides', lazy='dynamic')),
2764 section_id = self.tbl_override.c.section,
2765 section = relation(Section, \
2766 backref=backref('overrides', lazy='dynamic')),
2767 overridetype_id = self.tbl_override.c.type,
2768 overridetype = relation(OverrideType, \
2769 backref=backref('overrides', lazy='dynamic'))))
2771 mapper(OverrideType, self.tbl_override_type,
2772 properties = dict(overridetype = self.tbl_override_type.c.type,
2773 overridetype_id = self.tbl_override_type.c.id))
2775 mapper(PolicyQueue, self.tbl_policy_queue,
2776 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id,
2777 suite = relation(Suite, primaryjoin=(self.tbl_policy_queue.c.suite_id == self.tbl_suite.c.id))))
2779 mapper(PolicyQueueUpload, self.tbl_policy_queue_upload,
2781 changes = relation(DBChange),
2782 policy_queue = relation(PolicyQueue, backref='uploads'),
2783 target_suite = relation(Suite),
2784 source = relation(DBSource),
2785 binaries = relation(DBBinary, secondary=self.tbl_policy_queue_upload_binaries_map),
2788 mapper(PolicyQueueByhandFile, self.tbl_policy_queue_byhand_file,
2790 upload = relation(PolicyQueueUpload, backref='byhand'),
2794 mapper(Priority, self.tbl_priority,
2795 properties = dict(priority_id = self.tbl_priority.c.id))
2797 mapper(Section, self.tbl_section,
2798 properties = dict(section_id = self.tbl_section.c.id,
2799 section=self.tbl_section.c.section))
2801 mapper(SignatureHistory, self.tbl_signature_history)
2803 mapper(DBSource, self.tbl_source,
2804 properties = dict(source_id = self.tbl_source.c.id,
2805 version = self.tbl_source.c.version,
2806 maintainer_id = self.tbl_source.c.maintainer,
2807 poolfile_id = self.tbl_source.c.file,
2808 poolfile = relation(PoolFile),
2809 fingerprint_id = self.tbl_source.c.sig_fpr,
2810 fingerprint = relation(Fingerprint),
2811 changedby_id = self.tbl_source.c.changedby,
2812 srcfiles = relation(DSCFile,
2813 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
2814 suites = relation(Suite, secondary=self.tbl_src_associations,
2815 backref=backref('sources', lazy='dynamic')),
2816 uploaders = relation(Maintainer,
2817 secondary=self.tbl_src_uploaders),
# Dict-style metadata access, mirroring the DBBinary 'key' relation above.
2818 key = relation(SourceMetadata, cascade='all',
2819 collection_class=attribute_mapped_collection('key'))),
2820 extension = validator)
2822 mapper(SrcFormat, self.tbl_src_format,
2823 properties = dict(src_format_id = self.tbl_src_format.c.id,
2824 format_name = self.tbl_src_format.c.format_name))
# Suite joins policy_queue twice (regular vs. NEW queue), hence the
# explicit primaryjoins.
2826 mapper(Suite, self.tbl_suite,
2827 properties = dict(suite_id = self.tbl_suite.c.id,
2828 policy_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.policy_queue_id == self.tbl_policy_queue.c.id)),
2829 new_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.new_queue_id == self.tbl_policy_queue.c.id)),
2830 copy_queues = relation(BuildQueue,
2831 secondary=self.tbl_suite_build_queue_copy),
2832 srcformats = relation(SrcFormat, secondary=self.tbl_suite_src_formats,
2833 backref=backref('suites', lazy='dynamic')),
2834 archive = relation(Archive, backref='suites'),
2835 acls = relation(ACL, secondary=self.tbl_suite_acl_map, collection_class=set)),
2836 extension = validator)
2838 mapper(Uid, self.tbl_uid,
2839 properties = dict(uid_id = self.tbl_uid.c.id,
2840 fingerprint = relation(Fingerprint)),
2841 extension = validator)
2843 mapper(BinContents, self.tbl_bin_contents,
2845 binary = relation(DBBinary,
2846 backref=backref('contents', lazy='dynamic', cascade='all')),
2847 file = self.tbl_bin_contents.c.file))
2849 mapper(SrcContents, self.tbl_src_contents,
2851 source = relation(DBSource,
2852 backref=backref('contents', lazy='dynamic', cascade='all')),
2853 file = self.tbl_src_contents.c.file))
2855 mapper(MetadataKey, self.tbl_metadata_keys,
2857 key_id = self.tbl_metadata_keys.c.key_id,
2858 key = self.tbl_metadata_keys.c.key))
2860 mapper(BinaryMetadata, self.tbl_binaries_metadata,
2862 binary_id = self.tbl_binaries_metadata.c.bin_id,
2863 binary = relation(DBBinary),
2864 key_id = self.tbl_binaries_metadata.c.key_id,
2865 key = relation(MetadataKey),
2866 value = self.tbl_binaries_metadata.c.value))
2868 mapper(SourceMetadata, self.tbl_source_metadata,
2870 source_id = self.tbl_source_metadata.c.src_id,
2871 source = relation(DBSource),
2872 key_id = self.tbl_source_metadata.c.key_id,
2873 key = relation(MetadataKey),
2874 value = self.tbl_source_metadata.c.value))
# VersionCheck joins suite twice (checked suite vs. reference suite);
# lazy='joined' eagerly loads the reference suite in the same query.
2876 mapper(VersionCheck, self.tbl_version_check,
2878 suite_id = self.tbl_version_check.c.suite,
2879 suite = relation(Suite, primaryjoin=self.tbl_version_check.c.suite==self.tbl_suite.c.id),
2880 reference_id = self.tbl_version_check.c.reference,
2881 reference = relation(Suite, primaryjoin=self.tbl_version_check.c.reference==self.tbl_suite.c.id, lazy='joined')))
2883 ## Connection functions
# Build the PostgreSQL connection string from dak configuration, create the
# SQLAlchemy engine/metadata/session factory, and run table reflection and
# mapper setup. Records the creating PID so session() can detect forks.
# NOTE(review): the "cnf = Config()" line, the "else:" branch opener, the
# "try:" around engine setup and the sessionmaker keyword arguments are
# elided from this excerpt.
2884 def __createconn(self):
2885 from config import Config
# Three connection styles: pg_service.conf service name, explicit
# host(:port)/dbname, or local socket with optional port query arg.
2887 if cnf.has_key("DB::Service"):
2888 connstr = "postgresql://service=%s" % cnf["DB::Service"]
2889 elif cnf.has_key("DB::Host"):
2891 connstr = "postgresql://%s" % cnf["DB::Host"]
# "-1" is the sentinel for "use the default port".
2892 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
2893 connstr += ":%s" % cnf["DB::Port"]
2894 connstr += "/%s" % cnf["DB::Name"]
2897 connstr = "postgresql:///%s" % cnf["DB::Name"]
2898 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
2899 connstr += "?port=%s" % cnf["DB::Port"]
# Engine options from config; echo mirrors the debug flag set in __init__.
2901 engine_args = { 'echo': self.debug }
2902 if cnf.has_key('DB::PoolSize'):
2903 engine_args['pool_size'] = int(cnf['DB::PoolSize'])
2904 if cnf.has_key('DB::MaxOverflow'):
2905 engine_args['max_overflow'] = int(cnf['DB::MaxOverflow'])
# use_native_unicode only exists on SQLAlchemy 0.6/0.7, hence the version
# gate via sa_major_version (defined elsewhere in this file).
2906 if sa_major_version in ('0.6', '0.7') and cnf.has_key('DB::Unicode') and \
2907 cnf['DB::Unicode'] == 'false':
2908 engine_args['use_native_unicode'] = False
2910 # Monkey patch a new dialect in in order to support service= syntax
2911 import sqlalchemy.dialects.postgresql
2912 from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
2913 class PGDialect_psycopg2_dak(PGDialect_psycopg2):
2914 def create_connect_args(self, url):
2915 if str(url).startswith('postgresql://service='):
# Strip the 'postgresql://service=' prefix (21 chars) and hand psycopg2 a
# raw libpq "service=<name>" DSN instead of parsed URL components.
2917 servicename = str(url)[21:]
2918 return (['service=%s' % servicename], {})
2920 return PGDialect_psycopg2.create_connect_args(self, url)
2922 sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak
2925 self.db_pg = create_engine(connstr, **engine_args)
2926 self.db_meta = MetaData()
2927 self.db_meta.bind = self.db_pg
2928 self.db_smaker = sessionmaker(bind=self.db_pg,
# Reflection must precede mapper setup; both need the bound metadata.
2932 self.__setuptables()
2933 self.__setupmappers()
# Connection failure is fatal for dak: report and exit via utils.fubar.
2935 except OperationalError as e:
2937 utils.fubar("Cannot connect to database (%s)" % str(e))
# Remember which process created the engine so session() can re-init after
# a fork (engine/pool objects must not be shared across processes).
2939 self.pid = os.getpid()
# NOTE(review): the docstring's triple quotes, the fork re-initialisation
# call, the guard around the work_mem SET and the final return are elided
# from this excerpt.
2941 def session(self, work_mem = 0):
2943 Returns a new session object. If a work_mem parameter is provided a new
2944 transaction is started and the work_mem parameter is set for this
2945 transaction. The work_mem parameter is measured in MB. A default value
2946 will be used if the parameter is not set.
2948 # reinitialize DBConn in new processes
# Compare against the PID recorded in __createconn; a mismatch means we
# forked and must rebuild the engine rather than share pooled connections.
2949 if self.pid != os.getpid():
2952 session = self.db_smaker()
# SET LOCAL scopes the work_mem override to the current transaction only.
2954 session.execute("SET LOCAL work_mem TO '%d MB'" % work_mem)
# Export DBConn as part of this module's public API.
2957 __all__.append('DBConn')