5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
38 from os.path import normpath
50 import simplejson as json
52 from datetime import datetime, timedelta
53 from errno import ENOENT
54 from tempfile import mkstemp, mkdtemp
55 from subprocess import Popen, PIPE
56 from tarfile import TarFile
58 from inspect import getargspec
61 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
63 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
64 backref, MapperExtension, EXT_CONTINUE, object_mapper, clear_mappers
65 from sqlalchemy import types as sqltypes
66 from sqlalchemy.orm.collections import attribute_mapped_collection
67 from sqlalchemy.ext.associationproxy import association_proxy
69 # Don't remove this, we re-export the exceptions to scripts which import us
70 from sqlalchemy.exc import *
71 from sqlalchemy.orm.exc import NoResultFound
73 # Only import Config until Queue stuff is changed to store its config
75 from config import Config
76 from textutils import fix_maintainer
77 from dak_exceptions import DBUpdateError, NoSourceFieldError, FileExistsError
# NOTE(review): numbered listing excerpt with missing lines — the warning
# category argument and closing parenthesis of each filterwarnings() call
# are on lines absent from this view; confirm against the full source.
79 # suppress some deprecation warnings in squeeze related to sqlalchemy
81 warnings.filterwarnings('ignore', \
82     "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
84 warnings.filterwarnings('ignore', \
85     "Predicate of partial index .* ignored during reflection", \
89 ################################################################################
# NOTE(review): numbered listing excerpt with missing lines. The gap between
# 91 and 95, and between 96 and 99, presumably holds a try/except that picks
# whichever UserDefinedType name the installed SQLAlchemy provides — confirm.
91 # Patch in support for the debversion field type so that it works during
95 # that is for sqlalchemy 0.6
96 UserDefinedType = sqltypes.UserDefinedType
98 # this one for sqlalchemy 0.5
99 UserDefinedType = sqltypes.TypeEngine
# Custom column type mapping PostgreSQL's 'debversion' to SQLAlchemy.
# The method bodies (lines 103-111) are missing from this excerpt.
101 class DebVersion(UserDefinedType):
102     def get_col_spec(self):
105     def bind_processor(self, dialect):
108     # ' = None' is needed for sqlalchemy 0.5:
109     def result_processor(self, dialect, coltype = None):
# Register 'debversion' with the reflection machinery for supported SQLA
# versions; line 116 (missing) is presumably the 'else:' before the raise.
112 sa_major_version = sqlalchemy.__version__[0:3]
113 if sa_major_version in ["0.5", "0.6", "0.7"]:
114     from sqlalchemy.databases import postgres
115     postgres.ischema_names['debversion'] = DebVersion
117     raise Exception("dak only ported to SQLA versions 0.5 to 0.7. See daklib/dbconn.py")
119 ################################################################################
# Public API of this module; extended via __all__.append() after each block.
121 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
123 ################################################################################
# Decorator: supplies a private session when the caller did not pass one.
# NOTE(review): numbered listing excerpt — missing lines here presumably
# include the docstring delimiters, the 'if session is None:'/'else:'
# branches, 'args = list(args)' before line 152 (a bare tuple assignment
# would raise TypeError), a try/finally around the fn() call with
# session.close(), and the final 'return wrapped' — confirm in full source.
125 def session_wrapper(fn):
127     Wrapper around common ".., session=None):" handling. If the wrapped
128     function is called without passing 'session', we create a local one
129     and destroy it when the function ends.
131     Also attaches a commit_or_flush method to the session; if we created a
132     local session, this is a synonym for session.commit(), otherwise it is a
133     synonym for session.flush().
136     def wrapped(*args, **kwargs):
137         private_transaction = False
139         # Find the session object
140         session = kwargs.get('session')
# getargspec() is the Python 2 API; counts declared parameters to decide
# whether the caller supplied a session positionally.
143         if len(args) <= len(getargspec(fn)[0]) - 1:
144             # No session specified as last argument or in kwargs
145             private_transaction = True
146             session = kwargs['session'] = DBConn().session()
148             # Session is last argument in args
152             session = args[-1] = DBConn().session()
153             private_transaction = True
# commit_or_flush commits for a private session, flushes for a shared one.
155         if private_transaction:
156             session.commit_or_flush = session.commit
158             session.commit_or_flush = session.flush
161             return fn(*args, **kwargs)
163             if private_transaction:
164                 # We created a session; close it.
# Manual metadata copy (pre-functools.wraps style); func_name is Python 2.
167     wrapped.__doc__ = fn.__doc__
168     wrapped.func_name = fn.func_name
172 __all__.append('session_wrapper')
174 ################################################################################
# Base class for all mapped ORM classes: provides json()/repr()/str(),
# not-NULL validation, get() by primary key, and cross-session clone().
# NOTE(review): numbered listing excerpt — many body lines are missing
# (docstring delimiters, else/continue branches, the 'data = {}' setup in
# json(), the @classmethod decorator presumably before line 292, and the
# 'return new_object' at the end of clone()); confirm against full source.
176 class ORMObject(object):
178     ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
179     derived classes must implement the properties() method.
182     def properties(self):
184         This method should be implemented by all derived classes and returns a
185         list of the important properties. The properties 'created' and
186         'modified' will be added automatically. A suffix '_count' should be
187         added to properties that are lists or query objects. The most important
188         property name should be returned as the first element in the list
189         because it is used by repr().
195         Returns a JSON representation of the object based on the properties
196         returned from the properties() method.
199         # add created and modified
200         all_properties = self.properties() + ['created', 'modified']
201         for property in all_properties:
202             # check for list or query
# '_count' suffix: emit the length/count of the underlying collection
# instead of serializing its elements.
203             if property[-6:] == '_count':
204                 real_property = property[:-6]
205                 if not hasattr(self, real_property):
207                 value = getattr(self, real_property)
208                 if hasattr(value, '__len__'):
211                 elif hasattr(value, 'count'):
212                     # query (but not during validation)
# Skipped while validating to avoid triggering a second flush (see below).
213                     if self.in_validation:
215                     value = value.count()
217                     raise KeyError('Do not understand property %s.' % property)
219                 if not hasattr(self, property):
222                 value = getattr(self, property)
226                 elif isinstance(value, ORMObject):
227                     # use repr() for ORMObject types
230                     # we want a string for all other types because json cannot
233             data[property] = value
234         return json.dumps(data)
238         Returns the name of the class.
240         return type(self).__name__
244         Returns a short string representation of the object using the first
245         element from the properties() method.
247         primary_property = self.properties()[0]
248         value = getattr(self, primary_property)
249         return '<%s %s>' % (self.classname(), str(value))
253         Returns a human readable form of the object using the properties()
256         return '<%s %s>' % (self.classname(), self.json())
258     def not_null_constraints(self):
260         Returns a list of properties that must be not NULL. Derived classes
261         should override this method if needed.
# Class-level validation state shared with json() above.
265     validation_message = \
266         "Validation failed because property '%s' must not be empty in object\n%s"
268     in_validation = False
272         This function validates the not NULL constraints as returned by
273         not_null_constraints(). It raises the DBUpdateError exception if
276         for property in self.not_null_constraints():
277             # TODO: It is a bit awkward that the mapper configuration allow
278             # directly setting the numeric _id columns. We should get rid of it
# A set foo_id column satisfies the constraint even if 'foo' is unloaded.
280             if hasattr(self, property + '_id') and \
281                 getattr(self, property + '_id') is not None:
283             if not hasattr(self, property) or getattr(self, property) is None:
284                 # str() might lead to races due to a 2nd flush
285                 self.in_validation = True
286                 message = self.validation_message % (property, str(self))
287                 self.in_validation = False
288                 raise DBUpdateError(message)
# Presumably a @classmethod (decorator on missing line 291) — confirm.
292     def get(cls, primary_key, session = None):
294         This is a support function that allows getting an object by its primary
297             Architecture.get(3[, session])
299         instead of the more verbose
301             session.query(Architecture).get(3)
303         return session.query(cls).get(primary_key)
305     def session(self, replace = False):
307         Returns the current session that is associated with the object. May
308         return None if object is in detached state.
311         return object_session(self)
313     def clone(self, session = None):
315         Clones the current object in a new session and returns the new clone. A
316         fresh session is created if the optional session parameter is not
317         provided. The function will fail if a session is provided and has
320         RATIONALE: SQLAlchemy's session is not thread safe. This method clones
321         an existing object to allow several threads to work with their own
322         instances of an ORMObject.
324         WARNING: Only persistent (committed) objects can be cloned. Changes
325         made to the original object that are not committed yet will get lost.
326         The session of the new object will always be rolled back to avoid
330         if self.session() is None:
331             raise RuntimeError( \
332                 'Method clone() failed for detached object:\n%s' % self)
333         self.session().flush()
334         mapper = object_mapper(self)
335         primary_key = mapper.primary_key_from_instance(self)
336         object_class = self.__class__
338             session = DBConn().session()
# Refuse to clone into a session that has pending (unflushed) changes.
339         elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
340             raise RuntimeError( \
341                 'Method clone() failed due to unflushed changes in session.')
342         new_object = session.query(object_class).get(primary_key)
344         if new_object is None:
345             raise RuntimeError( \
346                 'Method clone() failed for non-persistent object:\n%s' % self)
349 __all__.append('ORMObject')
351 ################################################################################
# MapperExtension hooking validate() into insert/update events.
# NOTE(review): method bodies (presumably 'self.validate(instance)' plus
# 'return EXT_CONTINUE') are on lines missing from this excerpt — confirm.
353 class Validator(MapperExtension):
355     This class calls the validate() method for each instance for the
356     'before_update' and 'before_insert' events. A global object validator is
357     used for configuring the individual mappers.
360     def before_update(self, mapper, connection, instance):
364     def before_insert(self, mapper, connection, instance):
# Module-level singleton used when configuring the individual mappers.
368 validator = Validator()
370 ################################################################################
# NOTE(review): the 'def __repr__(self):' lines (373 and 379) are missing
# from this excerpt; the return statements below belong to those methods.
372 class ACL(ORMObject):
374         return "<ACL {0}>".format(self.name)
376 __all__.append('ACL')
# Per-source ACL entry linking an ACL, a fingerprint and a source package.
378 class ACLPerSource(ORMObject):
380         return "<ACLPerSource acl={0} fingerprint={1} source={2} reason={3}>".format(self.acl.name, self.fingerprint.fingerprint, self.source, self.reason)
382 __all__.append('ACLPerSource')
384 ################################################################################
class Architecture(ORMObject):
    """ORM class for a Debian architecture (e.g. 'amd64')."""

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Allow an Architecture to be compared directly against a plain
        # architecture-name string.
        if not isinstance(val, str):
            # Defer to the normal comparison machinery for anything else.
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        # Mirror of __eq__ for the inequality operator.
        if not isinstance(val, str):
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # 'arch_string' comes first because ORMObject.__repr__() uses the
        # first listed property.
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']

__all__.append('Architecture')
# NOTE(review): numbered listing excerpt — the @session_wrapper decorator
# (presumably line 411/437), the try:/return q.one() and the
# 'return None' after NoResultFound are on missing lines; confirm.
412 def get_architecture(architecture, session=None):
414     Returns database id for given C{architecture}.
416     @type architecture: string
417     @param architecture: The name of the architecture
419     @type session: Session
420     @param session: Optional SQLA session object (a temporary one will be
421     generated if not supplied)
424     @return: Architecture object for the given arch (None if not present)
427     q = session.query(Architecture).filter_by(arch_string=architecture)
431     except NoResultFound:
434 __all__.append('get_architecture')
436 # TODO: should be removed because the implementation is too trivial
438 def get_architecture_suites(architecture, session=None):
440     Returns list of Suite objects for given C{architecture} name
442     @type architecture: str
443     @param architecture: Architecture name to search for
445     @type session: Session
446     @param session: Optional SQL session object (a temporary one will be
447     generated if not supplied)
450     @return: list of Suite objects for the given name (may be empty)
# NOTE(review): raises AttributeError if the architecture is unknown,
# since get_architecture() then returns None — presumably intentional.
453     return get_architecture(architecture, session).suites
455 __all__.append('get_architecture_suites')
457 ################################################################################
# NOTE(review): __init__ body and the 'def __repr__' line are on missing
# lines of this excerpt; the return below belongs to __repr__.
459 class Archive(object):
460     def __init__(self, *args, **kwargs):
464         return '<Archive %s>' % self.archive_name
466 __all__.append('Archive')
# Lookup helper; the try/return around q.one() is on missing lines.
469 def get_archive(archive, session=None):
471     returns database id for given C{archive}.
473     @type archive: string
474     @param archive: the name of the archive
476     @type session: Session
477     @param session: Optional SQLA session object (a temporary one will be
478     generated if not supplied)
481     @return: Archive object for the given name (None if not present)
# Archive names are matched case-insensitively by lowering the input.
484     archive = archive.lower()
486     q = session.query(Archive).filter_by(archive_name=archive)
490     except NoResultFound:
493 __all__.append('get_archive')
495 ################################################################################
# Association of a pool file with an archive and component.
# NOTE(review): missing lines presumably include 'self.file = file' and a
# @property 'def path(self):' before the return on line 504 — confirm.
497 class ArchiveFile(object):
498     def __init__(self, archive=None, component=None, file=None):
499         self.archive = archive
500         self.component = component
504         return os.path.join(self.archive.path, 'pool', self.component.component_name, self.file.filename)
506 __all__.append('ArchiveFile')
508 ################################################################################
# One row of a binary package's file list; __init__ body is on missing lines.
510 class BinContents(ORMObject):
511     def __init__(self, file = None, binary = None):
515     def properties(self):
516         return ['file', 'binary']
518 __all__.append('BinContents')
520 ################################################################################
def subprocess_setup():
    # Reset SIGPIPE to its default disposition for a child process.
    # Python installs its own SIGPIPE handler, which most non-Python
    # subprocesses do not expect to inherit.
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
# ORM class for a binary package (.deb/.udeb).
# NOTE(review): numbered listing excerpt — missing lines include
# 'self.source = source' (532), the 'def get_id(self):' line before 542,
# the tail of not_null_constraints() (551-552), the try/yield logic of
# scan_contents() (570-578), and the close/return of read_control()
# (590-593); confirm against the full source.
527 class DBBinary(ORMObject):
528     def __init__(self, package = None, source = None, version = None, \
529         maintainer = None, architecture = None, poolfile = None, \
530         binarytype = 'deb', fingerprint=None):
531         self.package = package
533         self.version = version
534         self.maintainer = maintainer
535         self.architecture = architecture
536         self.poolfile = poolfile
537         self.binarytype = binarytype
538         self.fingerprint = fingerprint
542         return self.binary_id
544     def properties(self):
545         return ['package', 'version', 'maintainer', 'source', 'architecture', \
546             'poolfile', 'binarytype', 'fingerprint', 'install_date', \
547             'suites_count', 'binary_id', 'contents_count', 'extra_sources']
549     def not_null_constraints(self):
550         return ['package', 'version', 'maintainer', 'source', 'poolfile', \
# Proxy exposing the binaries_metadata key/value rows as a dict-like.
553     metadata = association_proxy('key', 'value')
555     def scan_contents(self):
557         Yields the contents of the package. Only regular files are yielded and
558         the path names are normalized after converting them from either utf-8
559         or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the
560         package does not contain any regular file.
562         fullpath = self.poolfile.fullpath
# subprocess_setup restores SIGPIPE defaults in the dpkg-deb child.
563         dpkg = Popen(['dpkg-deb', '--fsys-tarfile', fullpath], stdout = PIPE,
564             preexec_fn = subprocess_setup)
565         tar = TarFile.open(fileobj = dpkg.stdout, mode = 'r|')
566         for member in tar.getmembers():
567             if not member.isdir():
568                 name = normpath(member.name)
569                 # enforce proper utf-8 encoding
572                 except UnicodeDecodeError:
573                     name = name.decode('iso8859-1').encode('utf-8')
579     def read_control(self):
581         Reads the control information from a binary.
584         @return: stanza text of the control section.
587         fullpath = self.poolfile.fullpath
588         deb_file = open(fullpath, 'r')
589         stanza = utils.deb_extract_control(deb_file)
594     def read_control_fields(self):
596         Reads the control information from a binary and return
600         @return: fields of the control section as a dictionary.
603         stanza = self.read_control()
604         return apt_pkg.TagSection(stanza)
606 __all__.append('DBBinary')
# NOTE(review): @session_wrapper decorators and docstring delimiters are on
# missing lines of this excerpt; confirm against the full source.
609 def get_suites_binary_in(package, session=None):
611     Returns list of Suite objects which given C{package} name is in
614     @param package: DBBinary package name to search for
617     @return: list of Suite objects for the given package
620     return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()
622 __all__.append('get_suites_binary_in')
# NOTE(review): 'arch_list=[]' is a mutable default argument; it is only
# read here, so it is harmless, but a None default would be safer style.
625 def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
627     Returns the component name of the newest binary package in suite_list or
628     None if no package is found. The result can be optionally filtered by a list
629     of architecture names.
632     @param package: DBBinary package name to search for
634     @type suite_list: list of str
635     @param suite_list: list of suite_name items
637     @type arch_list: list of str
638     @param arch_list: optional list of arch_string items that defaults to []
640     @rtype: str or NoneType
641     @return: name of component or None
644     q = session.query(DBBinary).filter_by(package = package). \
645         join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
646     if len(arch_list) > 0:
647         q = q.join(DBBinary.architecture). \
648             filter(Architecture.arch_string.in_(arch_list))
# Newest version first; the 'if binary is None: return None' branch is on
# missing lines (650-652).
649     binary = q.order_by(desc(DBBinary.version)).first()
653         return binary.poolfile.component.component_name
655 __all__.append('get_component_by_package_suite')
657 ################################################################################
# NOTE(review): __init__ body and the 'def __repr__' line are on missing
# lines; the return below belongs to __repr__.
659 class BuildQueue(object):
660     def __init__(self, *args, **kwargs):
664         return '<BuildQueue %s>' % self.queue_name
666 __all__.append('BuildQueue')
668 ################################################################################
class Component(ORMObject):
    """ORM class for an archive component (main, contrib, ...)."""

    def __init__(self, component_name = None):
        self.component_name = component_name

    def __eq__(self, val):
        # Allow a Component to be compared directly with its name string.
        if not isinstance(val, str):
            # Defer to the normal comparison machinery for anything else.
            return NotImplemented
        return self.component_name == val

    def __ne__(self, val):
        # Mirror of __eq__ for the inequality operator.
        if not isinstance(val, str):
            return NotImplemented
        return self.component_name != val

    def properties(self):
        # 'component_name' comes first because ORMObject.__repr__() uses
        # the first listed property.
        return ['component_name', 'component_id', 'description', \
            'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        return ['component_name']

__all__.append('Component')
# NOTE(review): decorators, try/return statements and 'return None'
# branches are on missing lines of this excerpt; confirm.
697 def get_component(component, session=None):
699     Returns database id for given C{component}.
701     @type component: string
702     @param component: The name of the component
705     @return: the database id for the given component
# Component names are matched case-insensitively by lowering the input.
708     component = component.lower()
710     q = session.query(Component).filter_by(component_name=component)
714     except NoResultFound:
717 __all__.append('get_component')
720 def get_mapped_component(component_name, session=None):
721     """get component after mappings
723     Evaluate component mappings from ComponentMappings in dak.conf for the
724     given component name.
726     @todo: ansgar wants to get rid of this. It's currently only used for
729     @type component_name: str
730     @param component_name: component name
732     @param session: database session
734     @rtype: L{daklib.dbconn.Component} or C{None}
735     @return: component after applying maps or C{None}
# Each mapping entry is "<src> <dst>"; the rebind of component_name to dst
# (line 741) is on a missing line of this excerpt.
738     for m in cnf.value_list("ComponentMappings"):
739         (src, dst) = m.split()
740         if component_name == src:
742     component = session.query(Component).filter_by(component_name=component_name).first()
745 __all__.append('get_mapped_component')
748 def get_component_names(session=None):
750     Returns list of strings of component names.
753     @return: list of strings of component names
756     return [ x.component_name for x in session.query(Component).all() ]
758 __all__.append('get_component_names')
760 ################################################################################
# NOTE(review): __init__ body and the 'def __repr__' line are on missing
# lines; the return below belongs to __repr__.
762 class DBConfig(object):
763     def __init__(self, *args, **kwargs):
767         return '<DBConfig %s>' % self.name
769 __all__.append('DBConfig')
771 ################################################################################
# NOTE(review): the @session_wrapper decorator, the try: before line 794
# and the session.add(cf) before line 799 are on missing lines; confirm.
774 def get_or_set_contents_file_id(filename, session=None):
776     Returns database id for given filename.
778     If no matching file is found, a row is inserted.
780     @type filename: string
781     @param filename: The filename
782     @type session: SQLAlchemy
783     @param session: Optional SQL session object (a temporary one will be
784     generated if not supplied). If not passed, a commit will be performed at
785     the end of the function, otherwise the caller is responsible for committing.
788     @return: the database id for the given filename
791     q = session.query(ContentFilename).filter_by(filename=filename)
794         ret = q.one().cafilename_id
795     except NoResultFound:
796         cf = ContentFilename()
797         cf.filename = filename
# commit_or_flush is attached by session_wrapper: commit for a private
# session, flush for a caller-provided one.
799         session.commit_or_flush()
800         ret = cf.cafilename_id
804 __all__.append('get_or_set_contents_file_id')
# NOTE(review): decorator and parts of the SELECT list (lines 832-834) are
# on missing lines of this excerpt; the SQL below is runtime text and has
# been left untouched.
807 def get_contents(suite, overridetype, section=None, session=None):
809     Returns contents for a suite / overridetype combination, limiting
810     to a section if not None.
813     @param suite: Suite object
815     @type overridetype: OverrideType
816     @param overridetype: OverrideType object
818     @type section: Section
819     @param section: Optional section object to limit results to
821     @type session: SQLAlchemy
822     @param session: Optional SQL session object (a temporary one will be
823     generated if not supplied)
826     @return: ResultsProxy object set up to return tuples of (filename, section,
830     # find me all of the contents for a given suite
831     contents_q = """SELECT (p.path||'/'||n.file) AS fn,
835                     FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
836                     JOIN content_file_names n ON (c.filename=n.id)
837                     JOIN binaries b ON (b.id=c.binary_pkg)
838                     JOIN override o ON (o.package=b.package)
839                     JOIN section s ON (s.id=o.section)
840                     WHERE o.suite = :suiteid AND o.type = :overridetypeid
841                     AND b.type=:overridetypename"""
# Bind parameters keep the query safe from SQL injection.
843     vals = {'suiteid': suite.suite_id,
844             'overridetypeid': overridetype.overridetype_id,
845             'overridetypename': overridetype.overridetype}
847     if section is not None:
848         contents_q += " AND s.id = :sectionid"
849         vals['sectionid'] = section.section_id
851     contents_q += " ORDER BY fn"
853     return session.execute(contents_q, vals)
855 __all__.append('get_contents')
857 ################################################################################
# NOTE(review): __init__ body and the 'def __repr__' line are on missing
# lines; the return below belongs to __repr__.
859 class ContentFilepath(object):
860     def __init__(self, *args, **kwargs):
864         return '<ContentFilepath %s>' % self.filepath
866 __all__.append('ContentFilepath')
# Mirror of get_or_set_contents_file_id() for directory paths; the try:
# and session.add(cf) lines are missing from this excerpt.
869 def get_or_set_contents_path_id(filepath, session=None):
871     Returns database id for given path.
873     If no matching file is found, a row is inserted.
875     @type filepath: string
876     @param filepath: The filepath
878     @type session: SQLAlchemy
879     @param session: Optional SQL session object (a temporary one will be
880     generated if not supplied). If not passed, a commit will be performed at
881     the end of the function, otherwise the caller is responsible for committing.
884     @return: the database id for the given path
887     q = session.query(ContentFilepath).filter_by(filepath=filepath)
890         ret = q.one().cafilepath_id
891     except NoResultFound:
892         cf = ContentFilepath()
893         cf.filepath = filepath
895         session.commit_or_flush()
896         ret = cf.cafilepath_id
900 __all__.append('get_or_set_contents_path_id')
902 ################################################################################
# NOTE(review): __init__ body and the 'def __repr__' line are on missing
# lines; the return below belongs to __repr__.
904 class ContentAssociation(object):
905     def __init__(self, *args, **kwargs):
909         return '<ContentAssociation %s>' % self.ca_id
911 __all__.append('ContentAssociation')
# NOTE(review): the try/except around the inserts, the commit, the
# 'return True/False' results and the rollback logic referenced by the
# comment on line 957 are on missing lines of this excerpt; confirm.
913 def insert_content_paths(binary_id, fullpaths, session=None):
915     Make sure given path is associated with given binary id
918     @param binary_id: the id of the binary
919     @type fullpaths: list
920     @param fullpaths: the list of paths of the file being associated with the binary
921     @type session: SQLAlchemy session
922     @param session: Optional SQLAlchemy session. If this is passed, the caller
923     is responsible for ensuring a transaction has begun and committing the
924     results or rolling back based on the result code. If not passed, a commit
925     will be performed at the end of the function, otherwise the caller is
926     responsible for committing.
928     @return: True upon success
933         session = DBConn().session()
# Generator that strips a leading './' and yields bind-parameter dicts.
938     def generate_path_dicts():
939         for fullpath in fullpaths:
940             if fullpath.startswith( './' ):
941                 fullpath = fullpath[2:]
943             yield {'filename':fullpath, 'id': binary_id }
945     for d in generate_path_dicts():
946         session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
955         traceback.print_exc()
957         # Only rollback if we set up the session ourself
964 __all__.append('insert_content_paths')
966 ################################################################################
# NOTE(review): __init__ body and the 'def __repr__' line are on missing
# lines; the return below belongs to __repr__.
968 class DSCFile(object):
969     def __init__(self, *args, **kwargs):
973         return '<DSCFile %s>' % self.dscfile_id
975 __all__.append('DSCFile')
# NOTE(review): decorator and the final 'return q.all()' are on missing
# lines of this excerpt; confirm against the full source.
978 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
980     Returns a list of DSCFiles which may be empty
982     @type dscfile_id: int (optional)
983     @param dscfile_id: the dscfile_id of the DSCFiles to find
985     @type source_id: int (optional)
986     @param source_id: the source id related to the DSCFiles to find
988     @type poolfile_id: int (optional)
989     @param poolfile_id: the poolfile id related to the DSCFiles to find
992     @return: Possibly empty list of DSCFiles
# Filters are applied only for the identifiers actually supplied.
995     q = session.query(DSCFile)
997     if dscfile_id is not None:
998         q = q.filter_by(dscfile_id=dscfile_id)
1000     if source_id is not None:
1001         q = q.filter_by(source_id=source_id)
1003     if poolfile_id is not None:
1004         q = q.filter_by(poolfile_id=poolfile_id)
1008 __all__.append('get_dscfiles')
1010 ################################################################################
# NOTE(review): __init__ body and the 'def __repr__' line are on missing
# lines; the return below belongs to __repr__.
1012 class ExternalOverride(ORMObject):
1013     def __init__(self, *args, **kwargs):
1017         return '<ExternalOverride %s = %s: %s>' % (self.package, self.key, self.value)
1019 __all__.append('ExternalOverride')
1021 ################################################################################
# ORM class for a file in the archive pool.
# NOTE(review): numbered listing excerpt — missing lines include the tail
# of the __init__ signature (1025), @property decorators and 'def' lines
# for fullpath/component/basename, and early 'return False'/'return True'
# branches in identical_to(); confirm against the full source.
1023 class PoolFile(ORMObject):
1024     def __init__(self, filename = None, filesize = -1, \
1026         self.filename = filename
1027         self.filesize = filesize
1028         self.md5sum = md5sum
# NOTE(review): object_session() called via DBConn().session() looks odd —
# object_session is normally a module-level function; confirm intent.
1032         session = DBConn().session().object_session(self)
1033         af = session.query(ArchiveFile).join(Archive).filter(ArchiveFile.file == self).first()
1037     def component(self):
1038         session = DBConn().session().object_session(self)
1039         component_id = session.query(ArchiveFile.component_id).filter(ArchiveFile.file == self) \
1040             .group_by(ArchiveFile.component_id).one()
1041         return session.query(Component).get(component_id)
1045         return os.path.basename(self.filename)
# long() is the Python 2 integer type for large file sizes.
1047     def is_valid(self, filesize = -1, md5sum = None):
1048         return self.filesize == long(filesize) and self.md5sum == md5sum
1050     def properties(self):
1051         return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
1052             'sha256sum', 'source', 'binary', 'last_used']
1054     def not_null_constraints(self):
1055         return ['filename', 'md5sum']
1057     def identical_to(self, filename):
1059         compare size and hash with the given file
1062         @return: true if the given file has the same size and hash as this object; false otherwise
# Size check first to avoid hashing when the cheap comparison already fails.
1064         st = os.stat(filename)
1065         if self.filesize != st.st_size:
# NOTE(review): 'f' is never closed in the visible lines — possible file
# handle leak; confirm whether a close() exists on a missing line.
1068         f = open(filename, "r")
1069         sha256sum = apt_pkg.sha256sum(f)
1070         if sha256sum != self.sha256sum:
1075 __all__.append('PoolFile')
1078 def get_poolfile_like_name(filename, session=None):
1080     Returns an array of PoolFile objects which are like the given name
1082     @type filename: string
1083     @param filename: the filename of the file to check against the DB
1086     @return: array of PoolFile objects
1089     # TODO: There must be a way of properly using bind parameters with %FOO%
1090     q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1094 __all__.append('get_poolfile_like_name')
1096 ################################################################################
# ORM class for an OpenPGP key fingerprint.
# NOTE(review): the continuation of the properties() list (line 1104) is
# missing from this excerpt; confirm against the full source.
1098 class Fingerprint(ORMObject):
1099     def __init__(self, fingerprint = None):
1100         self.fingerprint = fingerprint
1102     def properties(self):
1103         return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
1106     def not_null_constraints(self):
1107         return ['fingerprint']
1109 __all__.append('Fingerprint')
# Read-only lookup; try/return and 'return None' are on missing lines.
1112 def get_fingerprint(fpr, session=None):
1114     Returns Fingerprint object for given fpr.
1117     @param fpr: The fpr to find / add
1119     @type session: SQLAlchemy
1120     @param session: Optional SQL session object (a temporary one will be
1121     generated if not supplied).
1124     @return: the Fingerprint object for the given fpr or None
1127     q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1131     except NoResultFound:
1136 __all__.append('get_fingerprint')
# Get-or-create variant: inserts a new row when the fpr is unknown.
1139 def get_or_set_fingerprint(fpr, session=None):
1141     Returns Fingerprint object for given fpr.
1143     If no matching fpr is found, a row is inserted.
1146     @param fpr: The fpr to find / add
1148     @type session: SQLAlchemy
1149     @param session: Optional SQL session object (a temporary one will be
1150     generated if not supplied). If not passed, a commit will be performed at
1151     the end of the function, otherwise the caller is responsible for committing.
1152     A flush will be performed either way.
1155     @return: the Fingerprint object for the given fpr
1158     q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1162     except NoResultFound:
1163         fingerprint = Fingerprint()
1164         fingerprint.fingerprint = fpr
1165         session.add(fingerprint)
1166         session.commit_or_flush()
1171 __all__.append('get_or_set_fingerprint')
1173 ################################################################################
1175 # Helper routine for Keyring class
# Builds a display name from an LDAP entry's cn/mn/sn attributes, skipping
# empty and '-' placeholder values. NOTE(review): the 'name = []'
# initialisation, the attribute lookup assigning 'ret' and the append are
# on missing lines of this excerpt; confirm against the full source.
1176 def get_ldap_name(entry):
1178     for k in ["cn", "mn", "sn"]:
1180         if ret and ret[0] != "" and ret[0] != "-":
1182     return " ".join(name)
1184 ################################################################################
# In-memory model of a GPG keyring plus helpers to sync keys/uids with the
# database and LDAP.
# NOTE(review): numbered listing excerpt — missing lines include class
# attribute initialisers (keys/fpr_lookup dicts, presumably 1189-1192),
# __init__/__repr__ bodies, the read loop of load_keys() (1220-1223), the
# key/signingkey bookkeeping (1226-1229, 1233), and the byuid/byname
# initialisation in the two generate/import methods; confirm.
1186 class Keyring(object):
# '--fingerprint --fingerprint' twice makes gpg print subkey fingerprints.
1187     gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
1188                      " --with-colons --fingerprint --fingerprint"
1193     def __init__(self, *args, **kwargs):
1197         return '<Keyring %s>' % self.keyring_name
# Undo gpg's \xNN escaping in uid strings (odd list slots hold escapes).
1199     def de_escape_gpg_str(self, txt):
1200         esclist = re.split(r'(\\x..)', txt)
1201         for x in range(1,len(esclist),2):
1202             esclist[x] = "%c" % (int(esclist[x][2:],16))
1203         return "".join(esclist)
1205     def parse_address(self, uid):
1206         """parses uid and returns a tuple of real name and email address"""
# email.Utils is the Python 2 module name (email.utils in Python 3).
1208         (name, address) = email.Utils.parseaddr(uid)
1209         name = re.sub(r"\s*[(].*[)]", "", name)
1210         name = self.de_escape_gpg_str(name)
1213         return (name, address)
# Parse 'gpg --with-colons' output; field[9] is the uid/fingerprint column.
1215     def load_keys(self, keyring):
1216         if not self.keyring_id:
1217             raise Exception('Must be initialized with database information')
1219         k = os.popen(self.gpg_invocation % keyring, "r")
1224             field = line.split(":")
1225             if field[0] == "pub":
1228                 (name, addr) = self.parse_address(field[9])
1230                     self.keys[key]["email"] = addr
1231                 self.keys[key]["name"] = name
1232                 self.keys[key]["fingerprints"] = []
1234             elif key and field[0] == "sub" and len(field) >= 12:
1235                 signingkey = ("s" in field[11])
1236             elif key and field[0] == "uid":
1237                 (name, addr) = self.parse_address(field[9])
1238                 if "email" not in self.keys[key] and "@" in addr:
1239                     self.keys[key]["email"] = addr
1240                     self.keys[key]["name"] = name
1241             elif signingkey and field[0] == "fpr":
1242                 self.keys[key]["fingerprints"].append(field[9])
1243                 self.fpr_lookup[field[9]] = key
1245     def import_users_from_ldap(self, session):
1249         LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
1250         LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
# NOTE(review): anonymous bind via the legacy ldap.open() API — confirm
# this is still the intended connection method.
1252         l = ldap.open(LDAPServer)
1253         l.simple_bind_s("","")
1254         Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
1255                "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
1256                ["uid", "keyfingerprint", "cn", "mn", "sn"])
1258         ldap_fin_uid_id = {}
1265             uid = entry["uid"][0]
1266             name = get_ldap_name(entry)
1267             fingerprints = entry["keyFingerPrint"]
1269             for f in fingerprints:
1270                 key = self.fpr_lookup.get(f, None)
1271                 if key not in self.keys:
1273                 self.keys[key]["uid"] = uid
1277                 keyid = get_or_set_uid(uid, session).uid_id
1278                 byuid[keyid] = (uid, name)
1279                 byname[uid] = (keyid, name)
1281         return (byname, byuid)
# Derive dak uids from key emails using the given % format string; keys
# without an email get the 'invalid-uid' placeholder.
1283     def generate_users_from_keyring(self, format, session):
1287         for x in self.keys.keys():
1288             if "email" not in self.keys[x]:
1290                 self.keys[x]["uid"] = format % "invalid-uid"
1292                 uid = format % self.keys[x]["email"]
1293                 keyid = get_or_set_uid(uid, session).uid_id
1294                 byuid[keyid] = (uid, self.keys[x]["name"])
1295                 byname[uid] = (keyid, self.keys[x]["name"])
1296                 self.keys[x]["uid"] = uid
1299             uid = format % "invalid-uid"
1300             keyid = get_or_set_uid(uid, session).uid_id
1301             byuid[keyid] = (uid, "ungeneratable user id")
1302             byname[uid] = (keyid, "ungeneratable user id")
1304         return (byname, byuid)
1306 __all__.append('Keyring')
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @rtype: Keyring
    @return: the Keyring object for this keyring
    """
    q = session.query(Keyring).filter_by(keyring_name=keyring)
    # NOTE(review): the `try:` / `return q.one()` / `return None` lines are
    # missing in this chunk.
    except NoResultFound:
__all__.append('get_keyring')
def get_active_keyring_paths(session=None):
    """
    @rtype: list of strings
    @return: list of active keyring paths
    """
    # Highest-priority keyring first, per the descending priority sort.
    return [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).order_by(desc(Keyring.priority)).all() ]
__all__.append('get_active_keyring_paths')
def get_primary_keyring_path(session=None):
    """
    Get the full path to the highest priority active keyring

    @rtype: str or None
    @return: path to the active keyring with the highest priority or None if no
             keyring is configured
    """
    keyrings = get_active_keyring_paths()

    if len(keyrings) > 0:
        # NOTE(review): the `return keyrings[0]` / `return None` lines are
        # missing in this chunk.
__all__.append('get_primary_keyring_path')
1358 ################################################################################
class DBChange(object):
    """Database row for an uploaded .changes file."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the `return` below belongs to a __repr__ whose `def`
        # line is missing in this chunk.
        return '<DBChange %s>' % self.changesname

__all__.append('DBChange')
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return: DBChange object for the given filename (C{None} if not present)
    """
    q = session.query(DBChange).filter_by(changesname=filename)
    # NOTE(review): the `try:` / `return q.one()` / `return None` lines are
    # missing in this chunk.
    except NoResultFound:
__all__.append('get_dbchange')
1394 ################################################################################
class Maintainer(ORMObject):
    """Database row for a package maintainer ("Name <email>" string)."""
    def __init__(self, name = None):
        # NOTE(review): the assignment of `name` is missing in this chunk.

    def properties(self):
        return ['name', 'maintainer_id']

    def not_null_constraints(self):
        # NOTE(review): the return statement is missing in this chunk.

    def get_split_maintainer(self):
        # Split self.name via fix_maintainer(); four empty strings when no
        # name is set.
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())

__all__.append('Maintainer')
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """
    q = session.query(Maintainer).filter_by(name=name)
    # NOTE(review): the `try:` / `ret = q.one()` lines and the trailing
    # `return ret` are missing in this chunk.
    except NoResultFound:
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()

__all__.append('get_or_set_maintainer')
def get_maintainer(maintainer_id, session=None):
    """
    Return the name of the maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer
    @return: the Maintainer with this C{maintainer_id}
    """
    # Query.get() returns None for an unknown primary key rather than raising.
    return session.query(Maintainer).get(maintainer_id)

__all__.append('get_maintainer')
1465 ################################################################################
class NewComment(object):
    """Database row for an ftpmaster comment on a NEW-queue upload."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the `return` below belongs to a __repr__ whose `def`
        # line is missing in this chunk.
        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)

__all__.append('NewComment')
def has_new_comment(policy_queue, package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """
    q = session.query(NewComment).filter_by(policy_queue=policy_queue)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    return bool(q.count() > 0)

__all__.append('has_new_comment')
def get_new_comments(policy_queue, package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """
    q = session.query(NewComment).filter_by(policy_queue=policy_queue)
    if package is not None: q = q.filter_by(package=package)
    if version is not None: q = q.filter_by(version=version)
    if comment_id is not None: q = q.filter_by(comment_id=comment_id)
    # NOTE(review): the `return q.all()` line is missing in this chunk.

__all__.append('get_new_comments')
1535 ################################################################################
class Override(ORMObject):
    """Database row assigning section/priority to a package in a suite."""
    def __init__(self, package = None, suite = None, component = None, overridetype = None, \
        section = None, priority = None):
        self.package = package
        # NOTE(review): the assignment of `suite` is missing in this chunk.
        self.component = component
        self.overridetype = overridetype
        self.section = section
        self.priority = priority

    def properties(self):
        # NOTE(review): the continuation line of this list is missing in
        # this chunk.
        return ['package', 'suite', 'component', 'overridetype', 'section', \

    def not_null_constraints(self):
        return ['package', 'suite', 'component', 'overridetype', 'section']

__all__.append('Override')
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to.  If
                  None, don't limit.  Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
                      limit to.  If None, don't limit.  Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
                         limit to.  If None, don't limit.  Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """
    q = session.query(Override)
    q = q.filter_by(package=package)

    # Scalar arguments are normalised to one-element lists so a single
    # IN-filter covers both call styles.
    if suite is not None:
        if not isinstance(suite, list): suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list): component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list): overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))
    # NOTE(review): the `return q.all()` line is missing in this chunk.

__all__.append('get_override')
1604 ################################################################################
class OverrideType(ORMObject):
    """An override type name, as referenced by Override.overridetype and
    looked up via get_override_type()."""

    def __init__(self, overridetype = None):
        self.overridetype = overridetype

    def properties(self):
        # Attributes exposed through ORMObject's generic property machinery.
        exposed = ['overridetype', 'overridetype_id', 'overrides_count']
        return exposed

    def not_null_constraints(self):
        # Columns that must be set before the row may be written.
        required = ['overridetype']
        return required

__all__.append('OverrideType')
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: the database id for the given override type
    """
    q = session.query(OverrideType).filter_by(overridetype=override_type)
    # NOTE(review): the `try:` / `return q.one()` / `return None` lines are
    # missing in this chunk.
    except NoResultFound:
__all__.append('get_override_type')
1643 ################################################################################
class PolicyQueue(object):
    """Database row for a policy queue (e.g. NEW)."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the `return` below belongs to a __repr__ whose `def`
        # line is missing in this chunk.
        return '<PolicyQueue %s>' % self.queue_name

__all__.append('PolicyQueue')
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(queue_name=queuename)
    # NOTE(review): the `try:` / `return q.one()` / `return None` lines are
    # missing in this chunk.
    except NoResultFound:
__all__.append('get_policy_queue')
1679 ################################################################################
class PolicyQueueUpload(object):
    def __cmp__(self, other):
        # Order uploads: by source name, then version, then
        # source-vs-binary, then changes file name.
        # NOTE(review): the intermediate `if ret == 0:` guards, the branch
        # bodies and the final `return ret` are missing in this chunk.
        ret = cmp(self.changes.source, other.changes.source)
        ret = apt_pkg.version_compare(self.changes.version, other.changes.version)
        if self.source is not None and other.source is None:
        elif self.source is None and other.source is not None:
        ret = cmp(self.changes.changesname, other.changes.changesname)

__all__.append('PolicyQueueUpload')
1697 ################################################################################
class PolicyQueueByhandFile(object):
    # NOTE(review): the class body (`pass`) is missing in this chunk.

__all__.append('PolicyQueueByhandFile')
1704 ################################################################################
class Priority(ORMObject):
    """Database row for a package priority (e.g. required, optional)."""
    def __init__(self, priority = None, level = None):
        self.priority = priority
        # NOTE(review): the assignment of `level` is missing in this chunk.

    def properties(self):
        return ['priority', 'priority_id', 'level', 'overrides_count']

    def not_null_constraints(self):
        return ['priority', 'level']

    def __eq__(self, val):
        # Allow comparing a Priority directly against its name string.
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented

__all__.append('Priority')
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority
    """
    q = session.query(Priority).filter_by(priority=priority)
    # NOTE(review): the `try:` / `return q.one()` / `return None` lines are
    # missing in this chunk.
    except NoResultFound:
__all__.append('get_priority')
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """
    q = session.query(Priority)

    # NOTE(review): the `ret = {}` initialisation, the loop header over the
    # query results and the final `return ret` are missing in this chunk.
    ret[x.priority] = x.priority_id

__all__.append('get_priorities')
1778 ################################################################################
class Section(ORMObject):
    """Database row for an archive section (e.g. admin, libs)."""
    def __init__(self, section = None):
        self.section = section

    def properties(self):
        return ['section', 'section_id', 'overrides_count']

    def not_null_constraints(self):
        # NOTE(review): the return statement is missing in this chunk.

    def __eq__(self, val):
        # Allow comparing a Section directly against its name string.
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented

__all__.append('Section')
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name
    """
    q = session.query(Section).filter_by(section=section)
    # NOTE(review): the `try:` / `return q.one()` / `return None` lines are
    # missing in this chunk.
    except NoResultFound:
__all__.append('get_section')
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """
    q = session.query(Section)

    # NOTE(review): the `ret = {}` initialisation, the loop header over the
    # query results and the final `return ret` are missing in this chunk.
    ret[x.section] = x.section_id

__all__.append('get_sections')
1851 ################################################################################
class SignatureHistory(ORMObject):
    # NOTE(review): the @classmethod decorator and the `self = cls()` /
    # `return self` lines appear to be missing in this chunk.
    def from_signed_file(cls, signed_file):
        """signature history entry from signed file

        @type signed_file: L{daklib.gpg.SignedFile}
        @param signed_file: signed file

        @rtype: L{SignatureHistory}
        """
        self.fingerprint = signed_file.primary_fingerprint
        self.signature_timestamp = signed_file.signature_timestamp
        self.contents_sha1 = signed_file.contents_sha1()

__all__.append('SignatureHistory')
1871 ################################################################################
class SrcContents(ORMObject):
    """Database row mapping a source package to a file it contains."""
    def __init__(self, file = None, source = None):
        # NOTE(review): the assignment of `file` is missing in this chunk.
        self.source = source

    def properties(self):
        return ['file', 'source']

__all__.append('SrcContents')
1883 ################################################################################
1885 from debian.debfile import Deb822
# Temporary Deb822 subclass to fix bugs with : handling; see #597249
class Dak822(Deb822):
    def _internal_parser(self, sequence, fields=None):
        # The key is non-whitespace, non-colon characters before any colon.
        key_part = r"^(?P<key>[^: \t\n\r\f\v]+)\s*:\s*"
        single = re.compile(key_part + r"(?P<data>\S.*?)\s*$")
        multi = re.compile(key_part + r"$")
        multidata = re.compile(r"^\s(?P<data>.+?)\s*$")

        # Restrict parsing to the requested fields, if any were given.
        wanted_field = lambda f: fields is None or f in fields

        if isinstance(sequence, basestring):
            sequence = sequence.splitlines()

        # NOTE(review): several lines of the parsing loop are missing in
        # this chunk — the curkey/content initialisation, the `if m:`
        # guards around each match and the `continue` statements; the
        # indentation below is therefore approximate. Restore from VCS.
        for line in self.gpg_stripped_paragraph(sequence):
            m = single.match(line)
            self[curkey] = content
            if not wanted_field(m.group('key')):
            curkey = m.group('key')
            content = m.group('data')
            m = multi.match(line)
            self[curkey] = content
            if not wanted_field(m.group('key')):
            curkey = m.group('key')
            m = multidata.match(line)
            content += '\n' + line # XXX not m.group('data')?
        self[curkey] = content
class DBSource(ORMObject):
    """Database row for a source package (a .dsc in the pool)."""
    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None, fingerprint = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date
        self.fingerprint = fingerprint

    # NOTE(review): the @property/def header for the pkid accessor is
    # missing in this chunk.
        return self.source_id

    def properties(self):
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \
            'install_date', 'binaries_count', 'uploaders_count']

    def not_null_constraints(self):
        return ['source', 'version', 'install_date', 'maintainer', \
            'changedby', 'poolfile']

    def read_control_fields(self):
        """
        Reads the control information from a dsc

        @rtype: tuple
        @return: fields is the dsc information in a dictionary form
        """
        fullpath = self.poolfile.fullpath
        fields = Dak822(open(self.poolfile.fullpath, 'r'))
        # NOTE(review): the `return fields` line is missing in this chunk.

    metadata = association_proxy('key', 'value')

    def scan_contents(self):
        """
        Returns a set of names for non directories. The path names are
        normalized after converting them from either utf-8 or iso8859-1
        encoding.
        """
        fullpath = self.poolfile.fullpath
        from daklib.contents import UnpackedSource
        unpacked = UnpackedSource(fullpath)
        # NOTE(review): the result-set initialisation, the `try:` opening
        # the decode fallback, and the add/return lines are missing in this
        # chunk.
        for name in unpacked.get_all_filenames():
            # enforce proper utf-8 encoding
            name.decode('utf-8')
            except UnicodeDecodeError:
                name = name.decode('iso8859-1').encode('utf-8')

__all__.append('DBSource')
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
      1. exact match     => 1.0-3
      2. bin-only NMU    => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: list
    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: returns 1 if a source with expected version is found, otherwise 0
    """
    # Strip a +bN binNMU suffix so 1.0-3+b1 matches source 1.0-3.
    from daklib.regexes import re_bin_only_nmu
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))

        # source must exist in 'suite' or a suite that is enhanced by 'suite'
        s = get_suite(suite, session)
        # NOTE(review): the guard around the suite handling, the
        # per-iteration count checks and the final return statements are
        # missing in this chunk.
        enhances_vcs = session.query(VersionCheck).filter(VersionCheck.suite==s).filter_by(check='Enhances')
        considered_suites = [ vc.reference for vc in enhances_vcs ]
        considered_suites.append(s)

        q = q.filter(DBSource.suites.any(Suite.suite_id.in_([s.suite_id for s in considered_suites])))

    # No source found so return not ok

__all__.append('source_exists')
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """
    return session.query(Suite).filter(Suite.sources.any(source=source)).all()

__all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @type source: str
    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect.  If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBSource objects for the given name (may be empty)
    """
    q = session.query(DBSource).filter_by(source=source)

    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)
    # NOTE(review): the `return q.all()` line is missing in this chunk.

__all__.append('get_sources_from_name')
# FIXME: This function fails badly if it finds more than 1 source package and
# its implementation is trivial enough to be inlined.
def get_source_in_suite(source, suite, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite: string
    @param suite: the suite name

    @rtype: string
    @return: the version for I{source} in I{suite}
    """
    q = get_suite(suite, session).get_sources(source)
    # NOTE(review): the `try:` / `return q.one()` / `return None` lines are
    # missing in this chunk.
    except NoResultFound:
__all__.append('get_source_in_suite')
def import_metadata_into_db(obj, session=None):
    """
    This routine works on either DBBinary or DBSource objects and imports
    their metadata into the database
    """
    fields = obj.read_control_fields()
    for k in fields.keys():
        # NOTE(review): the `try:` lines opening this encoding-fallback
        # chain are missing in this chunk; indentation is approximate.
            val = str(fields[k])
        except UnicodeEncodeError:
            # Fall back to UTF-8
                val = fields[k].encode('utf-8')
            except UnicodeEncodeError:
                # Finally try iso8859-1
                val = fields[k].encode('iso8859-1')
                # Otherwise we allow the exception to percolate up and we cause
                # a reject as someone is playing silly buggers

        obj.metadata[get_or_set_metadatakey(k, session)] = val

    session.commit_or_flush()

__all__.append('import_metadata_into_db')
2159 ################################################################################
class SrcFormat(object):
    """Database row for a source package format name."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the `return` below belongs to a __repr__ whose `def`
        # line is missing in this chunk.
        return '<SrcFormat %s>' % (self.format_name)

__all__.append('SrcFormat')
2170 ################################################################################
# (display name, Suite attribute) pairs rendered by Suite's details dump.
# NOTE(review): one entry appears to be missing from this chunk.
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
# Why the heck don't we have any UNIQUE constraints in table suite?
# TODO: Add UNIQUE constraints for appropriate columns.
class Suite(ORMObject):
    """Database row for a suite (e.g. unstable, testing)."""
    def __init__(self, suite_name = None, version = None):
        self.suite_name = suite_name
        self.version = version

    def properties(self):
        # NOTE(review): the continuation line of this list is missing in
        # this chunk.
        return ['suite_name', 'version', 'sources_count', 'binaries_count', \

    def not_null_constraints(self):
        return ['suite_name']

    def __eq__(self, val):
        # Allow comparing a Suite directly against a suite-name string.
        if isinstance(val, str):
            return (self.suite_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.suite_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    # NOTE(review): the `def` header of this details-dump method and the
    # `ret = []` initialisation are missing in this chunk.
        for disp, field in SUITE_FIELDS:
            val = getattr(self, field, None)
            ret.append("%s: %s" % (disp, val))
        return "\n".join(ret)

    def get_architectures(self, skipsrc=False, skipall=False):
        """
        Returns list of Architecture objects

        @type skipsrc: boolean
        @param skipsrc: Whether to skip returning the 'source' architecture entry
        (Default False)

        @type skipall: boolean
        @param skipall: Whether to skip returning the 'all' architecture entry
        (Default False)

        @rtype: list
        @return: list of Architecture objects for the given name (may be empty)
        """
        q = object_session(self).query(Architecture).with_parent(self)
        # NOTE(review): the `if skipsrc:` / `if skipall:` guard lines are
        # missing in this chunk.
        q = q.filter(Architecture.arch_string != 'source')
        q = q.filter(Architecture.arch_string != 'all')
        return q.order_by(Architecture.arch_string).all()

    def get_sources(self, source):
        """
        Returns a query object representing DBSource that is part of C{suite}.

          - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}

        @type source: string
        @param source: source package name

        @rtype: sqlalchemy.orm.query.Query
        @return: a query of DBSource
        """
        session = object_session(self)
        # NOTE(review): the continuation line (the suites filter) is missing
        # below.
        return session.query(DBSource).filter_by(source = source). \

    def get_overridesuite(self):
        # Resolve the `overridesuite` setting to a Suite object.
        if self.overridesuite is None:
            # NOTE(review): the `return self` line is missing in this chunk.
        return object_session(self).query(Suite).filter_by(suite_name=self.overridesuite).one()

    # NOTE(review): the @property/def header for this path accessor is
    # missing in this chunk.
        return os.path.join(self.archive.path, 'dists', self.suite_name)

__all__.append('Suite')
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Suite
    @return: Suite object for the requested suite name (None if not present)
    """
    q = session.query(Suite).filter_by(suite_name=suite)
    # NOTE(review): the `try:` / `return q.one()` / `return None` lines are
    # missing in this chunk.
    except NoResultFound:
__all__.append('get_suite')
2301 ################################################################################
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name. The list is
    empty if suite does not exist.

    @type suite: str
    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry
    (Default False)

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry
    (Default False)

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Architecture objects for the given name (may be empty)
    """
    # NOTE(review): the `try:` line and the AttributeError fallback return
    # (for an unknown suite) are missing in this chunk.
        return get_suite(suite, session).get_architectures(skipsrc, skipall)
    except AttributeError:

__all__.append('get_suite_architectures')
2335 ################################################################################
class Uid(ORMObject):
    """Database row for an uploader identity."""
    def __init__(self, uid = None, name = None):
        # NOTE(review): the attribute assignments are missing in this chunk.

    def __eq__(self, val):
        # Allow comparing a Uid directly against a uid string.
        if isinstance(val, str):
            return (self.uid == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.uid != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        return ['uid', 'name', 'fingerprint']

    def not_null_constraints(self):
        # NOTE(review): the return statement is missing in this chunk.

__all__.append('Uid')
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: Uid
    @return: the uid object for the given uidname
    """
    q = session.query(Uid).filter_by(uid=uidname)
    # NOTE(review): the `try:` / `ret = q.one()` lines, the insert of the
    # new Uid, and the trailing `return ret` are missing in this chunk.
    except NoResultFound:
        session.commit_or_flush()

__all__.append('get_or_set_uid')
def get_uid_from_fingerprint(fpr, session=None):
    # Look up the Uid owning the given fingerprint string.
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)
    # NOTE(review): the `try:` / `return q.one()` / `return None` lines are
    # missing in this chunk.
    except NoResultFound:
__all__.append('get_uid_from_fingerprint')
2408 ################################################################################
class MetadataKey(ORMObject):
    """Database row for a control-field name used as metadata key."""
    def __init__(self, key = None):
        # NOTE(review): the assignment of `key` is missing in this chunk.

    def properties(self):
        # NOTE(review): the return statement is missing in this chunk.

    def not_null_constraints(self):
        # NOTE(review): the return statement is missing in this chunk.

__all__.append('MetadataKey')
def get_or_set_metadatakey(keyname, session=None):
    """
    Returns MetadataKey object for given keyname.

    If no matching keyname is found, a row is inserted.

    @type keyname: string
    @param keyname: The keyname to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: MetadataKey
    @return: the metadatakey object for the given keyname
    """
    q = session.query(MetadataKey).filter_by(key=keyname)
    # NOTE(review): the `try:` / `ret = q.one()` lines, the session.add of
    # the new key, and the trailing `return ret` are missing in this chunk.
    except NoResultFound:
        ret = MetadataKey(keyname)
        session.commit_or_flush()

__all__.append('get_or_set_metadatakey')
2454 ################################################################################
class BinaryMetadata(ORMObject):
    """Database row holding one metadata key/value pair of a binary."""
    def __init__(self, key = None, value = None, binary = None):
        # NOTE(review): the assignments of `key` and `value` are missing in
        # this chunk.
        self.binary = binary

    def properties(self):
        return ['binary', 'key', 'value']

    def not_null_constraints(self):
        # NOTE(review): the return statement is missing in this chunk.

__all__.append('BinaryMetadata')
2470 ################################################################################
class SourceMetadata(ORMObject):
    """Database row holding one metadata key/value pair of a source."""
    def __init__(self, key = None, value = None, source = None):
        # NOTE(review): the assignments of `key` and `value` are missing in
        # this chunk.
        self.source = source

    def properties(self):
        return ['source', 'key', 'value']

    def not_null_constraints(self):
        # NOTE(review): the return statement is missing in this chunk.

__all__.append('SourceMetadata')
2486 ################################################################################
class VersionCheck(ORMObject):
    """Database row for an inter-suite version constraint (e.g. Enhances)."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the body is missing in this chunk.

    def properties(self):
        #return ['suite_id', 'check', 'reference_id']
        # NOTE(review): the actual return statement is missing in this chunk.

    def not_null_constraints(self):
        return ['suite', 'check', 'reference']

__all__.append('VersionCheck')
def get_version_checks(suite_name, check = None, session = None):
    # Return the VersionCheck rows for a suite, optionally limited to one
    # check type.
    suite = get_suite(suite_name, session)
    # Make sure that what we return is iterable so that list comprehensions
    # involving this don't cause a traceback
    # NOTE(review): the unknown-suite guard, the `if check:` line and the
    # final `return q.all()` are missing in this chunk.
    q = session.query(VersionCheck).filter_by(suite=suite)
        q = q.filter_by(check=check)

__all__.append('get_version_checks')
2515 ################################################################################
# Database connection wrapper for the dak archive database.
# Instances share one attribute dictionary (Borg idiom — see the
# __shared_state assignment in __init__), so the first construction
# performs the expensive engine/mapper setup and later constructions
# reuse it.
# NOTE(review): this listing is elided (gaps in the original 25xx-29xx
# line numbering), so several statements are missing from view.
2517 class DBConn(object):
2519 database module init.
2523 def __init__(self, *args, **kwargs):
# Borg idiom: every instance shares the same state.
2524 self.__dict__ = self.__shared_state
# Only the first instance runs initialisation.
2526 if not getattr(self, 'initialised', False):
2527 self.initialised = True
# Passing a 'debug' kwarg turns on engine echo (see engine_args
# in __createconn below).
2528 self.debug = kwargs.has_key('debug')
# Reflect the archive database schema from the live database: each
# name listed below is autoloaded and bound as self.tbl_<name>
# (tables) or self.view_<name> (views).
# NOTE(review): the 'tables = [' opening (original lines 2532-2533)
# and many entries are elided in this listing.
2531 def __setuptables(self):
2534 'acl_architecture_map',
2535 'acl_fingerprint_map',
2542 'binaries_metadata',
2549 'external_overrides',
2550 'extra_src_references',
2552 'files_archive_map',
2558 # TODO: the maintainer column in table override should be removed.
2562 'policy_queue_upload',
2563 'policy_queue_upload_binaries_map',
2564 'policy_queue_byhand_file',
2567 'signature_history',
2576 'suite_architectures',
2577 'suite_build_queue_copy',
2578 'suite_src_formats',
# NOTE(review): the 'views = [' opening (presumably around original
# lines 2581-2583) is elided; the entries below are database views.
2584 'almost_obsolete_all_associations',
2585 'almost_obsolete_src_associations',
2586 'any_associations_source',
2587 'bin_associations_binaries',
2588 'binaries_suite_arch',
2591 'newest_all_associations',
2592 'newest_any_associations',
2594 'newest_src_association',
2595 'obsolete_all_associations',
2596 'obsolete_any_associations',
2597 'obsolete_any_by_all_associations',
2598 'obsolete_src_associations',
2600 'src_associations_bin',
2601 'src_associations_src',
2602 'suite_arch_by_name',
# Autoload each table definition from the database and expose it as
# an attribute (tbl_<name> / view_<name>) for the mappers below.
2605 for table_name in tables:
2606 table = Table(table_name, self.db_meta, \
2607 autoload=True, useexisting=True)
2608 setattr(self, 'tbl_%s' % table_name, table)
2610 for view_name in views:
2611 view = Table(view_name, self.db_meta, autoload=True)
2612 setattr(self, 'view_%s' % view_name, view)
# Configure the classic SQLAlchemy ORM mappers: one mapper() call per
# ORM class. 'properties' aliases raw columns (e.g. arch_id) and
# declares foreign-key relations; 'extension = validator' attaches a
# MapperExtension defined elsewhere in this file (presumably for
# not_null_constraints validation — confirm against full source).
2614 def __setupmappers(self):
2615 mapper(Architecture, self.tbl_architecture,
2616 properties = dict(arch_id = self.tbl_architecture.c.id,
2617 suites = relation(Suite, secondary=self.tbl_suite_architectures,
2618 order_by=self.tbl_suite.c.suite_name,
2619 backref=backref('architectures', order_by=self.tbl_architecture.c.arch_string))),
2620 extension = validator)
2622 mapper(ACL, self.tbl_acl,
2624 architectures = relation(Architecture, secondary=self.tbl_acl_architecture_map, collection_class=set),
2625 fingerprints = relation(Fingerprint, secondary=self.tbl_acl_fingerprint_map, collection_class=set),
2626 match_keyring = relation(Keyring, primaryjoin=(self.tbl_acl.c.match_keyring_id == self.tbl_keyrings.c.id)),
2627 per_source = relation(ACLPerSource, collection_class=set),
# ACLPerSource carries two Fingerprint relations, so each needs an
# explicit primaryjoin to pick its own FK column.
2630 mapper(ACLPerSource, self.tbl_acl_per_source,
2632 acl = relation(ACL),
2633 fingerprint = relation(Fingerprint, primaryjoin=(self.tbl_acl_per_source.c.fingerprint_id == self.tbl_fingerprint.c.id)),
2634 created_by = relation(Fingerprint, primaryjoin=(self.tbl_acl_per_source.c.created_by_id == self.tbl_fingerprint.c.id)),
2637 mapper(Archive, self.tbl_archive,
2638 properties = dict(archive_id = self.tbl_archive.c.id,
2639 archive_name = self.tbl_archive.c.name))
2641 mapper(ArchiveFile, self.tbl_files_archive_map,
2642 properties = dict(archive = relation(Archive, backref='files'),
2643 component = relation(Component),
2644 file = relation(PoolFile, backref='archives')))
2646 mapper(BuildQueue, self.tbl_build_queue,
2647 properties = dict(queue_id = self.tbl_build_queue.c.id,
2648 suite = relation(Suite, primaryjoin=(self.tbl_build_queue.c.suite_id==self.tbl_suite.c.id))))
# Binary packages: note 'key' maps the per-binary metadata rows as a
# dict keyed by metadata key, with full cascade so metadata rows are
# deleted along with the binary.
2650 mapper(DBBinary, self.tbl_binaries,
2651 properties = dict(binary_id = self.tbl_binaries.c.id,
2652 package = self.tbl_binaries.c.package,
2653 version = self.tbl_binaries.c.version,
2654 maintainer_id = self.tbl_binaries.c.maintainer,
2655 maintainer = relation(Maintainer),
2656 source_id = self.tbl_binaries.c.source,
2657 source = relation(DBSource, backref='binaries'),
2658 arch_id = self.tbl_binaries.c.architecture,
2659 architecture = relation(Architecture),
2660 poolfile_id = self.tbl_binaries.c.file,
2661 poolfile = relation(PoolFile),
2662 binarytype = self.tbl_binaries.c.type,
2663 fingerprint_id = self.tbl_binaries.c.sig_fpr,
2664 fingerprint = relation(Fingerprint),
2665 install_date = self.tbl_binaries.c.install_date,
2666 suites = relation(Suite, secondary=self.tbl_bin_associations,
2667 backref=backref('binaries', lazy='dynamic')),
2668 extra_sources = relation(DBSource, secondary=self.tbl_extra_src_references,
2669 backref=backref('extra_binary_references', lazy='dynamic')),
2670 key = relation(BinaryMetadata, cascade='all',
2671 collection_class=attribute_mapped_collection('key'))),
2672 extension = validator)
2674 mapper(Component, self.tbl_component,
2675 properties = dict(component_id = self.tbl_component.c.id,
2676 component_name = self.tbl_component.c.name),
2677 extension = validator)
2679 mapper(DBConfig, self.tbl_config,
2680 properties = dict(config_id = self.tbl_config.c.id))
2682 mapper(DSCFile, self.tbl_dsc_files,
2683 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
2684 source_id = self.tbl_dsc_files.c.source,
2685 source = relation(DBSource),
2686 poolfile_id = self.tbl_dsc_files.c.file,
2687 poolfile = relation(PoolFile)))
2689 mapper(ExternalOverride, self.tbl_external_overrides,
2691 suite_id = self.tbl_external_overrides.c.suite,
2692 suite = relation(Suite),
2693 component_id = self.tbl_external_overrides.c.component,
2694 component = relation(Component)))
2696 mapper(PoolFile, self.tbl_files,
2697 properties = dict(file_id = self.tbl_files.c.id,
2698 filesize = self.tbl_files.c.size),
2699 extension = validator)
2701 mapper(Fingerprint, self.tbl_fingerprint,
2702 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
2703 uid_id = self.tbl_fingerprint.c.uid,
2704 uid = relation(Uid),
2705 keyring_id = self.tbl_fingerprint.c.keyring,
2706 keyring = relation(Keyring),
2707 acl = relation(ACL)),
2708 extension = validator)
# NOTE(review): the trailing comma after the Keyring mapper call
# below looks stray (it would make the statement a discarded tuple,
# which is harmless) — confirm against the full source.
2710 mapper(Keyring, self.tbl_keyrings,
2711 properties = dict(keyring_name = self.tbl_keyrings.c.name,
2712 keyring_id = self.tbl_keyrings.c.id,
2713 acl = relation(ACL, primaryjoin=(self.tbl_keyrings.c.acl_id == self.tbl_acl.c.id)))),
2715 mapper(DBChange, self.tbl_changes,
2716 properties = dict(change_id = self.tbl_changes.c.id,
2717 seen = self.tbl_changes.c.seen,
2718 source = self.tbl_changes.c.source,
2719 binaries = self.tbl_changes.c.binaries,
2720 architecture = self.tbl_changes.c.architecture,
2721 distribution = self.tbl_changes.c.distribution,
2722 urgency = self.tbl_changes.c.urgency,
2723 maintainer = self.tbl_changes.c.maintainer,
2724 changedby = self.tbl_changes.c.changedby,
2725 date = self.tbl_changes.c.date,
2726 version = self.tbl_changes.c.version))
# A maintainer relates to sources twice (Maintainer: vs Changed-By:),
# hence the two explicitly-joined relations.
2728 mapper(Maintainer, self.tbl_maintainer,
2729 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
2730 maintains_sources = relation(DBSource, backref='maintainer',
2731 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
2732 changed_sources = relation(DBSource, backref='changedby',
2733 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
2734 extension = validator)
2736 mapper(NewComment, self.tbl_new_comments,
2737 properties = dict(comment_id = self.tbl_new_comments.c.id,
2738 policy_queue = relation(PolicyQueue)))
2740 mapper(Override, self.tbl_override,
2741 properties = dict(suite_id = self.tbl_override.c.suite,
2742 suite = relation(Suite, \
2743 backref=backref('overrides', lazy='dynamic')),
2744 package = self.tbl_override.c.package,
2745 component_id = self.tbl_override.c.component,
2746 component = relation(Component, \
2747 backref=backref('overrides', lazy='dynamic')),
2748 priority_id = self.tbl_override.c.priority,
2749 priority = relation(Priority, \
2750 backref=backref('overrides', lazy='dynamic')),
2751 section_id = self.tbl_override.c.section,
2752 section = relation(Section, \
2753 backref=backref('overrides', lazy='dynamic')),
2754 overridetype_id = self.tbl_override.c.type,
2755 overridetype = relation(OverrideType, \
2756 backref=backref('overrides', lazy='dynamic'))))
2758 mapper(OverrideType, self.tbl_override_type,
2759 properties = dict(overridetype = self.tbl_override_type.c.type,
2760 overridetype_id = self.tbl_override_type.c.id))
2762 mapper(PolicyQueue, self.tbl_policy_queue,
2763 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id,
2764 suite = relation(Suite, primaryjoin=(self.tbl_policy_queue.c.suite_id == self.tbl_suite.c.id))))
2766 mapper(PolicyQueueUpload, self.tbl_policy_queue_upload,
2768 changes = relation(DBChange),
2769 policy_queue = relation(PolicyQueue, backref='uploads'),
2770 target_suite = relation(Suite),
2771 source = relation(DBSource),
2772 binaries = relation(DBBinary, secondary=self.tbl_policy_queue_upload_binaries_map),
2775 mapper(PolicyQueueByhandFile, self.tbl_policy_queue_byhand_file,
2777 upload = relation(PolicyQueueUpload, backref='byhand'),
2781 mapper(Priority, self.tbl_priority,
2782 properties = dict(priority_id = self.tbl_priority.c.id))
2784 mapper(Section, self.tbl_section,
2785 properties = dict(section_id = self.tbl_section.c.id,
2786 section=self.tbl_section.c.section))
2788 mapper(SignatureHistory, self.tbl_signature_history)
# Source packages: like DBBinary above, 'key' is a metadata dict
# keyed by metadata key with cascade='all'.
2790 mapper(DBSource, self.tbl_source,
2791 properties = dict(source_id = self.tbl_source.c.id,
2792 version = self.tbl_source.c.version,
2793 maintainer_id = self.tbl_source.c.maintainer,
2794 poolfile_id = self.tbl_source.c.file,
2795 poolfile = relation(PoolFile),
2796 fingerprint_id = self.tbl_source.c.sig_fpr,
2797 fingerprint = relation(Fingerprint),
2798 changedby_id = self.tbl_source.c.changedby,
2799 srcfiles = relation(DSCFile,
2800 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
2801 suites = relation(Suite, secondary=self.tbl_src_associations,
2802 backref=backref('sources', lazy='dynamic')),
2803 uploaders = relation(Maintainer,
2804 secondary=self.tbl_src_uploaders),
2805 key = relation(SourceMetadata, cascade='all',
2806 collection_class=attribute_mapped_collection('key'))),
2807 extension = validator)
2809 mapper(SrcFormat, self.tbl_src_format,
2810 properties = dict(src_format_id = self.tbl_src_format.c.id,
2811 format_name = self.tbl_src_format.c.format_name))
# A suite references the policy-queue table twice (policy_queue and
# new_queue), so both relations carry explicit primaryjoins.
2813 mapper(Suite, self.tbl_suite,
2814 properties = dict(suite_id = self.tbl_suite.c.id,
2815 policy_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.policy_queue_id == self.tbl_policy_queue.c.id)),
2816 new_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.new_queue_id == self.tbl_policy_queue.c.id)),
2817 copy_queues = relation(BuildQueue,
2818 secondary=self.tbl_suite_build_queue_copy),
2819 srcformats = relation(SrcFormat, secondary=self.tbl_suite_src_formats,
2820 backref=backref('suites', lazy='dynamic')),
2821 archive = relation(Archive, backref='suites'),
2822 acls = relation(ACL, secondary=self.tbl_suite_acl_map, collection_class=set)),
2823 extension = validator)
2825 mapper(Uid, self.tbl_uid,
2826 properties = dict(uid_id = self.tbl_uid.c.id,
2827 fingerprint = relation(Fingerprint)),
2828 extension = validator)
2830 mapper(BinContents, self.tbl_bin_contents,
2832 binary = relation(DBBinary,
2833 backref=backref('contents', lazy='dynamic', cascade='all')),
2834 file = self.tbl_bin_contents.c.file))
2836 mapper(SrcContents, self.tbl_src_contents,
2838 source = relation(DBSource,
2839 backref=backref('contents', lazy='dynamic', cascade='all')),
2840 file = self.tbl_src_contents.c.file))
2842 mapper(MetadataKey, self.tbl_metadata_keys,
2844 key_id = self.tbl_metadata_keys.c.key_id,
2845 key = self.tbl_metadata_keys.c.key))
2847 mapper(BinaryMetadata, self.tbl_binaries_metadata,
2849 binary_id = self.tbl_binaries_metadata.c.bin_id,
2850 binary = relation(DBBinary),
2851 key_id = self.tbl_binaries_metadata.c.key_id,
2852 key = relation(MetadataKey),
2853 value = self.tbl_binaries_metadata.c.value))
2855 mapper(SourceMetadata, self.tbl_source_metadata,
2857 source_id = self.tbl_source_metadata.c.src_id,
2858 source = relation(DBSource),
2859 key_id = self.tbl_source_metadata.c.key_id,
2860 key = relation(MetadataKey),
2861 value = self.tbl_source_metadata.c.value))
# VersionCheck joins the suite table twice ('suite' being checked and
# the 'reference' suite); the reference is eagerly loaded.
2863 mapper(VersionCheck, self.tbl_version_check,
2865 suite_id = self.tbl_version_check.c.suite,
2866 suite = relation(Suite, primaryjoin=self.tbl_version_check.c.suite==self.tbl_suite.c.id),
2867 reference_id = self.tbl_version_check.c.reference,
2868 reference = relation(Suite, primaryjoin=self.tbl_version_check.c.reference==self.tbl_suite.c.id, lazy='joined')))
2870 ## Connection functions
# Build the PostgreSQL connection string from dak's configuration
# (a pg service name, an explicit host + optional port, or a local
# connection with the port as a query parameter), create the engine
# and sessionmaker, then reflect tables and set up mappers.
2871 def __createconn(self):
2872 from config import Config
2874 if cnf.has_key("DB::Service"):
# Handled below by the monkey-patched dialect.
2875 connstr = "postgresql://service=%s" % cnf["DB::Service"]
2876 elif cnf.has_key("DB::Host"):
2878 connstr = "postgresql://%s" % cnf["DB::Host"]
2879 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
2880 connstr += ":%s" % cnf["DB::Port"]
2881 connstr += "/%s" % cnf["DB::Name"]
# (The 'else:' branch opener is elided above.) No host configured:
# connect locally, passing the port as a URL query parameter.
2884 connstr = "postgresql:///%s" % cnf["DB::Name"]
2885 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
2886 connstr += "?port=%s" % cnf["DB::Port"]
# Optional engine tuning knobs from configuration; 'echo' mirrors
# the debug flag set in __init__.
2888 engine_args = { 'echo': self.debug }
2889 if cnf.has_key('DB::PoolSize'):
2890 engine_args['pool_size'] = int(cnf['DB::PoolSize'])
2891 if cnf.has_key('DB::MaxOverflow'):
2892 engine_args['max_overflow'] = int(cnf['DB::MaxOverflow'])
2893 if sa_major_version == '0.6' and cnf.has_key('DB::Unicode') and \
2894 cnf['DB::Unicode'] == 'false':
2895 engine_args['use_native_unicode'] = False
2897 # Monkey patch a new dialect in in order to support service= syntax
2898 import sqlalchemy.dialects.postgresql
2899 from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
2900 class PGDialect_psycopg2_dak(PGDialect_psycopg2):
2901 def create_connect_args(self, url):
2902 if str(url).startswith('postgresql://service='):
# Everything after 'postgresql://service=' (21 chars) is the
# pg service name; pass it straight through as a DSN.
2904 servicename = str(url)[21:]
2905 return (['service=%s' % servicename], {})
2907 return PGDialect_psycopg2.create_connect_args(self, url)
2909 sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak
# NOTE(review): the enclosing 'try:' for the OperationalError handler
# below is elided in this listing (presumably around original 2911).
2912 self.db_pg = create_engine(connstr, **engine_args)
2913 self.db_meta = MetaData()
2914 self.db_meta.bind = self.db_pg
2915 self.db_smaker = sessionmaker(bind=self.db_pg,
2919 self.__setuptables()
2920 self.__setupmappers()
2922 except OperationalError as e:
# Abort the whole program if the database is unreachable.
2924 utils.fubar("Cannot connect to database (%s)" % str(e))
# Remember the creating process so session() can detect forks.
2926 self.pid = os.getpid()
2928 def session(self, work_mem = 0):
2930 Returns a new session object. If a work_mem parameter is provided a new
2931 transaction is started and the work_mem parameter is set for this
2932 transaction. The work_mem parameter is measured in MB. A default value
2933 will be used if the parameter is not set.
2935 # reinitialize DBConn in new processes
2936 if self.pid != os.getpid():
# NOTE(review): the re-initialisation statements and the guard on
# work_mem (original lines 2937-2940) are elided in this listing.
2939 session = self.db_smaker()
# work_mem is an int, so the %d interpolation into this SET LOCAL
# statement cannot inject SQL.
2941 session.execute("SET LOCAL work_mem TO '%d MB'" % work_mem)
# Export the shared-state connection wrapper as public API.
2944 __all__.append('DBConn')