5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
38 from os.path import normpath
50 import simplejson as json
52 from datetime import datetime, timedelta
53 from errno import ENOENT
54 from tempfile import mkstemp, mkdtemp
55 from subprocess import Popen, PIPE
56 from tarfile import TarFile
58 from inspect import getargspec
61 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
63 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
64 backref, MapperExtension, EXT_CONTINUE, object_mapper, clear_mappers
65 from sqlalchemy import types as sqltypes
66 from sqlalchemy.orm.collections import attribute_mapped_collection
67 from sqlalchemy.ext.associationproxy import association_proxy
69 # Don't remove this, we re-export the exceptions to scripts which import us
70 from sqlalchemy.exc import *
71 from sqlalchemy.orm.exc import NoResultFound
73 # Only import Config until Queue stuff is changed to store its config
75 from config import Config
76 from textutils import fix_maintainer
77 from dak_exceptions import DBUpdateError, NoSourceFieldError, FileExistsError
79 # suppress some deprecation warnings in squeeze related to sqlalchemy
81 warnings.filterwarnings('ignore', \
82 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
84 warnings.filterwarnings('ignore', \
85 "Predicate of partial index .* ignored during reflection", \
89 ################################################################################
91 # Patch in support for the debversion field type so that it works during
95 # that is for sqlalchemy 0.6
96 UserDefinedType = sqltypes.UserDefinedType
98 # this one for sqlalchemy 0.5
99 UserDefinedType = sqltypes.TypeEngine
101 class DebVersion(UserDefinedType):
102 def get_col_spec(self):
105 def bind_processor(self, dialect):
108 # ' = None' is needed for sqlalchemy 0.5:
109 def result_processor(self, dialect, coltype = None):
112 sa_major_version = sqlalchemy.__version__[0:3]
113 if sa_major_version in ["0.5", "0.6", "0.7"]:
114 from sqlalchemy.databases import postgres
115 postgres.ischema_names['debversion'] = DebVersion
117 raise Exception("dak only ported to SQLA versions 0.5 to 0.7. See daklib/dbconn.py")
119 ################################################################################
121 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
123 ################################################################################
125 def session_wrapper(fn):
127 Wrapper around common ".., session=None):" handling. If the wrapped
128 function is called without passing 'session', we create a local one
129 and destroy it when the function ends.
131 Also attaches a commit_or_flush method to the session; if we created a
132 local session, this is a synonym for session.commit(), otherwise it is a
133 synonym for session.flush().
136 def wrapped(*args, **kwargs):
137 private_transaction = False
139 # Find the session object
140 session = kwargs.get('session')
143 if len(args) <= len(getargspec(fn)[0]) - 1:
144 # No session specified as last argument or in kwargs
145 private_transaction = True
146 session = kwargs['session'] = DBConn().session()
148 # Session is last argument in args
152 session = args[-1] = DBConn().session()
153 private_transaction = True
155 if private_transaction:
156 session.commit_or_flush = session.commit
158 session.commit_or_flush = session.flush
161 return fn(*args, **kwargs)
163 if private_transaction:
164 # We created a session; close it.
167 wrapped.__doc__ = fn.__doc__
168 wrapped.func_name = fn.func_name
172 __all__.append('session_wrapper')
174 ################################################################################
176 class ORMObject(object):
178 ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
179 derived classes must implement the properties() method.
182 def properties(self):
184 This method should be implemented by all derived classes and returns a
185 list of the important properties. The properties 'created' and
186 'modified' will be added automatically. A suffix '_count' should be
187 added to properties that are lists or query objects. The most important
188 property name should be returned as the first element in the list
189 because it is used by repr().
195 Returns a JSON representation of the object based on the properties
196 returned from the properties() method.
199 # add created and modified
200 all_properties = self.properties() + ['created', 'modified']
201 for property in all_properties:
202 # check for list or query
203 if property[-6:] == '_count':
204 real_property = property[:-6]
205 if not hasattr(self, real_property):
207 value = getattr(self, real_property)
208 if hasattr(value, '__len__'):
211 elif hasattr(value, 'count'):
212 # query (but not during validation)
213 if self.in_validation:
215 value = value.count()
217 raise KeyError('Do not understand property %s.' % property)
219 if not hasattr(self, property):
222 value = getattr(self, property)
226 elif isinstance(value, ORMObject):
227 # use repr() for ORMObject types
230 # we want a string for all other types because json cannot
233 data[property] = value
234 return json.dumps(data)
238 Returns the name of the class.
240 return type(self).__name__
244 Returns a short string representation of the object using the first
245 element from the properties() method.
247 primary_property = self.properties()[0]
248 value = getattr(self, primary_property)
249 return '<%s %s>' % (self.classname(), str(value))
253 Returns a human readable form of the object using the properties()
256 return '<%s %s>' % (self.classname(), self.json())
258 def not_null_constraints(self):
260 Returns a list of properties that must be not NULL. Derived classes
261 should override this method if needed.
265 validation_message = \
266 "Validation failed because property '%s' must not be empty in object\n%s"
268 in_validation = False
272 This function validates the not NULL constraints as returned by
273 not_null_constraints(). It raises the DBUpdateError exception if
276 for property in self.not_null_constraints():
277 # TODO: It is a bit awkward that the mapper configuration allow
278 # directly setting the numeric _id columns. We should get rid of it
280 if hasattr(self, property + '_id') and \
281 getattr(self, property + '_id') is not None:
283 if not hasattr(self, property) or getattr(self, property) is None:
284 # str() might lead to races due to a 2nd flush
285 self.in_validation = True
286 message = self.validation_message % (property, str(self))
287 self.in_validation = False
288 raise DBUpdateError(message)
292 def get(cls, primary_key, session = None):
294 This is a support function that allows getting an object by its primary
297 Architecture.get(3[, session])
299 instead of the more verbose
301 session.query(Architecture).get(3)
303 return session.query(cls).get(primary_key)
305 def session(self, replace = False):
307 Returns the current session that is associated with the object. May
308 return None if the object is in a detached state.
311 return object_session(self)
313 def clone(self, session = None):
315 Clones the current object in a new session and returns the new clone. A
316 fresh session is created if the optional session parameter is not
317 provided. The function will fail if a session is provided and has
320 RATIONALE: SQLAlchemy's session is not thread safe. This method clones
321 an existing object to allow several threads to work with their own
322 instances of an ORMObject.
324 WARNING: Only persistent (committed) objects can be cloned. Changes
325 made to the original object that are not committed yet will get lost.
326 The session of the new object will always be rolled back to avoid
330 if self.session() is None:
331 raise RuntimeError( \
332 'Method clone() failed for detached object:\n%s' % self)
333 self.session().flush()
334 mapper = object_mapper(self)
335 primary_key = mapper.primary_key_from_instance(self)
336 object_class = self.__class__
338 session = DBConn().session()
339 elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
340 raise RuntimeError( \
341 'Method clone() failed due to unflushed changes in session.')
342 new_object = session.query(object_class).get(primary_key)
344 if new_object is None:
345 raise RuntimeError( \
346 'Method clone() failed for non-persistent object:\n%s' % self)
349 __all__.append('ORMObject')
351 ################################################################################
353 class Validator(MapperExtension):
355 This class calls the validate() method for each instance for the
356 'before_update' and 'before_insert' events. A global object validator is
357 used for configuring the individual mappers.
360 def before_update(self, mapper, connection, instance):
364 def before_insert(self, mapper, connection, instance):
368 validator = Validator()
370 ################################################################################
372 class ACL(ORMObject):
374 return "<ACL {0}>".format(self.name)
376 __all__.append('ACL')
378 class ACLPerSource(ORMObject):
380 return "<ACLPerSource acl={0} fingerprint={1} source={2} reason={3}>".format(self.acl.name, self.fingerprint.fingerprint, self.source, self.reason)
382 __all__.append('ACLPerSource')
384 ################################################################################
class Architecture(ORMObject):
    """A Debian architecture (e.g. 'amd64') known to the archive."""

    def __init__(self, arch_string = None, description = None):
        # Canonical architecture name plus a human-readable description.
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Allow comparing an Architecture directly against a plain string.
        if not isinstance(val, str):
            # Signal Python to fall back to the normal comparison operator.
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        # Mirror of __eq__ for the inequality operator.
        if not isinstance(val, str):
            # Signal Python to fall back to the normal comparison operator.
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # 'arch_string' first: ORMObject.__repr__() uses the leading element.
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        # An architecture row without a name is invalid.
        return ['arch_string']
409 __all__.append('Architecture')
412 def get_architecture(architecture, session=None):
414 Returns database id for given C{architecture}.
416 @type architecture: string
417 @param architecture: The name of the architecture
419 @type session: Session
420 @param session: Optional SQLA session object (a temporary one will be
421 generated if not supplied)
424 @return: Architecture object for the given arch (None if not present)
427 q = session.query(Architecture).filter_by(arch_string=architecture)
431 except NoResultFound:
434 __all__.append('get_architecture')
436 # TODO: should be removed because the implementation is too trivial
438 def get_architecture_suites(architecture, session=None):
440 Returns list of Suite objects for given C{architecture} name
442 @type architecture: str
443 @param architecture: Architecture name to search for
445 @type session: Session
446 @param session: Optional SQL session object (a temporary one will be
447 generated if not supplied)
450 @return: list of Suite objects for the given name (may be empty)
453 return get_architecture(architecture, session).suites
455 __all__.append('get_architecture_suites')
457 ################################################################################
459 class Archive(object):
460 def __init__(self, *args, **kwargs):
464 return '<Archive %s>' % self.archive_name
466 __all__.append('Archive')
469 def get_archive(archive, session=None):
471 returns database id for given C{archive}.
473 @type archive: string
474 @param archive: the name of the archive
476 @type session: Session
477 @param session: Optional SQLA session object (a temporary one will be
478 generated if not supplied)
481 @return: Archive object for the given name (None if not present)
484 archive = archive.lower()
486 q = session.query(Archive).filter_by(archive_name=archive)
490 except NoResultFound:
493 __all__.append('get_archive')
495 ################################################################################
497 class ArchiveFile(object):
498 def __init__(self, archive=None, component=None, file=None):
499 self.archive = archive
500 self.component = component
504 return os.path.join(self.archive.path, 'pool', self.component.component_name, self.file.filename)
506 __all__.append('ArchiveFile')
508 ################################################################################
510 class BinContents(ORMObject):
511 def __init__(self, file = None, binary = None):
515 def properties(self):
516 return ['file', 'binary']
518 __all__.append('BinContents')
520 ################################################################################
def subprocess_setup():
    """Restore the default SIGPIPE disposition for a child process.

    Python installs its own SIGPIPE handler, which is usually not what
    non-Python subprocesses expect; pass this as ``preexec_fn`` to
    ``Popen`` so the child sees the OS default behaviour instead.
    """
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
527 class DBBinary(ORMObject):
528 def __init__(self, package = None, source = None, version = None, \
529 maintainer = None, architecture = None, poolfile = None, \
530 binarytype = 'deb', fingerprint=None):
531 self.package = package
533 self.version = version
534 self.maintainer = maintainer
535 self.architecture = architecture
536 self.poolfile = poolfile
537 self.binarytype = binarytype
538 self.fingerprint = fingerprint
542 return self.binary_id
544 def properties(self):
545 return ['package', 'version', 'maintainer', 'source', 'architecture', \
546 'poolfile', 'binarytype', 'fingerprint', 'install_date', \
547 'suites_count', 'binary_id', 'contents_count', 'extra_sources']
549 def not_null_constraints(self):
550 return ['package', 'version', 'maintainer', 'source', 'poolfile', \
553 metadata = association_proxy('key', 'value')
555 def scan_contents(self):
557 Yields the contents of the package. Only regular files are yielded and
558 the path names are normalized after converting them from either utf-8
559 or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the
560 package does not contain any regular file.
562 fullpath = self.poolfile.fullpath
563 dpkg = Popen(['dpkg-deb', '--fsys-tarfile', fullpath], stdout = PIPE,
564 preexec_fn = subprocess_setup)
565 tar = TarFile.open(fileobj = dpkg.stdout, mode = 'r|')
566 for member in tar.getmembers():
567 if not member.isdir():
568 name = normpath(member.name)
569 # enforce proper utf-8 encoding
572 except UnicodeDecodeError:
573 name = name.decode('iso8859-1').encode('utf-8')
579 def read_control(self):
581 Reads the control information from a binary.
584 @return: stanza text of the control section.
587 fullpath = self.poolfile.fullpath
588 deb_file = open(fullpath, 'r')
589 stanza = utils.deb_extract_control(deb_file)
594 def read_control_fields(self):
596 Reads the control information from a binary and return
600 @return: fields of the control section as a dictionary.
603 stanza = self.read_control()
604 return apt_pkg.TagSection(stanza)
606 __all__.append('DBBinary')
609 def get_suites_binary_in(package, session=None):
611 Returns list of Suite objects which given C{package} name is in
614 @param package: DBBinary package name to search for
617 @return: list of Suite objects for the given package
620 return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()
622 __all__.append('get_suites_binary_in')
625 def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
627 Returns the component name of the newest binary package in suite_list or
628 None if no package is found. The result can be optionally filtered by a list
629 of architecture names.
632 @param package: DBBinary package name to search for
634 @type suite_list: list of str
635 @param suite_list: list of suite_name items
637 @type arch_list: list of str
638 @param arch_list: optional list of arch_string items that defaults to []
640 @rtype: str or NoneType
641 @return: name of component or None
644 q = session.query(DBBinary).filter_by(package = package). \
645 join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
646 if len(arch_list) > 0:
647 q = q.join(DBBinary.architecture). \
648 filter(Architecture.arch_string.in_(arch_list))
649 binary = q.order_by(desc(DBBinary.version)).first()
653 return binary.poolfile.component.component_name
655 __all__.append('get_component_by_package_suite')
657 ################################################################################
659 class BuildQueue(object):
660 def __init__(self, *args, **kwargs):
664 return '<BuildQueue %s>' % self.queue_name
666 __all__.append('BuildQueue')
668 ################################################################################
class Component(ORMObject):
    """An archive component (e.g. 'main', 'contrib')."""

    def __init__(self, component_name = None):
        self.component_name = component_name

    def __eq__(self, val):
        # Support comparing a Component directly with its name string.
        if not isinstance(val, str):
            # Signal Python to fall back to the normal comparison operator.
            return NotImplemented
        return self.component_name == val

    def __ne__(self, val):
        # Mirror of __eq__ for the inequality operator.
        if not isinstance(val, str):
            # Signal Python to fall back to the normal comparison operator.
            return NotImplemented
        return self.component_name != val

    def properties(self):
        # 'component_name' first: ORMObject.__repr__() uses the leading element.
        return ['component_name', 'component_id', 'description',
            'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        # A component row without a name is invalid.
        return ['component_name']
694 __all__.append('Component')
697 def get_component(component, session=None):
699 Returns database id for given C{component}.
701 @type component: string
702 @param component: The name of the override type
705 @return: the database id for the given component
708 component = component.lower()
710 q = session.query(Component).filter_by(component_name=component)
714 except NoResultFound:
717 __all__.append('get_component')
720 def get_mapped_component(component_name, session=None):
721 """get component after mappings
723 Evaluate component mappings from ComponentMappings in dak.conf for the
724 given component name.
726 @todo: ansgar wants to get rid of this. It's currently only used for
729 @type component_name: str
730 @param component_name: component name
732 @param session: database session
734 @rtype: L{daklib.dbconn.Component} or C{None}
735 @return: component after applying maps or C{None}
738 for m in cnf.value_list("ComponentMappings"):
739 (src, dst) = m.split()
740 if component_name == src:
742 component = session.query(Component).filter_by(component_name=component_name).first()
745 __all__.append('get_mapped_component')
748 def get_component_names(session=None):
750 Returns list of strings of component names.
753 @return: list of strings of component names
756 return [ x.component_name for x in session.query(Component).all() ]
758 __all__.append('get_component_names')
760 ################################################################################
762 class DBConfig(object):
763 def __init__(self, *args, **kwargs):
767 return '<DBConfig %s>' % self.name
769 __all__.append('DBConfig')
771 ################################################################################
774 def get_or_set_contents_file_id(filename, session=None):
776 Returns database id for given filename.
778 If no matching file is found, a row is inserted.
780 @type filename: string
781 @param filename: The filename
782 @type session: SQLAlchemy
783 @param session: Optional SQL session object (a temporary one will be
784 generated if not supplied). If not passed, a commit will be performed at
785 the end of the function, otherwise the caller is responsible for committing.
788 @return: the database id for the given component
791 q = session.query(ContentFilename).filter_by(filename=filename)
794 ret = q.one().cafilename_id
795 except NoResultFound:
796 cf = ContentFilename()
797 cf.filename = filename
799 session.commit_or_flush()
800 ret = cf.cafilename_id
804 __all__.append('get_or_set_contents_file_id')
807 def get_contents(suite, overridetype, section=None, session=None):
809 Returns contents for a suite / overridetype combination, limiting
810 to a section if not None.
813 @param suite: Suite object
815 @type overridetype: OverrideType
816 @param overridetype: OverrideType object
818 @type section: Section
819 @param section: Optional section object to limit results to
821 @type session: SQLAlchemy
822 @param session: Optional SQL session object (a temporary one will be
823 generated if not supplied)
826 @return: ResultsProxy object set up to return tuples of (filename, section,
830 # find me all of the contents for a given suite
831 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
835 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
836 JOIN content_file_names n ON (c.filename=n.id)
837 JOIN binaries b ON (b.id=c.binary_pkg)
838 JOIN override o ON (o.package=b.package)
839 JOIN section s ON (s.id=o.section)
840 WHERE o.suite = :suiteid AND o.type = :overridetypeid
841 AND b.type=:overridetypename"""
843 vals = {'suiteid': suite.suite_id,
844 'overridetypeid': overridetype.overridetype_id,
845 'overridetypename': overridetype.overridetype}
847 if section is not None:
848 contents_q += " AND s.id = :sectionid"
849 vals['sectionid'] = section.section_id
851 contents_q += " ORDER BY fn"
853 return session.execute(contents_q, vals)
855 __all__.append('get_contents')
857 ################################################################################
859 class ContentFilepath(object):
860 def __init__(self, *args, **kwargs):
864 return '<ContentFilepath %s>' % self.filepath
866 __all__.append('ContentFilepath')
869 def get_or_set_contents_path_id(filepath, session=None):
871 Returns database id for given path.
873 If no matching file is found, a row is inserted.
875 @type filepath: string
876 @param filepath: The filepath
878 @type session: SQLAlchemy
879 @param session: Optional SQL session object (a temporary one will be
880 generated if not supplied). If not passed, a commit will be performed at
881 the end of the function, otherwise the caller is responsible for committing.
884 @return: the database id for the given path
887 q = session.query(ContentFilepath).filter_by(filepath=filepath)
890 ret = q.one().cafilepath_id
891 except NoResultFound:
892 cf = ContentFilepath()
893 cf.filepath = filepath
895 session.commit_or_flush()
896 ret = cf.cafilepath_id
900 __all__.append('get_or_set_contents_path_id')
902 ################################################################################
904 class ContentAssociation(object):
905 def __init__(self, *args, **kwargs):
909 return '<ContentAssociation %s>' % self.ca_id
911 __all__.append('ContentAssociation')
913 def insert_content_paths(binary_id, fullpaths, session=None):
915 Make sure given path is associated with given binary id
918 @param binary_id: the id of the binary
919 @type fullpaths: list
920 @param fullpaths: the list of paths of the file being associated with the binary
921 @type session: SQLAlchemy session
922 @param session: Optional SQLAlchemy session. If this is passed, the caller
923 is responsible for ensuring a transaction has begun and committing the
924 results or rolling back based on the result code. If not passed, a commit
925 will be performed at the end of the function, otherwise the caller is
926 responsible for committing.
928 @return: True upon success
933 session = DBConn().session()
938 def generate_path_dicts():
939 for fullpath in fullpaths:
940 if fullpath.startswith( './' ):
941 fullpath = fullpath[2:]
943 yield {'filename':fullpath, 'id': binary_id }
945 for d in generate_path_dicts():
946 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
955 traceback.print_exc()
957 # Only rollback if we set up the session ourself
964 __all__.append('insert_content_paths')
966 ################################################################################
968 class DSCFile(object):
969 def __init__(self, *args, **kwargs):
973 return '<DSCFile %s>' % self.dscfile_id
975 __all__.append('DSCFile')
978 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
980 Returns a list of DSCFiles which may be empty
982 @type dscfile_id: int (optional)
983 @param dscfile_id: the dscfile_id of the DSCFiles to find
985 @type source_id: int (optional)
986 @param source_id: the source id related to the DSCFiles to find
988 @type poolfile_id: int (optional)
989 @param poolfile_id: the poolfile id related to the DSCFiles to find
992 @return: Possibly empty list of DSCFiles
995 q = session.query(DSCFile)
997 if dscfile_id is not None:
998 q = q.filter_by(dscfile_id=dscfile_id)
1000 if source_id is not None:
1001 q = q.filter_by(source_id=source_id)
1003 if poolfile_id is not None:
1004 q = q.filter_by(poolfile_id=poolfile_id)
1008 __all__.append('get_dscfiles')
1010 ################################################################################
1012 class ExternalOverride(ORMObject):
1013 def __init__(self, *args, **kwargs):
1017 return '<ExternalOverride %s = %s: %s>' % (self.package, self.key, self.value)
1019 __all__.append('ExternalOverride')
1021 ################################################################################
1023 class PoolFile(ORMObject):
1024 def __init__(self, filename = None, filesize = -1, \
1026 self.filename = filename
1027 self.filesize = filesize
1028 self.md5sum = md5sum
1032 session = DBConn().session().object_session(self)
1033 af = session.query(ArchiveFile).join(Archive) \
1034 .filter(ArchiveFile.file == self) \
1035 .order_by(Archive.tainted.desc()).first()
1039 def component(self):
1040 session = DBConn().session().object_session(self)
1041 component_id = session.query(ArchiveFile.component_id).filter(ArchiveFile.file == self) \
1042 .group_by(ArchiveFile.component_id).one()
1043 return session.query(Component).get(component_id)
1047 return os.path.basename(self.filename)
1049 def is_valid(self, filesize = -1, md5sum = None):
1050 return self.filesize == long(filesize) and self.md5sum == md5sum
1052 def properties(self):
1053 return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
1054 'sha256sum', 'source', 'binary', 'last_used']
1056 def not_null_constraints(self):
1057 return ['filename', 'md5sum']
1059 def identical_to(self, filename):
1061 compare size and hash with the given file
1064 @return: true if the given file has the same size and hash as this object; false otherwise
1066 st = os.stat(filename)
1067 if self.filesize != st.st_size:
1070 f = open(filename, "r")
1071 sha256sum = apt_pkg.sha256sum(f)
1072 if sha256sum != self.sha256sum:
1077 __all__.append('PoolFile')
1080 def get_poolfile_like_name(filename, session=None):
1082 Returns an array of PoolFile objects which are like the given name
1084 @type filename: string
1085 @param filename: the filename of the file to check against the DB
1088 @return: array of PoolFile objects
1091 # TODO: There must be a way of properly using bind parameters with %FOO%
1092 q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1096 __all__.append('get_poolfile_like_name')
1098 ################################################################################
1100 class Fingerprint(ORMObject):
1101 def __init__(self, fingerprint = None):
1102 self.fingerprint = fingerprint
1104 def properties(self):
1105 return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
1108 def not_null_constraints(self):
1109 return ['fingerprint']
1111 __all__.append('Fingerprint')
1114 def get_fingerprint(fpr, session=None):
1116 Returns Fingerprint object for given fpr.
1119 @param fpr: The fpr to find / add
1121 @type session: SQLAlchemy
1122 @param session: Optional SQL session object (a temporary one will be
1123 generated if not supplied).
1126 @return: the Fingerprint object for the given fpr or None
1129 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1133 except NoResultFound:
1138 __all__.append('get_fingerprint')
1141 def get_or_set_fingerprint(fpr, session=None):
1143 Returns Fingerprint object for given fpr.
1145 If no matching fpr is found, a row is inserted.
1148 @param fpr: The fpr to find / add
1150 @type session: SQLAlchemy
1151 @param session: Optional SQL session object (a temporary one will be
1152 generated if not supplied). If not passed, a commit will be performed at
1153 the end of the function, otherwise the caller is responsible for commiting.
1154 A flush will be performed either way.
1157 @return: the Fingerprint object for the given fpr
1160 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1164 except NoResultFound:
1165 fingerprint = Fingerprint()
1166 fingerprint.fingerprint = fpr
1167 session.add(fingerprint)
1168 session.commit_or_flush()
1173 __all__.append('get_or_set_fingerprint')
1175 ################################################################################
1177 # Helper routine for Keyring class
1178 def get_ldap_name(entry):
1180 for k in ["cn", "mn", "sn"]:
1182 if ret and ret[0] != "" and ret[0] != "-":
1184 return " ".join(name)
1186 ################################################################################
1188 class Keyring(object):
1189 gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
1190 " --with-colons --fingerprint --fingerprint"
1195 def __init__(self, *args, **kwargs):
1199 return '<Keyring %s>' % self.keyring_name
1201 def de_escape_gpg_str(self, txt):
1202 esclist = re.split(r'(\\x..)', txt)
1203 for x in range(1,len(esclist),2):
1204 esclist[x] = "%c" % (int(esclist[x][2:],16))
1205 return "".join(esclist)
1207 def parse_address(self, uid):
1208 """parses uid and returns a tuple of real name and email address"""
1210 (name, address) = email.Utils.parseaddr(uid)
1211 name = re.sub(r"\s*[(].*[)]", "", name)
1212 name = self.de_escape_gpg_str(name)
1215 return (name, address)
1217 def load_keys(self, keyring):
1218 if not self.keyring_id:
1219 raise Exception('Must be initialized with database information')
1221 k = os.popen(self.gpg_invocation % keyring, "r")
1226 field = line.split(":")
1227 if field[0] == "pub":
1230 (name, addr) = self.parse_address(field[9])
1232 self.keys[key]["email"] = addr
1233 self.keys[key]["name"] = name
1234 self.keys[key]["fingerprints"] = []
1236 elif key and field[0] == "sub" and len(field) >= 12:
1237 signingkey = ("s" in field[11])
1238 elif key and field[0] == "uid":
1239 (name, addr) = self.parse_address(field[9])
1240 if "email" not in self.keys[key] and "@" in addr:
1241 self.keys[key]["email"] = addr
1242 self.keys[key]["name"] = name
1243 elif signingkey and field[0] == "fpr":
1244 self.keys[key]["fingerprints"].append(field[9])
1245 self.fpr_lookup[field[9]] = key
    def import_users_from_ldap(self, session):
        """Attach login uids to previously loaded keys using LDAP.

        Queries the configured LDAP directory for accounts with key
        fingerprints and matches them against self.fpr_lookup.

        @return: a (byname, byuid) pair of dictionaries
        """
        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]

        # Anonymous bind is used; only public directory data is read.
        l = ldap.open(LDAPServer)
        l.simple_bind_s("","")
        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
               ["uid", "keyfingerprint", "cn", "mn", "sn"])

        ldap_fin_uid_id = {}

        # NOTE(review): the "for entry in Attrs:" loop header and the
        # byname/byuid initialisation are elided in this view.
        uid = entry["uid"][0]
        name = get_ldap_name(entry)
        fingerprints = entry["keyFingerPrint"]
        for f in fingerprints:
            key = self.fpr_lookup.get(f, None)
            if key not in self.keys:
                # NOTE(review): presumably "continue" here — line elided.
            self.keys[key]["uid"] = uid

            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, name)
            byname[uid] = (keyid, name)

        return (byname, byuid)
    def generate_users_from_keyring(self, format, session):
        """Generate uid entries for all loaded keys using C{format}.

        Keys without a usable email address get a placeholder
        "invalid-uid" entry.

        @return: a (byname, byuid) pair of dictionaries
        """
        # NOTE(review): byname/byuid initialisation and parts of the if/else
        # structure are elided in this view.
        for x in self.keys.keys():
            if "email" not in self.keys[x]:
                # No usable email: assign the placeholder uid.
                self.keys[x]["uid"] = format % "invalid-uid"

            uid = format % self.keys[x]["email"]
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, self.keys[x]["name"])
            byname[uid] = (keyid, self.keys[x]["name"])
            self.keys[x]["uid"] = uid

        # Register the shared placeholder uid itself.
        uid = format % "invalid-uid"
        keyid = get_or_set_uid(uid, session).uid_id
        byuid[keyid] = (uid, "ungeneratable user id")
        byname[uid] = (keyid, "ungeneratable user id")

        return (byname, byuid)

__all__.append('Keyring')
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @rtype: Keyring
    @return: the Keyring object for this keyring
    """
    q = session.query(Keyring).filter_by(keyring_name=keyring)

    # NOTE(review): the try/return lines around this handler are elided in
    # this view.
    except NoResultFound:

__all__.append('get_keyring')
def get_active_keyring_paths(session=None):
    """
    @rtype: list of strings
    @return: list of active keyring paths, highest priority first
    """
    return [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).order_by(desc(Keyring.priority)).all() ]

__all__.append('get_active_keyring_paths')
def get_primary_keyring_path(session=None):
    """
    Get the full path to the highest priority active keyring

    @rtype: str or None
    @return: path to the active keyring with the highest priority or None if no
             keyring is configured
    """
    keyrings = get_active_keyring_paths()

    if len(keyrings) > 0:
        # NOTE(review): the return statements are elided in this view.

__all__.append('get_primary_keyring_path')
1360 ################################################################################
class DBChange(object):
    """An uploaded .changes file (changes table row)."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __init__ body and "def __repr__(self):" line are
        # elided; the return below presumably belongs to __repr__.
        return '<DBChange %s>' % self.changesname

__all__.append('DBChange')
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return: DBChange object for the given filename (C{None} if not present)
    """
    q = session.query(DBChange).filter_by(changesname=filename)

    # NOTE(review): the try/return lines around this handler are elided in
    # this view.
    except NoResultFound:

__all__.append('get_dbchange')
1396 ################################################################################
class Maintainer(ORMObject):
    """A maintainer name/email entry (maintainer table)."""
    def __init__(self, name = None):
        # NOTE(review): the attribute assignment is elided in this view.

    def properties(self):
        return ['name', 'maintainer_id']

    def not_null_constraints(self):
        # NOTE(review): the return list is elided in this view.

    def get_split_maintainer(self):
        """Split self.name via fix_maintainer(); empty tuple when unset."""
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())

__all__.append('Maintainer')
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """
    q = session.query(Maintainer).filter_by(name=name)
    # NOTE(review): the try/return lines around this handler are elided.
    except NoResultFound:
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()

__all__.append('get_or_set_maintainer')
def get_maintainer(maintainer_id, session=None):
    """
    Return the name of the maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer
    @return: the Maintainer with this C{maintainer_id}
    """
    return session.query(Maintainer).get(maintainer_id)

__all__.append('get_maintainer')
1467 ################################################################################
class NewComment(object):
    """An ftpmaster comment on a queued package (new_comments table)."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __init__ body and "def __repr__(self):" line are
        # elided; the return below presumably belongs to __repr__.
        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)

__all__.append('NewComment')
def has_new_comment(policy_queue, package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """
    q = session.query(NewComment).filter_by(policy_queue=policy_queue)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    return bool(q.count() > 0)

__all__.append('has_new_comment')
def get_new_comments(policy_queue, package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """
    q = session.query(NewComment).filter_by(policy_queue=policy_queue)
    if package is not None: q = q.filter_by(package=package)
    if version is not None: q = q.filter_by(version=version)
    if comment_id is not None: q = q.filter_by(comment_id=comment_id)
    # NOTE(review): "return q.all()" is elided in this view.

__all__.append('get_new_comments')
1537 ################################################################################
class Override(ORMObject):
    """A section/priority override for a package in a suite (override table)."""
    def __init__(self, package = None, suite = None, component = None, overridetype = None, \
        section = None, priority = None):
        self.package = package
        # NOTE(review): the self.suite assignment is elided in this view.
        self.component = component
        self.overridetype = overridetype
        self.section = section
        self.priority = priority

    def properties(self):
        # NOTE(review): continuation line(s) of this list are elided.
        return ['package', 'suite', 'component', 'overridetype', 'section', \

    def not_null_constraints(self):
        return ['package', 'suite', 'component', 'overridetype', 'section']

__all__.append('Override')
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to.  If
    None, don't limit.  Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
    limit to.  If None, don't limit.  Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
    limit to.  If None, don't limit.  Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """
    q = session.query(Override)
    q = q.filter_by(package=package)

    # Each optional filter accepts a scalar or a list; normalise to a list.
    if suite is not None:
        if not isinstance(suite, list): suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list): component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list): overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

    # NOTE(review): "return q.all()" is elided in this view.

__all__.append('get_override')
1606 ################################################################################
class OverrideType(ORMObject):
    """The type name of an override entry (override_type table)."""

    def __init__(self, overridetype=None):
        self.overridetype = overridetype

    def properties(self):
        """Attribute names exposed through ORMObject introspection."""
        return ['overridetype', 'overridetype_id', 'overrides_count']

    def not_null_constraints(self):
        """Columns that must never be NULL."""
        return ['overridetype']

__all__.append('OverrideType')
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: OverrideType
    @return: the database id for the given override type
    """
    q = session.query(OverrideType).filter_by(overridetype=override_type)

    # NOTE(review): the try/return lines around this handler are elided in
    # this view.
    except NoResultFound:

__all__.append('get_override_type')
1645 ################################################################################
class PolicyQueue(object):
    """A policy queue (policy_queue table row)."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __init__ body and "def __repr__(self):" line are
        # elided; the return below presumably belongs to __repr__.
        return '<PolicyQueue %s>' % self.queue_name

__all__.append('PolicyQueue')
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(queue_name=queuename)

    # NOTE(review): the try/return lines around this handler are elided in
    # this view.
    except NoResultFound:

__all__.append('get_policy_queue')
1681 ################################################################################
class PolicyQueueUpload(object):
    """An upload waiting in a policy queue."""
    def __cmp__(self, other):
        """Order by source name, version, source-vs-binary, then changes name."""
        # NOTE(review): the early-return checks and branch bodies between
        # the comparisons are elided in this view.
        ret = cmp(self.changes.source, other.changes.source)
        ret = apt_pkg.version_compare(self.changes.version, other.changes.version)
        if self.source is not None and other.source is None:
        elif self.source is None and other.source is not None:
        ret = cmp(self.changes.changesname, other.changes.changesname)

__all__.append('PolicyQueueUpload')
1699 ################################################################################
class PolicyQueueByhandFile(object):
    """A byhand file belonging to a policy-queue upload."""
    # NOTE(review): the class body is elided in this view.

__all__.append('PolicyQueueByhandFile')
1706 ################################################################################
class Priority(ORMObject):
    """A package priority (priority table)."""
    def __init__(self, priority = None, level = None):
        self.priority = priority
        # NOTE(review): the self.level assignment is elided in this view.

    def properties(self):
        return ['priority', 'priority_id', 'level', 'overrides_count']

    def not_null_constraints(self):
        return ['priority', 'level']
1719 def __eq__(self, val):
1720 if isinstance(val, str):
1721 return (self.priority == val)
1722 # This signals to use the normal comparison operator
1723 return NotImplemented
    def __ne__(self, val):
        """Inverse of the string comparison in __eq__."""
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented

__all__.append('Priority')
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority
    """
    q = session.query(Priority).filter_by(priority=priority)

    # NOTE(review): the try/return lines around this handler are elided in
    # this view.
    except NoResultFound:

__all__.append('get_priority')
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """
    # NOTE(review): the result-dict initialisation, the loop header over the
    # query results and the return are elided in this view.
    q = session.query(Priority)
    ret[x.priority] = x.priority_id

__all__.append('get_priorities')
1780 ################################################################################
class Section(ORMObject):
    """A section name (section table)."""
    def __init__(self, section = None):
        self.section = section

    def properties(self):
        return ['section', 'section_id', 'overrides_count']

    def not_null_constraints(self):
        # NOTE(review): the return list is elided in this view.
1792 def __eq__(self, val):
1793 if isinstance(val, str):
1794 return (self.section == val)
1795 # This signals to use the normal comparison operator
1796 return NotImplemented
    def __ne__(self, val):
        """Inverse of the string comparison in __eq__."""
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented

__all__.append('Section')
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name
    """
    q = session.query(Section).filter_by(section=section)

    # NOTE(review): the try/return lines around this handler are elided in
    # this view.
    except NoResultFound:

__all__.append('get_section')
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """
    # NOTE(review): the result-dict initialisation, the loop header over the
    # query results and the return are elided in this view.
    q = session.query(Section)
    ret[x.section] = x.section_id

__all__.append('get_sections')
1853 ################################################################################
class SignatureHistory(ORMObject):
    """A record of a seen signature: fingerprint, timestamp, content hash."""
    def from_signed_file(cls, signed_file):
        """signature history entry from signed file

        @type signed_file: L{daklib.gpg.SignedFile}
        @param signed_file: signed file

        @rtype: L{SignatureHistory}
        """
        # NOTE(review): presumably a @classmethod that builds a new instance;
        # the decorator and the instance-creation line are elided here.
        self.fingerprint = signed_file.primary_fingerprint
        self.signature_timestamp = signed_file.signature_timestamp
        self.contents_sha1 = signed_file.contents_sha1()

__all__.append('SignatureHistory')
1873 ################################################################################
class SrcContents(ORMObject):
    """A file path contained in a source package (src_contents table)."""
    def __init__(self, file = None, source = None):
        # NOTE(review): the self.file assignment is elided in this view.
        self.source = source

    def properties(self):
        return ['file', 'source']

__all__.append('SrcContents')
1885 ################################################################################
1887 from debian.debfile import Deb822
# Temporary Deb822 subclass to fix bugs with : handling; see #597249
class Dak822(Deb822):
    def _internal_parser(self, sequence, fields=None):
        """Parse RFC822-style paragraphs into self, tolerating odd colons."""
        # The key is non-whitespace, non-colon characters before any colon.
        key_part = r"^(?P<key>[^: \t\n\r\f\v]+)\s*:\s*"
        single = re.compile(key_part + r"(?P<data>\S.*?)\s*$")
        multi = re.compile(key_part + r"$")
        multidata = re.compile(r"^\s(?P<data>.+?)\s*$")

        wanted_field = lambda f: fields is None or f in fields

        if isinstance(sequence, basestring):
            sequence = sequence.splitlines()

        # NOTE(review): curkey/content initialisation and several "if m:"
        # branch headers in the loop below are elided in this view.
        for line in self.gpg_stripped_paragraph(sequence):
            m = single.match(line)
            self[curkey] = content
            if not wanted_field(m.group('key')):
                curkey = m.group('key')
                content = m.group('data')
            m = multi.match(line)
            self[curkey] = content
            if not wanted_field(m.group('key')):
                curkey = m.group('key')
            m = multidata.match(line)
            content += '\n' + line # XXX not m.group('data')?
        self[curkey] = content
class DBSource(ORMObject):
    """A source package version (source table)."""
    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None, fingerprint = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date
        self.fingerprint = fingerprint

    # NOTE(review): the enclosing def/property line is elided; this return
    # presumably belongs to a pkid-style accessor.
    return self.source_id

    def properties(self):
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \
            'install_date', 'binaries_count', 'uploaders_count']

    def not_null_constraints(self):
        return ['source', 'version', 'install_date', 'maintainer', \
            'changedby', 'poolfile']

    def read_control_fields(self):
        """
        Reads the control information from a dsc

        @rtype: tuple
        @return: fields is the dsc information in a dictionary form
        """
        fullpath = self.poolfile.fullpath
        fields = Dak822(open(self.poolfile.fullpath, 'r'))
        # NOTE(review): "return fields" is elided in this view.

    metadata = association_proxy('key', 'value')

    def scan_contents(self):
        """
        Returns a set of names for non directories. The path names are
        normalized after converting them from either utf-8 or iso8859-1
        encoding.
        """
        fullpath = self.poolfile.fullpath
        from daklib.contents import UnpackedSource
        unpacked = UnpackedSource(fullpath)
        for name in unpacked.get_all_filenames():
            # enforce proper utf-8 encoding
            # NOTE(review): the "try:" line is elided in this view.
            name.decode('utf-8')
            except UnicodeDecodeError:
                name = name.decode('iso8859-1').encode('utf-8')

__all__.append('DBSource')
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
      1. exact match     => 1.0-3
      2. bin-only NMU    => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: list
    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: returns 1 if a source with expected version is found, otherwise 0
    """
    # NOTE(review): mutable default argument (suites=["any"]) is shared
    # between calls — safe only while callers never mutate it.
    from daklib.regexes import re_bin_only_nmu
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))

        # source must exist in 'suite' or a suite that is enhanced by 'suite'
        s = get_suite(suite, session)
        # NOTE(review): the handling for suite "any" is elided in this view.
        enhances_vcs = session.query(VersionCheck).filter(VersionCheck.suite==s).filter_by(check='Enhances')
        considered_suites = [ vc.reference for vc in enhances_vcs ]
        considered_suites.append(s)

        q = q.filter(DBSource.suites.any(Suite.suite_id.in_([s.suite_id for s in considered_suites])))

    # No source found so return not ok
    # NOTE(review): the return statements are elided in this view.

__all__.append('source_exists')
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """
    return session.query(Suite).filter(Suite.sources.any(source=source)).all()

__all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @type source: str
    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect.  If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBSource objects for the given name (may be empty)
    """
    q = session.query(DBSource).filter_by(source=source)

    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)

    # NOTE(review): "return q.all()" is elided in this view.

__all__.append('get_sources_from_name')
# FIXME: This function fails badly if it finds more than 1 source package and
# its implementation is trivial enough to be inlined.
def get_source_in_suite(source, suite, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite: string
    @param suite: the suite name

    @rtype: string
    @return: the version for I{source} in I{suite}
    """
    q = get_suite(suite, session).get_sources(source)
    # NOTE(review): the try/return lines around this handler are elided in
    # this view.
    except NoResultFound:

__all__.append('get_source_in_suite')
def import_metadata_into_db(obj, session=None):
    """
    This routine works on either DBBinary or DBSource objects and imports
    their metadata into the database
    """
    fields = obj.read_control_fields()
    for k in fields.keys():
        # NOTE(review): the outer and inner "try:" lines are elided in
        # this view.
        val = str(fields[k])
        except UnicodeEncodeError:
            # Fall back to UTF-8
            val = fields[k].encode('utf-8')
            except UnicodeEncodeError:
                # Finally try iso8859-1
                val = fields[k].encode('iso8859-1')
        # Otherwise we allow the exception to percolate up and we cause
        # a reject as someone is playing silly buggers

        obj.metadata[get_or_set_metadatakey(k, session)] = val

    session.commit_or_flush()

__all__.append('import_metadata_into_db')
2161 ################################################################################
class SrcFormat(object):
    """A source package format name (src_format table)."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __init__ body and "def __repr__(self):" line are
        # elided; the return below presumably belongs to __repr__.
        return '<SrcFormat %s>' % (self.format_name)

__all__.append('SrcFormat')
2172 ################################################################################
# (display name, Suite attribute) pairs consumed by the details-rendering
# loop in the Suite class below.
# NOTE(review): at least one entry between 'Origin' and 'Description' is
# elided in this view.
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
# Why the heck don't we have any UNIQUE constraints in table suite?
# TODO: Add UNIQUE constraints for appropriate columns.
class Suite(ORMObject):
    """A distribution suite (suite table)."""
    def __init__(self, suite_name = None, version = None):
        self.suite_name = suite_name
        self.version = version

    def properties(self):
        # NOTE(review): continuation line(s) of this list are elided.
        return ['suite_name', 'version', 'sources_count', 'binaries_count', \

    def not_null_constraints(self):
        return ['suite_name']
2204 def __eq__(self, val):
2205 if isinstance(val, str):
2206 return (self.suite_name == val)
2207 # This signals to use the normal comparison operator
2208 return NotImplemented
    def __ne__(self, val):
        """Inverse of the string comparison in __eq__."""
        if isinstance(val, str):
            return (self.suite_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    # NOTE(review): the enclosing "def details(self):" line and the ret
    # initialisation are elided in this view.
    for disp, field in SUITE_FIELDS:
        val = getattr(self, field, None)
        ret.append("%s: %s" % (disp, val))

    return "\n".join(ret)
    def get_architectures(self, skipsrc=False, skipall=False):
        """
        Returns list of Architecture objects

        @type skipsrc: boolean
        @param skipsrc: Whether to skip returning the 'source' architecture entry

        @type skipall: boolean
        @param skipall: Whether to skip returning the 'all' architecture entry

        @rtype: list
        @return: list of Architecture objects for the given name (may be empty)
        """
        q = object_session(self).query(Architecture).with_parent(self)
        # NOTE(review): the "if skipsrc:" / "if skipall:" guard lines are
        # elided in this view.
        q = q.filter(Architecture.arch_string != 'source')
        q = q.filter(Architecture.arch_string != 'all')
        return q.order_by(Architecture.arch_string).all()
    def get_sources(self, source):
        """
        Returns a query object representing DBSource that is part of C{suite}.

          - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}

        @type source: string
        @param source: source package name

        @rtype: sqlalchemy.orm.query.Query
        @return: a query of DBSource
        """
        session = object_session(self)
        # NOTE(review): the continuation of this chained query is elided.
        return session.query(DBSource).filter_by(source = source). \

    def get_overridesuite(self):
        """Return the Suite that overrides are taken from."""
        if self.overridesuite is None:
            # NOTE(review): this branch's body is elided in this view.
        return object_session(self).query(Suite).filter_by(suite_name=self.overridesuite).one()

    # NOTE(review): the enclosing property/def line is elided; this return
    # presumably belongs to a "path"-style accessor.
    return os.path.join(self.archive.path, 'dists', self.suite_name)

__all__.append('Suite')
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Suite
    @return: Suite object for the requested suite name (None if not present)
    """
    q = session.query(Suite).filter_by(suite_name=suite)

    # NOTE(review): the try/return lines around this handler are elided in
    # this view.
    except NoResultFound:

__all__.append('get_suite')
2303 ################################################################################
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name. The list is
    empty if suite does not exist.

    @type suite: str
    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Architecture objects for the given name (may be empty)
    """
    # NOTE(review): the "try:" line and the unknown-suite fallback return
    # are elided in this view; AttributeError fires when get_suite() gives
    # back None.
    return get_suite(suite, session).get_architectures(skipsrc, skipall)
    except AttributeError:

__all__.append('get_suite_architectures')
2337 ################################################################################
class Uid(ORMObject):
    """A login uid with associated real name (uid table)."""
    def __init__(self, uid = None, name = None):
        # NOTE(review): the attribute assignments are elided in this view.
2344 def __eq__(self, val):
2345 if isinstance(val, str):
2346 return (self.uid == val)
2347 # This signals to use the normal comparison operator
2348 return NotImplemented
    def __ne__(self, val):
        """Inverse of the string comparison in __eq__."""
        if isinstance(val, str):
            return (self.uid != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        return ['uid', 'name', 'fingerprint']

    def not_null_constraints(self):
        # NOTE(review): the return list is elided in this view.

__all__.append('Uid')
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: Uid
    @return: the uid object for the given uidname
    """
    q = session.query(Uid).filter_by(uid=uidname)

    # NOTE(review): the try/return lines and the insertion of the new row
    # are partially elided in this view.
    except NoResultFound:
        session.commit_or_flush()

__all__.append('get_or_set_uid')
def get_uid_from_fingerprint(fpr, session=None):
    """Look up the Uid joined to the key with fingerprint C{fpr}."""
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)

    # NOTE(review): the try/return lines around this handler are elided in
    # this view.
    except NoResultFound:

__all__.append('get_uid_from_fingerprint')
2410 ################################################################################
class MetadataKey(ORMObject):
    """A metadata field name (metadata_keys table)."""
    def __init__(self, key = None):
        # NOTE(review): the attribute assignment is elided in this view.

    def properties(self):
        # NOTE(review): the return list is elided in this view.

    def not_null_constraints(self):
        # NOTE(review): the return list is elided in this view.

__all__.append('MetadataKey')
def get_or_set_metadatakey(keyname, session=None):
    """
    Returns MetadataKey object for given C{keyname}.

    If no matching keyname is found, a row is inserted.

    @type keyname: string
    @param keyname: The keyname to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: MetadataKey
    @return: the metadatakey object for the given keyname
    """
    q = session.query(MetadataKey).filter_by(key=keyname)

    # NOTE(review): the try/return and session.add lines are partially
    # elided in this view.
    except NoResultFound:
        ret = MetadataKey(keyname)
        session.commit_or_flush()

__all__.append('get_or_set_metadatakey')
2456 ################################################################################
class BinaryMetadata(ORMObject):
    """A key/value metadata entry attached to a binary package."""
    def __init__(self, key = None, value = None, binary = None):
        # NOTE(review): the key/value assignments are elided in this view.
        self.binary = binary

    def properties(self):
        return ['binary', 'key', 'value']

    def not_null_constraints(self):
        # NOTE(review): the return list is elided in this view.

__all__.append('BinaryMetadata')
2472 ################################################################################
class SourceMetadata(ORMObject):
    """A key/value metadata entry attached to a source package."""
    def __init__(self, key = None, value = None, source = None):
        # NOTE(review): the key/value assignments are elided in this view.
        self.source = source

    def properties(self):
        return ['source', 'key', 'value']

    def not_null_constraints(self):
        # NOTE(review): the return list is elided in this view.

__all__.append('SourceMetadata')
2488 ################################################################################
class VersionCheck(ORMObject):
    """A version-check rule linking a suite to a reference suite."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the body is elided in this view.

    def properties(self):
        #return ['suite_id', 'check', 'reference_id']
        # NOTE(review): the actual return is elided in this view.

    def not_null_constraints(self):
        return ['suite', 'check', 'reference']

__all__.append('VersionCheck')
def get_version_checks(suite_name, check = None, session = None):
    """Return version-check rows for C{suite_name}, optionally limited to
    a single check type."""
    suite = get_suite(suite_name, session)
    # Make sure that what we return is iterable so that list comprehensions
    # involving this don't cause a traceback
    # NOTE(review): the unknown-suite early return is elided in this view.
    q = session.query(VersionCheck).filter_by(suite=suite)
    # NOTE(review): the "if check:" guard line is elided in this view.
    q = q.filter_by(check=check)
    # NOTE(review): "return q.all()" is elided in this view.

__all__.append('get_version_checks')
2517 ################################################################################
2519 class DBConn(object):
# Borg-style shared-state singleton wrapping the archive database:
# every DBConn instance shares __shared_state, so the engine, reflected
# metadata and ORM mappers are built only once per process.
2521 database module init.
2525 def __init__(self, *args, **kwargs):
# Adopt the shared state, then perform one-time setup guarded by the
# 'initialised' flag. Passing a 'debug' keyword turns on engine echo
# (see engine_args in __createconn).
2526 self.__dict__ = self.__shared_state
2528 if not getattr(self, 'initialised', False):
2529 self.initialised = True
2530 self.debug = kwargs.has_key('debug')
2533 def __setuptables(self):
# Reflect (autoload) the schema of each listed table and view from the
# live database and publish them as self.tbl_<name> / self.view_<name>.
2536 'acl_architecture_map',
2537 'acl_fingerprint_map',
2544 'binaries_metadata',
2551 'external_overrides',
2552 'extra_src_references',
2554 'files_archive_map',
2560 # TODO: the maintainer column in table override should be removed.
2564 'policy_queue_upload',
2565 'policy_queue_upload_binaries_map',
2566 'policy_queue_byhand_file',
2569 'signature_history',
2578 'suite_architectures',
2579 'suite_build_queue_copy',
2580 'suite_src_formats',
# Database views, reflected the same way as tables (read-only use).
2586 'almost_obsolete_all_associations',
2587 'almost_obsolete_src_associations',
2588 'any_associations_source',
2589 'bin_associations_binaries',
2590 'binaries_suite_arch',
2593 'newest_all_associations',
2594 'newest_any_associations',
2596 'newest_src_association',
2597 'obsolete_all_associations',
2598 'obsolete_any_associations',
2599 'obsolete_any_by_all_associations',
2600 'obsolete_src_associations',
2602 'src_associations_bin',
2603 'src_associations_src',
2604 'suite_arch_by_name',
# Autoload pulls column definitions from the database; useexisting
# tolerates re-running setup on the shared MetaData.
2607 for table_name in tables:
2608 table = Table(table_name, self.db_meta, \
2609 autoload=True, useexisting=True)
2610 setattr(self, 'tbl_%s' % table_name, table)
2612 for view_name in views:
2613 view = Table(view_name, self.db_meta, autoload=True)
2614 setattr(self, 'view_%s' % view_name, view)
2616 def __setupmappers(self):
# Wire each ORM class to its reflected table using SQLAlchemy classical
# mappers, declaring relations, backrefs, column aliases and the
# validation MapperExtension where applicable.
2617 mapper(Architecture, self.tbl_architecture,
2618 properties = dict(arch_id = self.tbl_architecture.c.id,
2619 suites = relation(Suite, secondary=self.tbl_suite_architectures,
2620 order_by=self.tbl_suite.c.suite_name,
2621 backref=backref('architectures', order_by=self.tbl_architecture.c.arch_string))),
2622 extension = validator)
2624 mapper(ACL, self.tbl_acl,
2626 architectures = relation(Architecture, secondary=self.tbl_acl_architecture_map, collection_class=set),
2627 fingerprints = relation(Fingerprint, secondary=self.tbl_acl_fingerprint_map, collection_class=set),
2628 match_keyring = relation(Keyring, primaryjoin=(self.tbl_acl.c.match_keyring_id == self.tbl_keyrings.c.id)),
2629 per_source = relation(ACLPerSource, collection_class=set),
2632 mapper(ACLPerSource, self.tbl_acl_per_source,
2634 acl = relation(ACL),
# Two relations into the fingerprint table require explicit
# primaryjoins to disambiguate the foreign keys.
2635 fingerprint = relation(Fingerprint, primaryjoin=(self.tbl_acl_per_source.c.fingerprint_id == self.tbl_fingerprint.c.id)),
2636 created_by = relation(Fingerprint, primaryjoin=(self.tbl_acl_per_source.c.created_by_id == self.tbl_fingerprint.c.id)),
2639 mapper(Archive, self.tbl_archive,
2640 properties = dict(archive_id = self.tbl_archive.c.id,
2641 archive_name = self.tbl_archive.c.name))
2643 mapper(ArchiveFile, self.tbl_files_archive_map,
2644 properties = dict(archive = relation(Archive, backref='files'),
2645 component = relation(Component),
2646 file = relation(PoolFile, backref='archives')))
2648 mapper(BuildQueue, self.tbl_build_queue,
2649 properties = dict(queue_id = self.tbl_build_queue.c.id,
2650 suite = relation(Suite, primaryjoin=(self.tbl_build_queue.c.suite_id==self.tbl_suite.c.id))))
2652 mapper(DBBinary, self.tbl_binaries,
2653 properties = dict(binary_id = self.tbl_binaries.c.id,
2654 package = self.tbl_binaries.c.package,
2655 version = self.tbl_binaries.c.version,
2656 maintainer_id = self.tbl_binaries.c.maintainer,
2657 maintainer = relation(Maintainer),
2658 source_id = self.tbl_binaries.c.source,
2659 source = relation(DBSource, backref='binaries'),
2660 arch_id = self.tbl_binaries.c.architecture,
2661 architecture = relation(Architecture),
2662 poolfile_id = self.tbl_binaries.c.file,
2663 poolfile = relation(PoolFile),
2664 binarytype = self.tbl_binaries.c.type,
2665 fingerprint_id = self.tbl_binaries.c.sig_fpr,
2666 fingerprint = relation(Fingerprint),
2667 install_date = self.tbl_binaries.c.install_date,
2668 suites = relation(Suite, secondary=self.tbl_bin_associations,
2669 backref=backref('binaries', lazy='dynamic')),
2670 extra_sources = relation(DBSource, secondary=self.tbl_extra_src_references,
2671 backref=backref('extra_binary_references', lazy='dynamic')),
# metadata rows exposed as a dict-like collection keyed by MetadataKey.
2672 key = relation(BinaryMetadata, cascade='all',
2673 collection_class=attribute_mapped_collection('key'))),
2674 extension = validator)
2676 mapper(Component, self.tbl_component,
2677 properties = dict(component_id = self.tbl_component.c.id,
2678 component_name = self.tbl_component.c.name),
2679 extension = validator)
2681 mapper(DBConfig, self.tbl_config,
2682 properties = dict(config_id = self.tbl_config.c.id))
2684 mapper(DSCFile, self.tbl_dsc_files,
2685 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
2686 source_id = self.tbl_dsc_files.c.source,
2687 source = relation(DBSource),
2688 poolfile_id = self.tbl_dsc_files.c.file,
2689 poolfile = relation(PoolFile)))
2691 mapper(ExternalOverride, self.tbl_external_overrides,
2693 suite_id = self.tbl_external_overrides.c.suite,
2694 suite = relation(Suite),
2695 component_id = self.tbl_external_overrides.c.component,
2696 component = relation(Component)))
2698 mapper(PoolFile, self.tbl_files,
2699 properties = dict(file_id = self.tbl_files.c.id,
2700 filesize = self.tbl_files.c.size),
2701 extension = validator)
2703 mapper(Fingerprint, self.tbl_fingerprint,
2704 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
2705 uid_id = self.tbl_fingerprint.c.uid,
2706 uid = relation(Uid),
2707 keyring_id = self.tbl_fingerprint.c.keyring,
2708 keyring = relation(Keyring),
2709 acl = relation(ACL)),
2710 extension = validator)
# NOTE(review): the trailing comma after this mapper() call turns the
# statement into a one-element tuple expression; harmless at runtime
# but almost certainly unintended — worth cleaning up separately.
2712 mapper(Keyring, self.tbl_keyrings,
2713 properties = dict(keyring_name = self.tbl_keyrings.c.name,
2714 keyring_id = self.tbl_keyrings.c.id,
2715 acl = relation(ACL, primaryjoin=(self.tbl_keyrings.c.acl_id == self.tbl_acl.c.id)))),
2717 mapper(DBChange, self.tbl_changes,
2718 properties = dict(change_id = self.tbl_changes.c.id,
2719 seen = self.tbl_changes.c.seen,
2720 source = self.tbl_changes.c.source,
2721 binaries = self.tbl_changes.c.binaries,
2722 architecture = self.tbl_changes.c.architecture,
2723 distribution = self.tbl_changes.c.distribution,
2724 urgency = self.tbl_changes.c.urgency,
2725 maintainer = self.tbl_changes.c.maintainer,
2726 changedby = self.tbl_changes.c.changedby,
2727 date = self.tbl_changes.c.date,
2728 version = self.tbl_changes.c.version))
# Two relations into source (maintainer vs. changed-by) need explicit
# primaryjoins since both columns reference the maintainer table.
2730 mapper(Maintainer, self.tbl_maintainer,
2731 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
2732 maintains_sources = relation(DBSource, backref='maintainer',
2733 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
2734 changed_sources = relation(DBSource, backref='changedby',
2735 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
2736 extension = validator)
2738 mapper(NewComment, self.tbl_new_comments,
2739 properties = dict(comment_id = self.tbl_new_comments.c.id,
2740 policy_queue = relation(PolicyQueue)))
# Every override relation carries a lazy='dynamic' 'overrides' backref
# so the reverse collections are query objects, not loaded lists.
2742 mapper(Override, self.tbl_override,
2743 properties = dict(suite_id = self.tbl_override.c.suite,
2744 suite = relation(Suite, \
2745 backref=backref('overrides', lazy='dynamic')),
2746 package = self.tbl_override.c.package,
2747 component_id = self.tbl_override.c.component,
2748 component = relation(Component, \
2749 backref=backref('overrides', lazy='dynamic')),
2750 priority_id = self.tbl_override.c.priority,
2751 priority = relation(Priority, \
2752 backref=backref('overrides', lazy='dynamic')),
2753 section_id = self.tbl_override.c.section,
2754 section = relation(Section, \
2755 backref=backref('overrides', lazy='dynamic')),
2756 overridetype_id = self.tbl_override.c.type,
2757 overridetype = relation(OverrideType, \
2758 backref=backref('overrides', lazy='dynamic'))))
2760 mapper(OverrideType, self.tbl_override_type,
2761 properties = dict(overridetype = self.tbl_override_type.c.type,
2762 overridetype_id = self.tbl_override_type.c.id))
2764 mapper(PolicyQueue, self.tbl_policy_queue,
2765 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id,
2766 suite = relation(Suite, primaryjoin=(self.tbl_policy_queue.c.suite_id == self.tbl_suite.c.id))))
2768 mapper(PolicyQueueUpload, self.tbl_policy_queue_upload,
2770 changes = relation(DBChange),
2771 policy_queue = relation(PolicyQueue, backref='uploads'),
2772 target_suite = relation(Suite),
2773 source = relation(DBSource),
2774 binaries = relation(DBBinary, secondary=self.tbl_policy_queue_upload_binaries_map),
2777 mapper(PolicyQueueByhandFile, self.tbl_policy_queue_byhand_file,
2779 upload = relation(PolicyQueueUpload, backref='byhand'),
2783 mapper(Priority, self.tbl_priority,
2784 properties = dict(priority_id = self.tbl_priority.c.id))
2786 mapper(Section, self.tbl_section,
2787 properties = dict(section_id = self.tbl_section.c.id,
2788 section=self.tbl_section.c.section))
2790 mapper(SignatureHistory, self.tbl_signature_history)
2792 mapper(DBSource, self.tbl_source,
2793 properties = dict(source_id = self.tbl_source.c.id,
2794 version = self.tbl_source.c.version,
2795 maintainer_id = self.tbl_source.c.maintainer,
2796 poolfile_id = self.tbl_source.c.file,
2797 poolfile = relation(PoolFile),
2798 fingerprint_id = self.tbl_source.c.sig_fpr,
2799 fingerprint = relation(Fingerprint),
2800 changedby_id = self.tbl_source.c.changedby,
2801 srcfiles = relation(DSCFile,
2802 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
2803 suites = relation(Suite, secondary=self.tbl_src_associations,
2804 backref=backref('sources', lazy='dynamic')),
2805 uploaders = relation(Maintainer,
2806 secondary=self.tbl_src_uploaders),
# metadata rows exposed as a dict-like collection keyed by MetadataKey.
2807 key = relation(SourceMetadata, cascade='all',
2808 collection_class=attribute_mapped_collection('key'))),
2809 extension = validator)
2811 mapper(SrcFormat, self.tbl_src_format,
2812 properties = dict(src_format_id = self.tbl_src_format.c.id,
2813 format_name = self.tbl_src_format.c.format_name))
# Suite references the policy-queue table twice (policy vs. NEW queue),
# hence the explicit primaryjoins.
2815 mapper(Suite, self.tbl_suite,
2816 properties = dict(suite_id = self.tbl_suite.c.id,
2817 policy_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.policy_queue_id == self.tbl_policy_queue.c.id)),
2818 new_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.new_queue_id == self.tbl_policy_queue.c.id)),
2819 copy_queues = relation(BuildQueue,
2820 secondary=self.tbl_suite_build_queue_copy),
2821 srcformats = relation(SrcFormat, secondary=self.tbl_suite_src_formats,
2822 backref=backref('suites', lazy='dynamic')),
2823 archive = relation(Archive, backref='suites'),
2824 acls = relation(ACL, secondary=self.tbl_suite_acl_map, collection_class=set)),
2825 extension = validator)
2827 mapper(Uid, self.tbl_uid,
2828 properties = dict(uid_id = self.tbl_uid.c.id,
2829 fingerprint = relation(Fingerprint)),
2830 extension = validator)
2832 mapper(BinContents, self.tbl_bin_contents,
2834 binary = relation(DBBinary,
2835 backref=backref('contents', lazy='dynamic', cascade='all')),
2836 file = self.tbl_bin_contents.c.file))
2838 mapper(SrcContents, self.tbl_src_contents,
2840 source = relation(DBSource,
2841 backref=backref('contents', lazy='dynamic', cascade='all')),
2842 file = self.tbl_src_contents.c.file))
2844 mapper(MetadataKey, self.tbl_metadata_keys,
2846 key_id = self.tbl_metadata_keys.c.key_id,
2847 key = self.tbl_metadata_keys.c.key))
2849 mapper(BinaryMetadata, self.tbl_binaries_metadata,
2851 binary_id = self.tbl_binaries_metadata.c.bin_id,
2852 binary = relation(DBBinary),
2853 key_id = self.tbl_binaries_metadata.c.key_id,
2854 key = relation(MetadataKey),
2855 value = self.tbl_binaries_metadata.c.value))
2857 mapper(SourceMetadata, self.tbl_source_metadata,
2859 source_id = self.tbl_source_metadata.c.src_id,
2860 source = relation(DBSource),
2861 key_id = self.tbl_source_metadata.c.key_id,
2862 key = relation(MetadataKey),
2863 value = self.tbl_source_metadata.c.value))
# Both suite and reference point at the suite table; the explicit
# primaryjoins disambiguate, and reference is eagerly (joined-)loaded.
2865 mapper(VersionCheck, self.tbl_version_check,
2867 suite_id = self.tbl_version_check.c.suite,
2868 suite = relation(Suite, primaryjoin=self.tbl_version_check.c.suite==self.tbl_suite.c.id),
2869 reference_id = self.tbl_version_check.c.reference,
2870 reference = relation(Suite, primaryjoin=self.tbl_version_check.c.reference==self.tbl_suite.c.id, lazy='joined')))
2872 ## Connection functions
2873 def __createconn(self):
# Build a PostgreSQL connection string from the dak configuration —
# preferring a libpq service definition, then an explicit host/port,
# then a local-socket connection — create the engine and session
# factory, and run table reflection plus mapper setup.
2874 from config import Config
2876 if cnf.has_key("DB::Service"):
# Connect via a pg_service.conf service entry.
2877 connstr = "postgresql://service=%s" % cnf["DB::Service"]
2878 elif cnf.has_key("DB::Host"):
# TCP connection; port -1 means "use the default".
2880 connstr = "postgresql://%s" % cnf["DB::Host"]
2881 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
2882 connstr += ":%s" % cnf["DB::Port"]
2883 connstr += "/%s" % cnf["DB::Name"]
# Local (unix-socket) connection; port passed as a query parameter.
2886 connstr = "postgresql:///%s" % cnf["DB::Name"]
2887 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
2888 connstr += "?port=%s" % cnf["DB::Port"]
# Engine options: echo SQL when debugging; pool sizing from config.
2890 engine_args = { 'echo': self.debug }
2891 if cnf.has_key('DB::PoolSize'):
2892 engine_args['pool_size'] = int(cnf['DB::PoolSize'])
2893 if cnf.has_key('DB::MaxOverflow'):
2894 engine_args['max_overflow'] = int(cnf['DB::MaxOverflow'])
# SQLAlchemy 0.6/0.7 need native unicode disabled explicitly when the
# config asks for it.
2895 if sa_major_version in ('0.6', '0.7') and cnf.has_key('DB::Unicode') and \
2896 cnf['DB::Unicode'] == 'false':
2897 engine_args['use_native_unicode'] = False
2899 # Monkey patch a new dialect in in order to support service= syntax
2900 import sqlalchemy.dialects.postgresql
2901 from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
2902 class PGDialect_psycopg2_dak(PGDialect_psycopg2):
2903 def create_connect_args(self, url):
2904 if str(url).startswith('postgresql://service='):
# Drop the leading 'postgresql://' (21 chars up to and including
# 'service='... actually keeps 'service=<name>'s value only).
2906 servicename = str(url)[21:]
2907 return (['service=%s' % servicename], {})
# Anything else goes through the stock psycopg2 dialect.
2909 return PGDialect_psycopg2.create_connect_args(self, url)
2911 sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak
2914 self.db_pg = create_engine(connstr, **engine_args)
2915 self.db_meta = MetaData()
2916 self.db_meta.bind = self.db_pg
2917 self.db_smaker = sessionmaker(bind=self.db_pg,
2921 self.__setuptables()
2922 self.__setupmappers()
# An unreachable database is fatal for dak; bail out with the cause.
2924 except OperationalError as e:
2926 utils.fubar("Cannot connect to database (%s)" % str(e))
# Remember the creating pid so session() can detect forks and rebuild
# the connection in child processes.
2928 self.pid = os.getpid()
2930 def session(self, work_mem = 0):
2932 Returns a new session object. If a work_mem parameter is provided a new
2933 transaction is started and the work_mem parameter is set for this
2934 transaction. The work_mem parameter is measured in MB. A default value
2935 will be used if the parameter is not set.
2937 # reinitialize DBConn in new processes
2938 if self.pid != os.getpid():
2941 session = self.db_smaker()
# SET LOCAL scopes the work_mem override to the current transaction.
2943 session.execute("SET LOCAL work_mem TO '%d MB'" % work_mem)
2946 __all__.append('DBConn')