5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
38 from os.path import normpath
50 import simplejson as json
52 from datetime import datetime, timedelta
53 from errno import ENOENT
54 from tempfile import mkstemp, mkdtemp
55 from subprocess import Popen, PIPE
56 from tarfile import TarFile
58 from inspect import getargspec
61 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
63 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
64 backref, MapperExtension, EXT_CONTINUE, object_mapper, clear_mappers
65 from sqlalchemy import types as sqltypes
66 from sqlalchemy.orm.collections import attribute_mapped_collection
67 from sqlalchemy.ext.associationproxy import association_proxy
69 # Don't remove this, we re-export the exceptions to scripts which import us
70 from sqlalchemy.exc import *
71 from sqlalchemy.orm.exc import NoResultFound
73 # Only import Config until Queue stuff is changed to store its config
75 from config import Config
76 from textutils import fix_maintainer
77 from dak_exceptions import DBUpdateError, NoSourceFieldError, FileExistsError
79 # suppress some deprecation warnings in squeeze related to sqlalchemy
81 warnings.filterwarnings('ignore', \
82 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
84 warnings.filterwarnings('ignore', \
85 "Predicate of partial index .* ignored during reflection", \
89 ################################################################################
91 # Patch in support for the debversion field type so that it works during
95 # that is for sqlalchemy 0.6
96 UserDefinedType = sqltypes.UserDefinedType
98 # this one for sqlalchemy 0.5
99 UserDefinedType = sqltypes.TypeEngine
101 class DebVersion(UserDefinedType):
102 def get_col_spec(self):
105 def bind_processor(self, dialect):
108 # ' = None' is needed for sqlalchemy 0.5:
109 def result_processor(self, dialect, coltype = None):
112 sa_major_version = sqlalchemy.__version__[0:3]
113 if sa_major_version in ["0.5", "0.6", "0.7"]:
114 from sqlalchemy.databases import postgres
115 postgres.ischema_names['debversion'] = DebVersion
117 raise Exception("dak only ported to SQLA versions 0.5 to 0.7. See daklib/dbconn.py")
119 ################################################################################
121 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
123 ################################################################################
125 def session_wrapper(fn):
127 Wrapper around common ".., session=None):" handling. If the wrapped
128 function is called without passing 'session', we create a local one
129 and destroy it when the function ends.
131 Also attaches a commit_or_flush method to the session; if we created a
132 local session, this is a synonym for session.commit(), otherwise it is a
133 synonym for session.flush().
136 def wrapped(*args, **kwargs):
137 private_transaction = False
139 # Find the session object
140 session = kwargs.get('session')
143 if len(args) <= len(getargspec(fn)[0]) - 1:
144 # No session specified as last argument or in kwargs
145 private_transaction = True
146 session = kwargs['session'] = DBConn().session()
148 # Session is last argument in args
152 session = args[-1] = DBConn().session()
153 private_transaction = True
155 if private_transaction:
156 session.commit_or_flush = session.commit
158 session.commit_or_flush = session.flush
161 return fn(*args, **kwargs)
163 if private_transaction:
164 # We created a session; close it.
167 wrapped.__doc__ = fn.__doc__
168 wrapped.func_name = fn.func_name
172 __all__.append('session_wrapper')
174 ################################################################################
176 class ORMObject(object):
178 ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
179 derived classes must implement the properties() method.
182 def properties(self):
184 This method should be implemented by all derived classes and returns a
185 list of the important properties. The properties 'created' and
186 'modified' will be added automatically. A suffix '_count' should be
187 added to properties that are lists or query objects. The most important
188 property name should be returned as the first element in the list
189 because it is used by repr().
195 Returns a JSON representation of the object based on the properties
196 returned from the properties() method.
199 # add created and modified
200 all_properties = self.properties() + ['created', 'modified']
201 for property in all_properties:
202 # check for list or query
203 if property[-6:] == '_count':
204 real_property = property[:-6]
205 if not hasattr(self, real_property):
207 value = getattr(self, real_property)
208 if hasattr(value, '__len__'):
211 elif hasattr(value, 'count'):
212 # query (but not during validation)
213 if self.in_validation:
215 value = value.count()
217 raise KeyError('Do not understand property %s.' % property)
219 if not hasattr(self, property):
222 value = getattr(self, property)
226 elif isinstance(value, ORMObject):
227 # use repr() for ORMObject types
230 # we want a string for all other types because json cannot
233 data[property] = value
234 return json.dumps(data)
238 Returns the name of the class.
240 return type(self).__name__
244 Returns a short string representation of the object using the first
245 element from the properties() method.
247 primary_property = self.properties()[0]
248 value = getattr(self, primary_property)
249 return '<%s %s>' % (self.classname(), str(value))
253 Returns a human readable form of the object using the properties()
256 return '<%s %s>' % (self.classname(), self.json())
258 def not_null_constraints(self):
260 Returns a list of properties that must be not NULL. Derived classes
261 should override this method if needed.
265 validation_message = \
266 "Validation failed because property '%s' must not be empty in object\n%s"
268 in_validation = False
272 This function validates the not NULL constraints as returned by
273 not_null_constraints(). It raises the DBUpdateError exception if
276 for property in self.not_null_constraints():
277 # TODO: It is a bit awkward that the mapper configuration allow
278 # directly setting the numeric _id columns. We should get rid of it
280 if hasattr(self, property + '_id') and \
281 getattr(self, property + '_id') is not None:
283 if not hasattr(self, property) or getattr(self, property) is None:
284 # str() might lead to races due to a 2nd flush
285 self.in_validation = True
286 message = self.validation_message % (property, str(self))
287 self.in_validation = False
288 raise DBUpdateError(message)
292 def get(cls, primary_key, session = None):
294 This is a support function that allows getting an object by its primary
297 Architecture.get(3[, session])
299 instead of the more verbose
301 session.query(Architecture).get(3)
303 return session.query(cls).get(primary_key)
305 def session(self, replace = False):
307 Returns the current session that is associated with the object. May
308 return None is object is in detached state.
311 return object_session(self)
313 def clone(self, session = None):
315 Clones the current object in a new session and returns the new clone. A
316 fresh session is created if the optional session parameter is not
317 provided. The function will fail if a session is provided and has
320 RATIONALE: SQLAlchemy's session is not thread safe. This method clones
321 an existing object to allow several threads to work with their own
322 instances of an ORMObject.
324 WARNING: Only persistent (committed) objects can be cloned. Changes
325 made to the original object that are not committed yet will get lost.
326 The session of the new object will always be rolled back to avoid
330 if self.session() is None:
331 raise RuntimeError( \
332 'Method clone() failed for detached object:\n%s' % self)
333 self.session().flush()
334 mapper = object_mapper(self)
335 primary_key = mapper.primary_key_from_instance(self)
336 object_class = self.__class__
338 session = DBConn().session()
339 elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
340 raise RuntimeError( \
341 'Method clone() failed due to unflushed changes in session.')
342 new_object = session.query(object_class).get(primary_key)
344 if new_object is None:
345 raise RuntimeError( \
346 'Method clone() failed for non-persistent object:\n%s' % self)
349 __all__.append('ORMObject')
351 ################################################################################
353 class Validator(MapperExtension):
355 This class calls the validate() method for each instance for the
356 'before_update' and 'before_insert' events. A global object validator is
357 used for configuring the individual mappers.
360 def before_update(self, mapper, connection, instance):
364 def before_insert(self, mapper, connection, instance):
368 validator = Validator()
370 ################################################################################
372 class ACL(ORMObject):
374 return "<ACL {0}>".format(self.name)
376 __all__.append('ACL')
378 class ACLPerSource(ORMObject):
380 return "<ACLPerSource acl={0} fingerprint={1} source={2} reason={3}>".format(self.acl.name, self.fingerprint.fingerprint, self.source, self.reason)
382 __all__.append('ACLPerSource')
384 ################################################################################
class Architecture(ORMObject):
    """ORM class for a Debian architecture (e.g. 'amd64').

    Instances compare equal/unequal against plain strings via their
    arch_string attribute; comparisons with any other type defer to
    Python's default machinery by returning NotImplemented.
    """

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Allow `architecture == "amd64"` style comparisons.
        if not isinstance(val, str):
            # Signal Python to try the normal comparison operator instead.
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        # Mirror of __eq__ for string operands.
        if not isinstance(val, str):
            # Signal Python to try the normal comparison operator instead.
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # 'arch_string' comes first: repr() uses the first list entry.
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']
409 __all__.append('Architecture')
412 def get_architecture(architecture, session=None):
414 Returns database id for given C{architecture}.
416 @type architecture: string
417 @param architecture: The name of the architecture
419 @type session: Session
420 @param session: Optional SQLA session object (a temporary one will be
421 generated if not supplied)
424 @return: Architecture object for the given arch (None if not present)
427 q = session.query(Architecture).filter_by(arch_string=architecture)
431 except NoResultFound:
434 __all__.append('get_architecture')
436 # TODO: should be removed because the implementation is too trivial
438 def get_architecture_suites(architecture, session=None):
440 Returns list of Suite objects for given C{architecture} name
442 @type architecture: str
443 @param architecture: Architecture name to search for
445 @type session: Session
446 @param session: Optional SQL session object (a temporary one will be
447 generated if not supplied)
450 @return: list of Suite objects for the given name (may be empty)
453 return get_architecture(architecture, session).suites
455 __all__.append('get_architecture_suites')
457 ################################################################################
459 class Archive(object):
460 def __init__(self, *args, **kwargs):
464 return '<Archive %s>' % self.archive_name
466 __all__.append('Archive')
469 def get_archive(archive, session=None):
471 returns database id for given C{archive}.
473 @type archive: string
474 @param archive: the name of the arhive
476 @type session: Session
477 @param session: Optional SQLA session object (a temporary one will be
478 generated if not supplied)
481 @return: Archive object for the given name (None if not present)
484 archive = archive.lower()
486 q = session.query(Archive).filter_by(archive_name=archive)
490 except NoResultFound:
493 __all__.append('get_archive')
495 ################################################################################
497 class ArchiveFile(object):
498 def __init__(self, archive=None, component=None, file=None):
499 self.archive = archive
500 self.component = component
504 return os.path.join(self.archive.path, 'pool', self.component.component_name, self.file.filename)
506 __all__.append('ArchiveFile')
508 ################################################################################
510 class BinContents(ORMObject):
511 def __init__(self, file = None, binary = None):
515 def properties(self):
516 return ['file', 'binary']
518 __all__.append('BinContents')
520 ################################################################################
def subprocess_setup():
    """Restore the default SIGPIPE disposition in a child process.

    Python installs its own SIGPIPE handler on startup, which is usually
    not what non-Python subprocesses (e.g. dpkg-deb) expect; run this as
    the child's preexec function to undo it.
    """
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
527 class DBBinary(ORMObject):
528 def __init__(self, package = None, source = None, version = None, \
529 maintainer = None, architecture = None, poolfile = None, \
530 binarytype = 'deb', fingerprint=None):
531 self.package = package
533 self.version = version
534 self.maintainer = maintainer
535 self.architecture = architecture
536 self.poolfile = poolfile
537 self.binarytype = binarytype
538 self.fingerprint = fingerprint
542 return self.binary_id
544 def properties(self):
545 return ['package', 'version', 'maintainer', 'source', 'architecture', \
546 'poolfile', 'binarytype', 'fingerprint', 'install_date', \
547 'suites_count', 'binary_id', 'contents_count', 'extra_sources']
549 def not_null_constraints(self):
550 return ['package', 'version', 'maintainer', 'source', 'poolfile', \
553 metadata = association_proxy('key', 'value')
555 def scan_contents(self):
557 Yields the contents of the package. Only regular files are yielded and
558 the path names are normalized after converting them from either utf-8
559 or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the
560 package does not contain any regular file.
562 fullpath = self.poolfile.fullpath
563 dpkg = Popen(['dpkg-deb', '--fsys-tarfile', fullpath], stdout = PIPE,
564 preexec_fn = subprocess_setup)
565 tar = TarFile.open(fileobj = dpkg.stdout, mode = 'r|')
566 for member in tar.getmembers():
567 if not member.isdir():
568 name = normpath(member.name)
569 # enforce proper utf-8 encoding
572 except UnicodeDecodeError:
573 name = name.decode('iso8859-1').encode('utf-8')
579 def read_control(self):
581 Reads the control information from a binary.
584 @return: stanza text of the control section.
587 fullpath = self.poolfile.fullpath
588 deb_file = open(fullpath, 'r')
589 stanza = utils.deb_extract_control(deb_file)
594 def read_control_fields(self):
596 Reads the control information from a binary and return
600 @return: fields of the control section as a dictionary.
603 stanza = self.read_control()
604 return apt_pkg.TagSection(stanza)
606 __all__.append('DBBinary')
609 def get_suites_binary_in(package, session=None):
611 Returns list of Suite objects which given C{package} name is in
614 @param package: DBBinary package name to search for
617 @return: list of Suite objects for the given package
620 return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()
622 __all__.append('get_suites_binary_in')
625 def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
627 Returns the component name of the newest binary package in suite_list or
628 None if no package is found. The result can be optionally filtered by a list
629 of architecture names.
632 @param package: DBBinary package name to search for
634 @type suite_list: list of str
635 @param suite_list: list of suite_name items
637 @type arch_list: list of str
638 @param arch_list: optional list of arch_string items that defaults to []
640 @rtype: str or NoneType
641 @return: name of component or None
644 q = session.query(DBBinary).filter_by(package = package). \
645 join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
646 if len(arch_list) > 0:
647 q = q.join(DBBinary.architecture). \
648 filter(Architecture.arch_string.in_(arch_list))
649 binary = q.order_by(desc(DBBinary.version)).first()
653 return binary.poolfile.component.component_name
655 __all__.append('get_component_by_package_suite')
657 ################################################################################
659 class BuildQueue(object):
660 def __init__(self, *args, **kwargs):
664 return '<BuildQueue %s>' % self.queue_name
666 __all__.append('BuildQueue')
668 ################################################################################
class Component(ORMObject):
    """ORM class for an archive component (main, contrib, non-free, ...).

    Instances compare equal/unequal against plain strings via their
    component_name attribute; comparisons with any other type defer to
    Python's default machinery by returning NotImplemented.
    """

    def __init__(self, component_name = None):
        self.component_name = component_name

    def __eq__(self, val):
        # Allow `component == "main"` style comparisons.
        if not isinstance(val, str):
            # Signal Python to try the normal comparison operator instead.
            return NotImplemented
        return self.component_name == val

    def __ne__(self, val):
        # Mirror of __eq__ for string operands.
        if not isinstance(val, str):
            # Signal Python to try the normal comparison operator instead.
            return NotImplemented
        return self.component_name != val

    def properties(self):
        # 'component_name' comes first: repr() uses the first list entry.
        return ['component_name', 'component_id', 'description', \
            'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        return ['component_name']
694 __all__.append('Component')
697 def get_component(component, session=None):
699 Returns database id for given C{component}.
701 @type component: string
702 @param component: The name of the override type
705 @return: the database id for the given component
708 component = component.lower()
710 q = session.query(Component).filter_by(component_name=component)
714 except NoResultFound:
717 __all__.append('get_component')
720 def get_mapped_component(component_name, session=None):
721 """get component after mappings
723 Evaluate component mappings from ComponentMappings in dak.conf for the
724 given component name.
726 @todo: ansgar wants to get rid of this. It's currently only used for
729 @type component_name: str
730 @param component_name: component name
732 @param session: database session
734 @rtype: L{daklib.dbconn.Component} or C{None}
735 @return: component after applying maps or C{None}
738 for m in cnf.value_list("ComponentMappings"):
739 (src, dst) = m.split()
740 if component_name == src:
742 component = session.query(Component).filter_by(component_name=component_name).first()
745 __all__.append('get_mapped_component')
748 def get_component_names(session=None):
750 Returns list of strings of component names.
753 @return: list of strings of component names
756 return [ x.component_name for x in session.query(Component).all() ]
758 __all__.append('get_component_names')
760 ################################################################################
762 class DBConfig(object):
763 def __init__(self, *args, **kwargs):
767 return '<DBConfig %s>' % self.name
769 __all__.append('DBConfig')
771 ################################################################################
774 def get_or_set_contents_file_id(filename, session=None):
776 Returns database id for given filename.
778 If no matching file is found, a row is inserted.
780 @type filename: string
781 @param filename: The filename
782 @type session: SQLAlchemy
783 @param session: Optional SQL session object (a temporary one will be
784 generated if not supplied). If not passed, a commit will be performed at
785 the end of the function, otherwise the caller is responsible for commiting.
788 @return: the database id for the given component
791 q = session.query(ContentFilename).filter_by(filename=filename)
794 ret = q.one().cafilename_id
795 except NoResultFound:
796 cf = ContentFilename()
797 cf.filename = filename
799 session.commit_or_flush()
800 ret = cf.cafilename_id
804 __all__.append('get_or_set_contents_file_id')
807 def get_contents(suite, overridetype, section=None, session=None):
809 Returns contents for a suite / overridetype combination, limiting
810 to a section if not None.
813 @param suite: Suite object
815 @type overridetype: OverrideType
816 @param overridetype: OverrideType object
818 @type section: Section
819 @param section: Optional section object to limit results to
821 @type session: SQLAlchemy
822 @param session: Optional SQL session object (a temporary one will be
823 generated if not supplied)
826 @return: ResultsProxy object set up to return tuples of (filename, section,
830 # find me all of the contents for a given suite
831 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
835 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
836 JOIN content_file_names n ON (c.filename=n.id)
837 JOIN binaries b ON (b.id=c.binary_pkg)
838 JOIN override o ON (o.package=b.package)
839 JOIN section s ON (s.id=o.section)
840 WHERE o.suite = :suiteid AND o.type = :overridetypeid
841 AND b.type=:overridetypename"""
843 vals = {'suiteid': suite.suite_id,
844 'overridetypeid': overridetype.overridetype_id,
845 'overridetypename': overridetype.overridetype}
847 if section is not None:
848 contents_q += " AND s.id = :sectionid"
849 vals['sectionid'] = section.section_id
851 contents_q += " ORDER BY fn"
853 return session.execute(contents_q, vals)
855 __all__.append('get_contents')
857 ################################################################################
859 class ContentFilepath(object):
860 def __init__(self, *args, **kwargs):
864 return '<ContentFilepath %s>' % self.filepath
866 __all__.append('ContentFilepath')
869 def get_or_set_contents_path_id(filepath, session=None):
871 Returns database id for given path.
873 If no matching file is found, a row is inserted.
875 @type filepath: string
876 @param filepath: The filepath
878 @type session: SQLAlchemy
879 @param session: Optional SQL session object (a temporary one will be
880 generated if not supplied). If not passed, a commit will be performed at
881 the end of the function, otherwise the caller is responsible for commiting.
884 @return: the database id for the given path
887 q = session.query(ContentFilepath).filter_by(filepath=filepath)
890 ret = q.one().cafilepath_id
891 except NoResultFound:
892 cf = ContentFilepath()
893 cf.filepath = filepath
895 session.commit_or_flush()
896 ret = cf.cafilepath_id
900 __all__.append('get_or_set_contents_path_id')
902 ################################################################################
904 class ContentAssociation(object):
905 def __init__(self, *args, **kwargs):
909 return '<ContentAssociation %s>' % self.ca_id
911 __all__.append('ContentAssociation')
913 def insert_content_paths(binary_id, fullpaths, session=None):
915 Make sure given path is associated with given binary id
918 @param binary_id: the id of the binary
919 @type fullpaths: list
920 @param fullpaths: the list of paths of the file being associated with the binary
921 @type session: SQLAlchemy session
922 @param session: Optional SQLAlchemy session. If this is passed, the caller
923 is responsible for ensuring a transaction has begun and committing the
924 results or rolling back based on the result code. If not passed, a commit
925 will be performed at the end of the function, otherwise the caller is
926 responsible for commiting.
928 @return: True upon success
933 session = DBConn().session()
938 def generate_path_dicts():
939 for fullpath in fullpaths:
940 if fullpath.startswith( './' ):
941 fullpath = fullpath[2:]
943 yield {'filename':fullpath, 'id': binary_id }
945 for d in generate_path_dicts():
946 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
955 traceback.print_exc()
957 # Only rollback if we set up the session ourself
964 __all__.append('insert_content_paths')
966 ################################################################################
968 class DSCFile(object):
969 def __init__(self, *args, **kwargs):
973 return '<DSCFile %s>' % self.dscfile_id
975 __all__.append('DSCFile')
978 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
980 Returns a list of DSCFiles which may be empty
982 @type dscfile_id: int (optional)
983 @param dscfile_id: the dscfile_id of the DSCFiles to find
985 @type source_id: int (optional)
986 @param source_id: the source id related to the DSCFiles to find
988 @type poolfile_id: int (optional)
989 @param poolfile_id: the poolfile id related to the DSCFiles to find
992 @return: Possibly empty list of DSCFiles
995 q = session.query(DSCFile)
997 if dscfile_id is not None:
998 q = q.filter_by(dscfile_id=dscfile_id)
1000 if source_id is not None:
1001 q = q.filter_by(source_id=source_id)
1003 if poolfile_id is not None:
1004 q = q.filter_by(poolfile_id=poolfile_id)
1008 __all__.append('get_dscfiles')
1010 ################################################################################
1012 class ExternalOverride(ORMObject):
1013 def __init__(self, *args, **kwargs):
1017 return '<ExternalOverride %s = %s: %s>' % (self.package, self.key, self.value)
1019 __all__.append('ExternalOverride')
1021 ################################################################################
1023 class PoolFile(ORMObject):
1024 def __init__(self, filename = None, filesize = -1, \
1026 self.filename = filename
1027 self.filesize = filesize
1028 self.md5sum = md5sum
1032 session = DBConn().session().object_session(self)
1033 af = session.query(ArchiveFile).join(Archive).filter(ArchiveFile.file == self).first()
1037 def component(self):
1038 session = DBConn().session().object_session(self)
1039 component_id = session.query(ArchiveFile.component_id).filter(ArchiveFile.file == self) \
1040 .group_by(ArchiveFile.component_id).one()
1041 return session.query(Component).get(component_id)
1045 return os.path.basename(self.filename)
1047 def is_valid(self, filesize = -1, md5sum = None):
1048 return self.filesize == long(filesize) and self.md5sum == md5sum
1050 def properties(self):
1051 return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
1052 'sha256sum', 'source', 'binary', 'last_used']
1054 def not_null_constraints(self):
1055 return ['filename', 'md5sum']
1057 def identical_to(self, filename):
1059 compare size and hash with the given file
1062 @return: true if the given file has the same size and hash as this object; false otherwise
1064 st = os.stat(filename)
1065 if self.filesize != st.st_size:
1068 f = open(filename, "r")
1069 sha256sum = apt_pkg.sha256sum(f)
1070 if sha256sum != self.sha256sum:
1075 __all__.append('PoolFile')
1078 def get_poolfile_like_name(filename, session=None):
1080 Returns an array of PoolFile objects which are like the given name
1082 @type filename: string
1083 @param filename: the filename of the file to check against the DB
1086 @return: array of PoolFile objects
1089 # TODO: There must be a way of properly using bind parameters with %FOO%
1090 q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1094 __all__.append('get_poolfile_like_name')
1096 ################################################################################
1098 class Fingerprint(ORMObject):
1099 def __init__(self, fingerprint = None):
1100 self.fingerprint = fingerprint
1102 def properties(self):
1103 return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
1106 def not_null_constraints(self):
1107 return ['fingerprint']
1109 __all__.append('Fingerprint')
1112 def get_fingerprint(fpr, session=None):
1114 Returns Fingerprint object for given fpr.
1117 @param fpr: The fpr to find / add
1119 @type session: SQLAlchemy
1120 @param session: Optional SQL session object (a temporary one will be
1121 generated if not supplied).
1124 @return: the Fingerprint object for the given fpr or None
1127 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1131 except NoResultFound:
1136 __all__.append('get_fingerprint')
1139 def get_or_set_fingerprint(fpr, session=None):
1141 Returns Fingerprint object for given fpr.
1143 If no matching fpr is found, a row is inserted.
1146 @param fpr: The fpr to find / add
1148 @type session: SQLAlchemy
1149 @param session: Optional SQL session object (a temporary one will be
1150 generated if not supplied). If not passed, a commit will be performed at
1151 the end of the function, otherwise the caller is responsible for commiting.
1152 A flush will be performed either way.
1155 @return: the Fingerprint object for the given fpr
1158 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1162 except NoResultFound:
1163 fingerprint = Fingerprint()
1164 fingerprint.fingerprint = fpr
1165 session.add(fingerprint)
1166 session.commit_or_flush()
1171 __all__.append('get_or_set_fingerprint')
1173 ################################################################################
1175 # Helper routine for Keyring class
1176 def get_ldap_name(entry):
1178 for k in ["cn", "mn", "sn"]:
1180 if ret and ret[0] != "" and ret[0] != "-":
1182 return " ".join(name)
1184 ################################################################################
1186 class Keyring(object):
1187 gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
1188 " --with-colons --fingerprint --fingerprint"
1193 def __init__(self, *args, **kwargs):
1197 return '<Keyring %s>' % self.keyring_name
def de_escape_gpg_str(self, txt):
    """Decode gpg's C-style \\xNN escape sequences in txt.

    gpg --with-colons output escapes non-ASCII bytes as literal
    backslash-x-hex-hex sequences; replace each with the character it
    encodes and return the resulting string.
    """
    # re.split with a capturing group alternates literal text (even
    # indices) and escape sequences (odd indices).
    pieces = re.split(r'(\\x..)', txt)
    decoded = [
        "%c" % int(piece[2:], 16) if idx % 2 else piece
        for idx, piece in enumerate(pieces)
    ]
    return "".join(decoded)
1205 def parse_address(self, uid):
1206 """parses uid and returns a tuple of real name and email address"""
1208 (name, address) = email.Utils.parseaddr(uid)
1209 name = re.sub(r"\s*[(].*[)]", "", name)
1210 name = self.de_escape_gpg_str(name)
1213 return (name, address)
1215 def load_keys(self, keyring):
1216 if not self.keyring_id:
1217 raise Exception('Must be initialized with database information')
1219 k = os.popen(self.gpg_invocation % keyring, "r")
1224 field = line.split(":")
1225 if field[0] == "pub":
1228 (name, addr) = self.parse_address(field[9])
1230 self.keys[key]["email"] = addr
1231 self.keys[key]["name"] = name
1232 self.keys[key]["fingerprints"] = []
1234 elif key and field[0] == "sub" and len(field) >= 12:
1235 signingkey = ("s" in field[11])
1236 elif key and field[0] == "uid":
1237 (name, addr) = self.parse_address(field[9])
1238 if "email" not in self.keys[key] and "@" in addr:
1239 self.keys[key]["email"] = addr
1240 self.keys[key]["name"] = name
1241 elif signingkey and field[0] == "fpr":
1242 self.keys[key]["fingerprints"].append(field[9])
1243 self.fpr_lookup[field[9]] = key
1245 def import_users_from_ldap(self, session):
1249 LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
1250 LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
1252 l = ldap.open(LDAPServer)
1253 l.simple_bind_s("","")
1254 Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
1255 "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
1256 ["uid", "keyfingerprint", "cn", "mn", "sn"])
1258 ldap_fin_uid_id = {}
1265 uid = entry["uid"][0]
1266 name = get_ldap_name(entry)
1267 fingerprints = entry["keyFingerPrint"]
1269 for f in fingerprints:
1270 key = self.fpr_lookup.get(f, None)
1271 if key not in self.keys:
1273 self.keys[key]["uid"] = uid
1277 keyid = get_or_set_uid(uid, session).uid_id
1278 byuid[keyid] = (uid, name)
1279 byname[uid] = (keyid, name)
1281 return (byname, byuid)
1283 def generate_users_from_keyring(self, format, session):
1287 for x in self.keys.keys():
1288 if "email" not in self.keys[x]:
1290 self.keys[x]["uid"] = format % "invalid-uid"
1292 uid = format % self.keys[x]["email"]
1293 keyid = get_or_set_uid(uid, session).uid_id
1294 byuid[keyid] = (uid, self.keys[x]["name"])
1295 byname[uid] = (keyid, self.keys[x]["name"])
1296 self.keys[x]["uid"] = uid
1299 uid = format % "invalid-uid"
1300 keyid = get_or_set_uid(uid, session).uid_id
1301 byuid[keyid] = (uid, "ungeneratable user id")
1302 byname[uid] = (keyid, "ungeneratable user id")
1304 return (byname, byuid)
1306 __all__.append('Keyring')
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @rtype: Keyring
    @return: the Keyring object for this keyring
    """
    # NOTE(review): upstream decorates these getters with @session_wrapper
    # (not visible in this extract) to auto-create `session` — confirm.
    q = session.query(Keyring).filter_by(keyring_name=keyring)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_keyring')
def get_active_keyring_paths(session=None):
    """
    @rtype: list of strings
    @return: list of active keyring paths, highest priority first
    """
    return [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).order_by(desc(Keyring.priority)).all() ]

__all__.append('get_active_keyring_paths')
def get_primary_keyring_path(session=None):
    """
    Get the full path to the highest priority active keyring

    @rtype: str or None
    @return: path to the active keyring with the highest priority or None if no
             keyring is configured
    """
    keyrings = get_active_keyring_paths()

    if len(keyrings) > 0:
        return keyrings[0]
    else:
        return None

__all__.append('get_primary_keyring_path')
1358 ################################################################################
1360 class DBChange(object):
1361 def __init__(self, *args, **kwargs):
1365 return '<DBChange %s>' % self.changesname
1367 __all__.append('DBChange')
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return: DBChange object for the given filename (C{None} if not present)
    """
    q = session.query(DBChange).filter_by(changesname=filename)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_dbchange')
1394 ################################################################################
class Maintainer(ORMObject):
    """A maintainer name+email string (mapped to the C{maintainer} table)."""
    def __init__(self, name = None):
        self.name = name

    def properties(self):
        return ['name', 'maintainer_id']

    def not_null_constraints(self):
        return ['name']

    def get_split_maintainer(self):
        """
        @rtype: tuple
        @return: fix_maintainer()'s 4-tuple for self.name, or four empty
            strings if no name is set
        """
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())

__all__.append('Maintainer')
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """
    q = session.query(Maintainer).filter_by(name=name)
    try:
        ret = q.one()
    except NoResultFound:
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()
        ret = maintainer

    return ret

__all__.append('get_or_set_maintainer')
def get_maintainer(maintainer_id, session=None):
    """
    Return the name of the maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer
    @return: the Maintainer with this C{maintainer_id}
    """
    return session.query(Maintainer).get(maintainer_id)

__all__.append('get_maintainer')
1465 ################################################################################
1467 class NewComment(object):
1468 def __init__(self, *args, **kwargs):
1472 return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
1474 __all__.append('NewComment')
def has_new_comment(package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """

    q = session.query(NewComment)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    return bool(q.count() > 0)

__all__.append('has_new_comment')
def get_new_comments(package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """
    q = session.query(NewComment)
    if package is not None: q = q.filter_by(package=package)
    if version is not None: q = q.filter_by(version=version)
    if comment_id is not None: q = q.filter_by(comment_id=comment_id)

    return q.all()

__all__.append('get_new_comments')
1535 ################################################################################
class Override(ORMObject):
    """An override entry: per-suite/component section+priority for a package."""
    def __init__(self, package = None, suite = None, component = None, overridetype = None, \
        section = None, priority = None):
        self.package = package
        self.suite = suite
        self.component = component
        self.overridetype = overridetype
        self.section = section
        self.priority = priority

    def properties(self):
        return ['package', 'suite', 'component', 'overridetype', 'section', \
            'priority']

    def not_null_constraints(self):
        return ['package', 'suite', 'component', 'overridetype', 'section']

__all__.append('Override')
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to.  If
                  None, don't limit.  Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
                      limit to.  If None, don't limit.  Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
                         limit to.  If None, don't limit.  Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """
    q = session.query(Override)
    q = q.filter_by(package=package)

    # Each optional filter accepts either a single name or a list of names.
    if suite is not None:
        if not isinstance(suite, list): suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list): component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list): overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

    return q.all()

__all__.append('get_override')
1604 ################################################################################
class OverrideType(ORMObject):
    """An override type such as C{deb}, C{udeb} or C{dsc}."""
    def __init__(self, overridetype = None):
        self.overridetype = overridetype

    def properties(self):
        return ['overridetype', 'overridetype_id', 'overrides_count']

    def not_null_constraints(self):
        return ['overridetype']

__all__.append('OverrideType')
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: OverrideType
    @return: the OverrideType object for the given override type (C{None} if
        not present)
    """
    q = session.query(OverrideType).filter_by(overridetype=override_type)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_override_type')
1643 ################################################################################
1645 class PolicyQueue(object):
1646 def __init__(self, *args, **kwargs):
1650 return '<PolicyQueue %s>' % self.queue_name
1652 __all__.append('PolicyQueue')
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue (C{None} if not present)
    """
    q = session.query(PolicyQueue).filter_by(queue_name=queuename)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_policy_queue')
1679 ################################################################################
class PolicyQueueUpload(object):
    """An upload waiting in a policy queue."""
    def __cmp__(self, other):
        # Order by source name, then version, then source-before-binary,
        # then changes file name (Python 2 rich ordering via __cmp__).
        ret = cmp(self.changes.source, other.changes.source)
        if ret == 0:
            ret = apt_pkg.version_compare(self.changes.version, other.changes.version)
        if ret == 0:
            if self.source is not None and other.source is None:
                ret = -1
            elif self.source is None and other.source is not None:
                ret = 1
        if ret == 0:
            ret = cmp(self.changes.changesname, other.changes.changesname)
        return ret

__all__.append('PolicyQueueUpload')
1697 ################################################################################
1699 class PolicyQueueByhandFile(object):
1702 __all__.append('PolicyQueueByhandFile')
1704 ################################################################################
class Priority(ORMObject):
    """A package priority (required, important, standard, optional, extra)."""
    def __init__(self, priority = None, level = None):
        self.priority = priority
        self.level = level

    def properties(self):
        return ['priority', 'priority_id', 'level', 'overrides_count']

    def not_null_constraints(self):
        return ['priority', 'level']

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented

__all__.append('Priority')
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority (C{None} if not present)
    """
    q = session.query(Priority).filter_by(priority=priority)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_priority')
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """
    ret = {}
    q = session.query(Priority)
    for x in q.all():
        ret[x.priority] = x.priority_id

    return ret

__all__.append('get_priorities')
1778 ################################################################################
class Section(ORMObject):
    """An archive section (admin, devel, libs, ...)."""
    def __init__(self, section = None):
        self.section = section

    def properties(self):
        return ['section', 'section_id', 'overrides_count']

    def not_null_constraints(self):
        return ['section']

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented

__all__.append('Section')
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name (C{None} if not present)
    """
    q = session.query(Section).filter_by(section=section)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_section')
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """
    ret = {}
    q = session.query(Section)
    for x in q.all():
        ret[x.section] = x.section_id

    return ret

__all__.append('get_sections')
1851 ################################################################################
class SrcContents(ORMObject):
    """One file name shipped by a source package (src_contents table)."""
    def __init__(self, file = None, source = None):
        self.file = file
        self.source = source

    def properties(self):
        return ['file', 'source']

__all__.append('SrcContents')
1863 ################################################################################
1865 from debian.debfile import Deb822
1867 # Temporary Deb822 subclass to fix bugs with : handling; see #597249
# Temporary Deb822 subclass to fix bugs with : handling; see #597249
class Dak822(Deb822):
    def _internal_parser(self, sequence, fields=None):
        """Parse an RFC822-style paragraph into this dict, keeping only
        C{fields} if given (None keeps everything)."""
        # The key is non-whitespace, non-colon characters before any colon.
        key_part = r"^(?P<key>[^: \t\n\r\f\v]+)\s*:\s*"
        single = re.compile(key_part + r"(?P<data>\S.*?)\s*$")
        multi = re.compile(key_part + r"$")
        multidata = re.compile(r"^\s(?P<data>.+?)\s*$")

        wanted_field = lambda f: fields is None or f in fields

        if isinstance(sequence, basestring):
            sequence = sequence.splitlines()

        curkey = None
        content = ""

        for line in self.gpg_stripped_paragraph(sequence):
            m = single.match(line)
            if m:
                # Single-line field: flush any field in progress first.
                if curkey:
                    self[curkey] = content

                if not wanted_field(m.group('key')):
                    curkey = None
                    continue

                curkey = m.group('key')
                content = m.group('data')
                continue

            m = multi.match(line)
            if m:
                # Start of a multi-line field with empty first line.
                if curkey:
                    self[curkey] = content

                if not wanted_field(m.group('key')):
                    curkey = None
                    continue

                curkey = m.group('key')
                content = ""
                continue

            m = multidata.match(line)
            if m:
                content += '\n' + line # XXX not m.group('data')?
                continue

        # Flush the final field.
        if curkey:
            self[curkey] = content

__all__.append('Dak822')
class DBSource(ORMObject):
    """A source package version in the archive (mapped to C{source})."""
    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None, fingerprint = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date
        self.fingerprint = fingerprint

    @property
    def pkid(self):
        return self.source_id

    def properties(self):
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \
            'install_date', 'binaries_count', 'uploaders_count']

    def not_null_constraints(self):
        return ['source', 'version', 'install_date', 'maintainer', \
            'changedby', 'poolfile']

    def read_control_fields(self):
        """
        Reads the control information from a dsc

        @rtype: tuple
        @return: fields is the dsc information in a dictionary form
        """
        fullpath = self.poolfile.fullpath
        fields = Dak822(open(self.poolfile.fullpath, 'r'))
        return fields

    # source_metadata rows exposed as a plain {MetadataKey: value} mapping
    metadata = association_proxy('key', 'value')

    def scan_contents(self):
        """
        Returns a set of names for non directories. The path names are
        normalized after converting them from either utf-8 or iso8859-1
        encoding.
        """
        fullpath = self.poolfile.fullpath
        from daklib.contents import UnpackedSource
        unpacked = UnpackedSource(fullpath)
        fileset = set()
        for name in unpacked.get_all_filenames():
            # enforce proper utf-8 encoding
            try:
                name.decode('utf-8')
            except UnicodeDecodeError:
                name = name.decode('iso8859-1').encode('utf-8')
            fileset.add(name)
        return fileset

__all__.append('DBSource')
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
      1. exact match     => 1.0-3
      2. bin-only NMU    => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: list
    @param suites: list of suites to check in, default I{any}
        (note: default list is never mutated, so sharing it is safe)

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: returns 1 if a source with expected version is found, otherwise 0
    """
    cnf = Config()
    ret = True

    from daklib.regexes import re_bin_only_nmu
    # Strip a binNMU suffix so 1.0-3+b1 matches source 1.0-3.
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))
        if suite != "any":
            # source must exist in 'suite' or a suite that is enhanced by 'suite'
            s = get_suite(suite, session)
            enhances_vcs = session.query(VersionCheck).filter(VersionCheck.suite==s).filter_by(check='Enhances')
            considered_suites = [ vc.reference for vc in enhances_vcs ]
            considered_suites.append(s)

            q = q.filter(DBSource.suites.any(Suite.suite_id.in_([s.suite_id for s in considered_suites])))

        if q.count() > 0:
            continue

        # No source found so return not ok
        ret = False
        break

    return ret

__all__.append('source_exists')
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """
    return session.query(Suite).filter(Suite.sources.any(source=source)).all()

__all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @type source: str
    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect.  If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBSource objects for the given name (may be empty)
    """
    q = session.query(DBSource).filter_by(source=source)

    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)

    return q.all()

__all__.append('get_sources_from_name')
2083 # FIXME: This function fails badly if it finds more than 1 source package and
2084 # its implementation is trivial enough to be inlined.
def get_source_in_suite(source, suite, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite: string
    @param suite: the suite name

    @rtype: string
    @return: the version for I{source} in I{suite}
    """
    q = get_suite(suite, session).get_sources(source)
    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_source_in_suite')
def import_metadata_into_db(obj, session=None):
    """
    This routine works on either DBBinary or DBSource objects and imports
    their metadata into the database
    """
    fields = obj.read_control_fields()
    for k in fields.keys():
        try:
            # Try plain ASCII first
            val = str(fields[k])
        except UnicodeEncodeError:
            # Fall back to UTF-8
            try:
                val = fields[k].encode('utf-8')
            except UnicodeEncodeError:
                # Finally try iso8859-1
                val = fields[k].encode('iso8859-1')
                # Otherwise we allow the exception to percolate up and we cause
                # a reject as someone is playing silly buggers

        obj.metadata[get_or_set_metadatakey(k, session)] = val

    session.commit_or_flush()

__all__.append('import_metadata_into_db')
2139 ################################################################################
2141 class SrcFormat(object):
2142 def __init__(self, *args, **kwargs):
2146 return '<SrcFormat %s>' % (self.format_name)
2148 __all__.append('SrcFormat')
2150 ################################################################################
# (display name, Suite attribute) pairs used by Suite.details()
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Label', 'label'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
2168 # Why the heck don't we have any UNIQUE constraints in table suite?
2169 # TODO: Add UNIQUE constraints for appropriate columns.
class Suite(ORMObject):
    """A distribution suite such as I{unstable} or I{stable}."""
    def __init__(self, suite_name = None, version = None):
        self.suite_name = suite_name
        self.version = version

    def properties(self):
        return ['suite_name', 'version', 'sources_count', 'binaries_count', \
            'overrides_count']

    def not_null_constraints(self):
        return ['suite_name']

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.suite_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.suite_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def details(self):
        """Return a human-readable "Field: value" listing of SUITE_FIELDS."""
        ret = []
        for disp, field in SUITE_FIELDS:
            val = getattr(self, field, None)
            if val is not None:
                ret.append("%s: %s" % (disp, val))

        return "\n".join(ret)

    def get_architectures(self, skipsrc=False, skipall=False):
        """
        Returns list of Architecture objects

        @type skipsrc: boolean
        @param skipsrc: Whether to skip returning the 'source' architecture entry
        (Default False)

        @type skipall: boolean
        @param skipall: Whether to skip returning the 'all' architecture entry
        (Default False)

        @rtype: list
        @return: list of Architecture objects for the given name (may be empty)
        """
        q = object_session(self).query(Architecture).with_parent(self)
        if skipsrc:
            q = q.filter(Architecture.arch_string != 'source')
        if skipall:
            q = q.filter(Architecture.arch_string != 'all')
        return q.order_by(Architecture.arch_string).all()

    def get_sources(self, source):
        """
        Returns a query object representing DBSource that is part of C{suite}.

          - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}

        @type source: string
        @param source: source package name

        @rtype: sqlalchemy.orm.query.Query
        @return: a query of DBSource
        """
        session = object_session(self)
        return session.query(DBSource).filter_by(source = source). \
            with_parent(self)

    def get_overridesuite(self):
        """Return the suite overrides are read from: self, or the configured
        override suite if one is set."""
        if self.overridesuite is None:
            return self
        else:
            return object_session(self).query(Suite).filter_by(suite_name=self.overridesuite).one()

    @property
    def path(self):
        # dists/<suite> directory inside the suite's archive
        return os.path.join(self.archive.path, 'dists', self.suite_name)

__all__.append('Suite')
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Suite
    @return: Suite object for the requested suite name (None if not present)
    """
    q = session.query(Suite).filter_by(suite_name=suite)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_suite')
2281 ################################################################################
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name. The list is
    empty if suite does not exist.

    @type suite: str
    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry
    (Default False)

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry
    (Default False)

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Architecture objects for the given name (may be empty)
    """
    try:
        # get_suite() returns None for an unknown suite; the attribute access
        # then raises AttributeError, which we map to an empty list.
        return get_suite(suite, session).get_architectures(skipsrc, skipall)
    except AttributeError:
        return []

__all__.append('get_suite_architectures')
2315 ################################################################################
class Uid(ORMObject):
    """A user id (Debian login or generated uid) owning fingerprints."""
    def __init__(self, uid = None, name = None):
        self.uid = uid
        self.name = name

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.uid == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.uid != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        return ['uid', 'name', 'fingerprint']

    def not_null_constraints(self):
        return ['uid']

__all__.append('Uid')
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: Uid
    @return: the uid object for the given uidname
    """
    q = session.query(Uid).filter_by(uid=uidname)

    try:
        ret = q.one()
    except NoResultFound:
        uid = Uid()
        uid.uid = uidname
        session.add(uid)
        session.commit_or_flush()
        ret = uid

    return ret

__all__.append('get_or_set_uid')
def get_uid_from_fingerprint(fpr, session=None):
    """
    Return the Uid owning fingerprint C{fpr}, or C{None} if unknown.
    """
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_uid_from_fingerprint')
2388 ################################################################################
class MetadataKey(ORMObject):
    """A control-field name used in binaries_metadata / source_metadata."""
    def __init__(self, key = None):
        self.key = key

    def properties(self):
        return ['key']

    def not_null_constraints(self):
        return ['key']

__all__.append('MetadataKey')
def get_or_set_metadatakey(keyname, session=None):
    """
    Returns MetadataKey object for given C{keyname}.

    If no matching keyname is found, a row is inserted.

    @type keyname: string
    @param keyname: The keyname to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: MetadataKey
    @return: the metadatakey object for the given keyname
    """
    q = session.query(MetadataKey).filter_by(key=keyname)

    try:
        ret = q.one()
    except NoResultFound:
        ret = MetadataKey(keyname)
        session.add(ret)
        session.commit_or_flush()

    return ret

__all__.append('get_or_set_metadatakey')
2434 ################################################################################
class BinaryMetadata(ORMObject):
    """One control-field value of a binary package."""
    def __init__(self, key = None, value = None, binary = None):
        self.key = key
        self.value = value
        self.binary = binary

    def properties(self):
        return ['binary', 'key', 'value']

    def not_null_constraints(self):
        return ['value']

__all__.append('BinaryMetadata')
2450 ################################################################################
class SourceMetadata(ORMObject):
    """One control-field value of a source package."""
    def __init__(self, key = None, value = None, source = None):
        self.key = key
        self.value = value
        self.source = source

    def properties(self):
        return ['source', 'key', 'value']

    def not_null_constraints(self):
        return ['value']

__all__.append('SourceMetadata')
2466 ################################################################################
class VersionCheck(ORMObject):
    """A version constraint between two suites (MustBeNewerThan, Enhances, ...)."""
    def __init__(self, *args, **kwargs):
        pass

    def properties(self):
        #return ['suite_id', 'check', 'reference_id']
        return ['check']

    def not_null_constraints(self):
        return ['suite', 'check', 'reference']

__all__.append('VersionCheck')
def get_version_checks(suite_name, check = None, session = None):
    """
    Return the list of VersionCheck rows for C{suite_name}, optionally
    restricted to one C{check} type.  Unknown suites yield an empty list.
    """
    suite = get_suite(suite_name, session)
    if suite is None:
        # Make sure that what we return is iterable so that list comprehensions
        # involving this don't cause a traceback
        return []

    q = session.query(VersionCheck).filter_by(suite=suite)
    if check:
        q = q.filter_by(check=check)
    return q.all()

__all__.append('get_version_checks')
2495 ################################################################################
2497 class DBConn(object):
2499 database module init.
2503 def __init__(self, *args, **kwargs):
2504 self.__dict__ = self.__shared_state
2506 if not getattr(self, 'initialised', False):
2507 self.initialised = True
2508 self.debug = kwargs.has_key('debug')
# NOTE(review): this extract is missing many lines of the table/view name
# lists below (the embedded original line numbers are non-contiguous), so the
# tuples as shown are incomplete — do not treat them as the full schema.
2511 def __setuptables(self):
# Reflect each named table from the live database schema (autoload) and
# expose it as self.tbl_<name>; likewise views as self.view_<name>.
2514 'acl_architecture_map',
2515 'acl_fingerprint_map',
2522 'binaries_metadata',
2529 'external_overrides',
2530 'extra_src_references',
2532 'files_archive_map',
2538 # TODO: the maintainer column in table override should be removed.
2542 'policy_queue_upload',
2543 'policy_queue_upload_binaries_map',
2544 'policy_queue_byhand_file',
2555 'suite_architectures',
2556 'suite_build_queue_copy',
2557 'suite_src_formats',
# The names below are database views, reflected read-only.
2563 'almost_obsolete_all_associations',
2564 'almost_obsolete_src_associations',
2565 'any_associations_source',
2566 'bin_associations_binaries',
2567 'binaries_suite_arch',
2570 'newest_all_associations',
2571 'newest_any_associations',
2573 'newest_src_association',
2574 'obsolete_all_associations',
2575 'obsolete_any_associations',
2576 'obsolete_any_by_all_associations',
2577 'obsolete_src_associations',
2579 'src_associations_bin',
2580 'src_associations_src',
2581 'suite_arch_by_name',
2584 for table_name in tables:
# useexisting avoids re-definition errors if a table was already reflected.
2585 table = Table(table_name, self.db_meta, \
2586 autoload=True, useexisting=True)
2587 setattr(self, 'tbl_%s' % table_name, table)
2589 for view_name in views:
2590 view = Table(view_name, self.db_meta, autoload=True)
2591 setattr(self, 'view_%s' % view_name, view)
2593 def __setupmappers(self):
2594 mapper(Architecture, self.tbl_architecture,
2595 properties = dict(arch_id = self.tbl_architecture.c.id,
2596 suites = relation(Suite, secondary=self.tbl_suite_architectures,
2597 order_by=self.tbl_suite.c.suite_name,
2598 backref=backref('architectures', order_by=self.tbl_architecture.c.arch_string))),
2599 extension = validator)
2601 mapper(ACL, self.tbl_acl,
2603 architectures = relation(Architecture, secondary=self.tbl_acl_architecture_map, collection_class=set),
2604 fingerprints = relation(Fingerprint, secondary=self.tbl_acl_fingerprint_map, collection_class=set),
2605 match_keyring = relation(Keyring, primaryjoin=(self.tbl_acl.c.match_keyring_id == self.tbl_keyrings.c.id)),
2606 per_source = relation(ACLPerSource, collection_class=set),
2609 mapper(ACLPerSource, self.tbl_acl_per_source,
2611 acl = relation(ACL),
2612 fingerprint = relation(Fingerprint),
2615 mapper(Archive, self.tbl_archive,
2616 properties = dict(archive_id = self.tbl_archive.c.id,
2617 archive_name = self.tbl_archive.c.name))
2619 mapper(ArchiveFile, self.tbl_files_archive_map,
2620 properties = dict(archive = relation(Archive, backref='files'),
2621 component = relation(Component),
2622 file = relation(PoolFile, backref='archives')))
2624 mapper(BuildQueue, self.tbl_build_queue,
2625 properties = dict(queue_id = self.tbl_build_queue.c.id,
2626 suite = relation(Suite, primaryjoin=(self.tbl_build_queue.c.suite_id==self.tbl_suite.c.id))))
2628 mapper(DBBinary, self.tbl_binaries,
2629 properties = dict(binary_id = self.tbl_binaries.c.id,
2630 package = self.tbl_binaries.c.package,
2631 version = self.tbl_binaries.c.version,
2632 maintainer_id = self.tbl_binaries.c.maintainer,
2633 maintainer = relation(Maintainer),
2634 source_id = self.tbl_binaries.c.source,
2635 source = relation(DBSource, backref='binaries'),
2636 arch_id = self.tbl_binaries.c.architecture,
2637 architecture = relation(Architecture),
2638 poolfile_id = self.tbl_binaries.c.file,
2639 poolfile = relation(PoolFile),
2640 binarytype = self.tbl_binaries.c.type,
2641 fingerprint_id = self.tbl_binaries.c.sig_fpr,
2642 fingerprint = relation(Fingerprint),
2643 install_date = self.tbl_binaries.c.install_date,
2644 suites = relation(Suite, secondary=self.tbl_bin_associations,
2645 backref=backref('binaries', lazy='dynamic')),
2646 extra_sources = relation(DBSource, secondary=self.tbl_extra_src_references,
2647 backref=backref('extra_binary_references', lazy='dynamic')),
2648 key = relation(BinaryMetadata, cascade='all',
2649 collection_class=attribute_mapped_collection('key'))),
2650 extension = validator)
2652 mapper(Component, self.tbl_component,
2653 properties = dict(component_id = self.tbl_component.c.id,
2654 component_name = self.tbl_component.c.name),
2655 extension = validator)
2657 mapper(DBConfig, self.tbl_config,
2658 properties = dict(config_id = self.tbl_config.c.id))
2660 mapper(DSCFile, self.tbl_dsc_files,
2661 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
2662 source_id = self.tbl_dsc_files.c.source,
2663 source = relation(DBSource),
2664 poolfile_id = self.tbl_dsc_files.c.file,
2665 poolfile = relation(PoolFile)))
2667 mapper(ExternalOverride, self.tbl_external_overrides,
2669 suite_id = self.tbl_external_overrides.c.suite,
2670 suite = relation(Suite),
2671 component_id = self.tbl_external_overrides.c.component,
2672 component = relation(Component)))
2674 mapper(PoolFile, self.tbl_files,
2675 properties = dict(file_id = self.tbl_files.c.id,
2676 filesize = self.tbl_files.c.size),
2677 extension = validator)
2679 mapper(Fingerprint, self.tbl_fingerprint,
2680 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
2681 uid_id = self.tbl_fingerprint.c.uid,
2682 uid = relation(Uid),
2683 keyring_id = self.tbl_fingerprint.c.keyring,
2684 keyring = relation(Keyring),
2685 acl = relation(ACL)),
2686 extension = validator)
2688 mapper(Keyring, self.tbl_keyrings,
2689 properties = dict(keyring_name = self.tbl_keyrings.c.name,
2690 keyring_id = self.tbl_keyrings.c.id))
2692 mapper(DBChange, self.tbl_changes,
2693 properties = dict(change_id = self.tbl_changes.c.id,
2694 seen = self.tbl_changes.c.seen,
2695 source = self.tbl_changes.c.source,
2696 binaries = self.tbl_changes.c.binaries,
2697 architecture = self.tbl_changes.c.architecture,
2698 distribution = self.tbl_changes.c.distribution,
2699 urgency = self.tbl_changes.c.urgency,
2700 maintainer = self.tbl_changes.c.maintainer,
2701 changedby = self.tbl_changes.c.changedby,
2702 date = self.tbl_changes.c.date,
2703 version = self.tbl_changes.c.version))
2705 mapper(Maintainer, self.tbl_maintainer,
2706 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
2707 maintains_sources = relation(DBSource, backref='maintainer',
2708 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
2709 changed_sources = relation(DBSource, backref='changedby',
2710 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
2711 extension = validator)
2713 mapper(NewComment, self.tbl_new_comments,
2714 properties = dict(comment_id = self.tbl_new_comments.c.id))
2716 mapper(Override, self.tbl_override,
2717 properties = dict(suite_id = self.tbl_override.c.suite,
2718 suite = relation(Suite, \
2719 backref=backref('overrides', lazy='dynamic')),
2720 package = self.tbl_override.c.package,
2721 component_id = self.tbl_override.c.component,
2722 component = relation(Component, \
2723 backref=backref('overrides', lazy='dynamic')),
2724 priority_id = self.tbl_override.c.priority,
2725 priority = relation(Priority, \
2726 backref=backref('overrides', lazy='dynamic')),
2727 section_id = self.tbl_override.c.section,
2728 section = relation(Section, \
2729 backref=backref('overrides', lazy='dynamic')),
2730 overridetype_id = self.tbl_override.c.type,
2731 overridetype = relation(OverrideType, \
2732 backref=backref('overrides', lazy='dynamic'))))
2734 mapper(OverrideType, self.tbl_override_type,
2735 properties = dict(overridetype = self.tbl_override_type.c.type,
2736 overridetype_id = self.tbl_override_type.c.id))
2738 mapper(PolicyQueue, self.tbl_policy_queue,
2739 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id,
2740 suite = relation(Suite, primaryjoin=(self.tbl_policy_queue.c.suite_id == self.tbl_suite.c.id))))
2742 mapper(PolicyQueueUpload, self.tbl_policy_queue_upload,
2744 changes = relation(DBChange),
2745 policy_queue = relation(PolicyQueue, backref='uploads'),
2746 target_suite = relation(Suite),
2747 source = relation(DBSource),
2748 binaries = relation(DBBinary, secondary=self.tbl_policy_queue_upload_binaries_map),
2751 mapper(PolicyQueueByhandFile, self.tbl_policy_queue_byhand_file,
2753 upload = relation(PolicyQueueUpload, backref='byhand'),
2757 mapper(Priority, self.tbl_priority,
2758 properties = dict(priority_id = self.tbl_priority.c.id))
2760 mapper(Section, self.tbl_section,
2761 properties = dict(section_id = self.tbl_section.c.id,
2762 section=self.tbl_section.c.section))
2764 mapper(DBSource, self.tbl_source,
2765 properties = dict(source_id = self.tbl_source.c.id,
2766 version = self.tbl_source.c.version,
2767 maintainer_id = self.tbl_source.c.maintainer,
2768 poolfile_id = self.tbl_source.c.file,
2769 poolfile = relation(PoolFile),
2770 fingerprint_id = self.tbl_source.c.sig_fpr,
2771 fingerprint = relation(Fingerprint),
2772 changedby_id = self.tbl_source.c.changedby,
2773 srcfiles = relation(DSCFile,
2774 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
2775 suites = relation(Suite, secondary=self.tbl_src_associations,
2776 backref=backref('sources', lazy='dynamic')),
2777 uploaders = relation(Maintainer,
2778 secondary=self.tbl_src_uploaders),
2779 key = relation(SourceMetadata, cascade='all',
2780 collection_class=attribute_mapped_collection('key'))),
2781 extension = validator)
2783 mapper(SrcFormat, self.tbl_src_format,
2784 properties = dict(src_format_id = self.tbl_src_format.c.id,
2785 format_name = self.tbl_src_format.c.format_name))
2787 mapper(Suite, self.tbl_suite,
2788 properties = dict(suite_id = self.tbl_suite.c.id,
2789 policy_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.policy_queue_id == self.tbl_policy_queue.c.id)),
2790 new_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.new_queue_id == self.tbl_policy_queue.c.id)),
2791 copy_queues = relation(BuildQueue,
2792 secondary=self.tbl_suite_build_queue_copy),
2793 srcformats = relation(SrcFormat, secondary=self.tbl_suite_src_formats,
2794 backref=backref('suites', lazy='dynamic')),
2795 archive = relation(Archive, backref='suites'),
2796 acls = relation(ACL, secondary=self.tbl_suite_acl_map, collection_class=set)),
2797 extension = validator)
2799 mapper(Uid, self.tbl_uid,
2800 properties = dict(uid_id = self.tbl_uid.c.id,
2801 fingerprint = relation(Fingerprint)),
2802 extension = validator)
2804 mapper(BinContents, self.tbl_bin_contents,
2806 binary = relation(DBBinary,
2807 backref=backref('contents', lazy='dynamic', cascade='all')),
2808 file = self.tbl_bin_contents.c.file))
2810 mapper(SrcContents, self.tbl_src_contents,
2812 source = relation(DBSource,
2813 backref=backref('contents', lazy='dynamic', cascade='all')),
2814 file = self.tbl_src_contents.c.file))
2816 mapper(MetadataKey, self.tbl_metadata_keys,
2818 key_id = self.tbl_metadata_keys.c.key_id,
2819 key = self.tbl_metadata_keys.c.key))
2821 mapper(BinaryMetadata, self.tbl_binaries_metadata,
2823 binary_id = self.tbl_binaries_metadata.c.bin_id,
2824 binary = relation(DBBinary),
2825 key_id = self.tbl_binaries_metadata.c.key_id,
2826 key = relation(MetadataKey),
2827 value = self.tbl_binaries_metadata.c.value))
2829 mapper(SourceMetadata, self.tbl_source_metadata,
2831 source_id = self.tbl_source_metadata.c.src_id,
2832 source = relation(DBSource),
2833 key_id = self.tbl_source_metadata.c.key_id,
2834 key = relation(MetadataKey),
2835 value = self.tbl_source_metadata.c.value))
2837 mapper(VersionCheck, self.tbl_version_check,
2839 suite_id = self.tbl_version_check.c.suite,
2840 suite = relation(Suite, primaryjoin=self.tbl_version_check.c.suite==self.tbl_suite.c.id),
2841 reference_id = self.tbl_version_check.c.reference,
2842 reference = relation(Suite, primaryjoin=self.tbl_version_check.c.reference==self.tbl_suite.c.id, lazy='joined')))
2844 ## Connection functions
2845 def __createconn(self):
2846 from config import Config
2848 if cnf.has_key("DB::Service"):
2849 connstr = "postgresql://service=%s" % cnf["DB::Service"]
2850 elif cnf.has_key("DB::Host"):
2852 connstr = "postgresql://%s" % cnf["DB::Host"]
2853 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
2854 connstr += ":%s" % cnf["DB::Port"]
2855 connstr += "/%s" % cnf["DB::Name"]
2858 connstr = "postgresql:///%s" % cnf["DB::Name"]
2859 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
2860 connstr += "?port=%s" % cnf["DB::Port"]
2862 engine_args = { 'echo': self.debug }
2863 if cnf.has_key('DB::PoolSize'):
2864 engine_args['pool_size'] = int(cnf['DB::PoolSize'])
2865 if cnf.has_key('DB::MaxOverflow'):
2866 engine_args['max_overflow'] = int(cnf['DB::MaxOverflow'])
2867 if sa_major_version == '0.6' and cnf.has_key('DB::Unicode') and \
2868 cnf['DB::Unicode'] == 'false':
2869 engine_args['use_native_unicode'] = False
2871 # Monkey patch a new dialect in in order to support service= syntax
2872 import sqlalchemy.dialects.postgresql
2873 from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
2874 class PGDialect_psycopg2_dak(PGDialect_psycopg2):
2875 def create_connect_args(self, url):
2876 if str(url).startswith('postgresql://service='):
2878 servicename = str(url)[21:]
2879 return (['service=%s' % servicename], {})
2881 return PGDialect_psycopg2.create_connect_args(self, url)
2883 sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak
2886 self.db_pg = create_engine(connstr, **engine_args)
2887 self.db_meta = MetaData()
2888 self.db_meta.bind = self.db_pg
2889 self.db_smaker = sessionmaker(bind=self.db_pg,
2893 self.__setuptables()
2894 self.__setupmappers()
2896 except OperationalError as e:
2898 utils.fubar("Cannot connect to database (%s)" % str(e))
2900 self.pid = os.getpid()
2902 def session(self, work_mem = 0):
2904 Returns a new session object. If a work_mem parameter is provided a new
2905 transaction is started and the work_mem parameter is set for this
2906 transaction. The work_mem parameter is measured in MB. A default value
2907 will be used if the parameter is not set.
2909 # reinitialize DBConn in new processes
2910 if self.pid != os.getpid():
2913 session = self.db_smaker()
2915 session.execute("SET LOCAL work_mem TO '%d MB'" % work_mem)
# Export the database connection singleton as part of the public API.
__all__ += ['DBConn']