5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
38 from os.path import normpath
45 from daklib.gpg import SignedFile
52 import simplejson as json
54 from datetime import datetime, timedelta
55 from errno import ENOENT
56 from tempfile import mkstemp, mkdtemp
57 from subprocess import Popen, PIPE
58 from tarfile import TarFile
60 from inspect import getargspec
63 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
65 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
66 backref, MapperExtension, EXT_CONTINUE, object_mapper, clear_mappers
67 from sqlalchemy import types as sqltypes
68 from sqlalchemy.orm.collections import attribute_mapped_collection
69 from sqlalchemy.ext.associationproxy import association_proxy
71 # Don't remove this, we re-export the exceptions to scripts which import us
72 from sqlalchemy.exc import *
73 from sqlalchemy.orm.exc import NoResultFound
75 # Only import Config until Queue stuff is changed to store its config
77 from config import Config
78 from textutils import fix_maintainer
79 from dak_exceptions import DBUpdateError, NoSourceFieldError, FileExistsError
81 # suppress some deprecation warnings in squeeze related to sqlalchemy
83 warnings.filterwarnings('ignore', \
84 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
86 warnings.filterwarnings('ignore', \
87 "Predicate of partial index .* ignored during reflection", \
91 ################################################################################
# Patch in support for the debversion field type so that it works during
# reflection.
# NOTE(review): this excerpt is incomplete — the original selects
# UserDefinedType inside a try/except that is not visible here; confirm
# against the full file before relying on the flow below.

# that is for sqlalchemy 0.6
UserDefinedType = sqltypes.UserDefinedType
# this one for sqlalchemy 0.5
UserDefinedType = sqltypes.TypeEngine

class DebVersion(UserDefinedType):
    # Custom column type mapping PostgreSQL's 'debversion' to SQLAlchemy.
    # NOTE(review): the method bodies are missing from this excerpt.
    def get_col_spec(self):
    def bind_processor(self, dialect):
    # ' = None' is needed for sqlalchemy 0.5:
    def result_processor(self, dialect, coltype = None):

# Register the debversion type with the PostgreSQL dialect so reflection
# recognizes 'debversion' columns; hard-fail on unsupported SQLA versions.
sa_major_version = sqlalchemy.__version__[0:3]
if sa_major_version in ["0.5", "0.6", "0.7"]:
    from sqlalchemy.databases import postgres
    postgres.ischema_names['debversion'] = DebVersion
# NOTE(review): this raise presumably sits in an else branch — confirm.
raise Exception("dak only ported to SQLA versions 0.5 to 0.7. See daklib/dbconn.py")

################################################################################

# Public API of this module; extended via __all__.append() after each
# definition below.  IntegrityError/SQLAlchemyError are re-exports.
__all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']

################################################################################
def session_wrapper(fn):
    """
    Wrapper around common ".., session=None):" handling. If the wrapped
    function is called without passing 'session', we create a local one
    and destroy it when the function ends.

    Also attaches a commit_or_flush method to the session; if we created a
    local session, this is a synonym for session.commit(), otherwise it is a
    synonym for session.flush().
    """
    def wrapped(*args, **kwargs):
        private_transaction = False

        # Find the session object
        session = kwargs.get('session')

        # NOTE(review): excerpt incomplete — the if/elif structure around
        # the two DBConn().session() assignments is partially missing.
        if len(args) <= len(getargspec(fn)[0]) - 1:
            # No session specified as last argument or in kwargs
            private_transaction = True
            session = kwargs['session'] = DBConn().session()
        # Session is last argument in args
            session = args[-1] = DBConn().session()
            private_transaction = True

        # A private session may be committed outright; a caller-supplied
        # session is only flushed so the caller keeps transaction control.
        if private_transaction:
            session.commit_or_flush = session.commit
            session.commit_or_flush = session.flush

        return fn(*args, **kwargs)

        # NOTE(review): a try/finally presumably wraps the call above so the
        # private session is closed — the finally block is not visible here.
        if private_transaction:
            # We created a session; close it.

    # Preserve the wrapped function's metadata (Python 2 style attribute).
    wrapped.__doc__ = fn.__doc__
    wrapped.func_name = fn.func_name

__all__.append('session_wrapper')

################################################################################
class ORMObject(object):
    """
    ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
    derived classes must implement the properties() method.

    NOTE(review): this excerpt of the class is incomplete — several method
    headers and statements are missing from the visible lines below.
    """

    def properties(self):
        """
        This method should be implemented by all derived classes and returns a
        list of the important properties. The properties 'created' and
        'modified' will be added automatically. A suffix '_count' should be
        added to properties that are lists or query objects. The most important
        property name should be returned as the first element in the list
        because it is used by repr().
        """

    # NOTE(review): json() method header not visible in this excerpt.
        """
        Returns a JSON representation of the object based on the properties
        returned from the properties() method.
        """
        # add created and modified
        all_properties = self.properties() + ['created', 'modified']
        for property in all_properties:
            # check for list or query
            if property[-6:] == '_count':
                real_property = property[:-6]
                if not hasattr(self, real_property):
                value = getattr(self, real_property)
                if hasattr(value, '__len__'):
                elif hasattr(value, 'count'):
                    # query (but not during validation)
                    if self.in_validation:
                    value = value.count()
                raise KeyError('Do not understand property %s.' % property)
            if not hasattr(self, property):
            value = getattr(self, property)
            elif isinstance(value, ORMObject):
                # use repr() for ORMObject types
            # we want a string for all other types because json cannot
            # serialize arbitrary objects
            data[property] = value
        return json.dumps(data)

    # NOTE(review): classname() method header not visible in this excerpt.
        """
        Returns the name of the class.
        """
        return type(self).__name__

    # NOTE(review): __repr__() method header not visible in this excerpt.
        """
        Returns a short string representation of the object using the first
        element from the properties() method.
        """
        primary_property = self.properties()[0]
        value = getattr(self, primary_property)
        return '<%s %s>' % (self.classname(), str(value))

    # NOTE(review): __str__() method header not visible in this excerpt.
        """
        Returns a human readable form of the object using the properties()
        """
        return '<%s %s>' % (self.classname(), self.json())

    def not_null_constraints(self):
        """
        Returns a list of properties that must be not NULL. Derived classes
        should override this method if needed.
        """

    # printf-style template used by validate() for constraint violations
    validation_message = \
        "Validation failed because property '%s' must not be empty in object\n%s"

    # Guards against re-entrant queries while str(self) runs during
    # validation (see json() above).
    in_validation = False

    # NOTE(review): validate() method header not visible in this excerpt.
        """
        This function validates the not NULL constraints as returned by
        not_null_constraints(). It raises the DBUpdateError exception if
        """
        for property in self.not_null_constraints():
            # TODO: It is a bit awkward that the mapper configuration allow
            # directly setting the numeric _id columns. We should get rid of it
            if hasattr(self, property + '_id') and \
                getattr(self, property + '_id') is not None:
            if not hasattr(self, property) or getattr(self, property) is None:
                # str() might lead to races due to a 2nd flush
                self.in_validation = True
                message = self.validation_message % (property, str(self))
                self.in_validation = False
                raise DBUpdateError(message)

    # NOTE(review): presumably decorated with @classmethod — not visible.
    def get(cls, primary_key, session = None):
        """
        This is a support function that allows getting an object by its primary
        key:

            Architecture.get(3[, session])

        instead of the more verbose

            session.query(Architecture).get(3)
        """
        return session.query(cls).get(primary_key)

    def session(self, replace = False):
        """
        Returns the current session that is associated with the object. May
        return None is object is in detached state.
        """
        return object_session(self)

    def clone(self, session = None):
        """
        Clones the current object in a new session and returns the new clone. A
        fresh session is created if the optional session parameter is not
        provided. The function will fail if a session is provided and has

        RATIONALE: SQLAlchemy's session is not thread safe. This method clones
        an existing object to allow several threads to work with their own
        instances of an ORMObject.

        WARNING: Only persistent (committed) objects can be cloned. Changes
        made to the original object that are not committed yet will get lost.
        The session of the new object will always be rolled back to avoid
        """
        if self.session() is None:
            raise RuntimeError( \
                'Method clone() failed for detached object:\n%s' % self)
        # Flush so the clone sees the current committed/flushed state.
        self.session().flush()
        mapper = object_mapper(self)
        primary_key = mapper.primary_key_from_instance(self)
        object_class = self.__class__
        # NOTE(review): the 'if session is None:' guard is missing here.
            session = DBConn().session()
        elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
            raise RuntimeError( \
                'Method clone() failed due to unflushed changes in session.')
        new_object = session.query(object_class).get(primary_key)

        if new_object is None:
            raise RuntimeError( \
                'Method clone() failed for non-persistent object:\n%s' % self)

__all__.append('ORMObject')

################################################################################
class Validator(MapperExtension):
    """
    This class calls the validate() method for each instance for the
    'before_update' and 'before_insert' events. A global object validator is
    used for configuring the individual mappers.
    """

    def before_update(self, mapper, connection, instance):
        # NOTE(review): body not visible in this excerpt — presumably calls
        # instance.validate() and returns EXT_CONTINUE; confirm upstream.

    def before_insert(self, mapper, connection, instance):
        # NOTE(review): body not visible in this excerpt (see above).

# Shared extension instance wired into the mapper() configuration below.
validator = Validator()

################################################################################
372 ################################################################################
class Architecture(ORMObject):
    """ORM class for a row of the architecture table."""

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Support comparing directly against a plain architecture name.
        if not isinstance(val, str):
            # Defer to the default comparison machinery for other types.
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        # Mirror of __eq__ for the inequality operator.
        if not isinstance(val, str):
            # Defer to the default comparison machinery for other types.
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # First entry feeds ORMObject.__repr__().
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']
__all__.append('Architecture')

# NOTE(review): presumably decorated with @session_wrapper — not visible.
def get_architecture(architecture, session=None):
    """
    Returns database id for given C{architecture}.

    @type architecture: string
    @param architecture: The name of the architecture

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Architecture
    @return: Architecture object for the given arch (None if not present)
    """
    q = session.query(Architecture).filter_by(arch_string=architecture)
    # NOTE(review): the try/q.one() lines are missing from this excerpt.
    except NoResultFound:

__all__.append('get_architecture')

# TODO: should be removed because the implementation is too trivial

# NOTE(review): presumably decorated with @session_wrapper — not visible.
def get_architecture_suites(architecture, session=None):
    """
    Returns list of Suite objects for given C{architecture} name

    @type architecture: str
    @param architecture: Architecture name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Suite objects for the given name (may be empty)
    """
    # Delegates lookup to get_architecture(); raises AttributeError if the
    # architecture does not exist (get_architecture returns None then).
    return get_architecture(architecture, session).suites

__all__.append('get_architecture_suites')

################################################################################
class Archive(object):
    # Thin row wrapper for the archive table.
    # NOTE(review): __init__ body and the __repr__ header are not visible
    # in this excerpt.
    def __init__(self, *args, **kwargs):

        return '<Archive %s>' % self.archive_name

__all__.append('Archive')

# NOTE(review): presumably decorated with @session_wrapper — not visible.
def get_archive(archive, session=None):
    """
    returns database id for given C{archive}.

    @type archive: string
    @param archive: the name of the archive

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Archive
    @return: Archive object for the given name (None if not present)
    """
    # Archive names are matched case-insensitively by lower-casing first.
    archive = archive.lower()

    q = session.query(Archive).filter_by(archive_name=archive)
    # NOTE(review): the try/q.one() lines are missing from this excerpt.
    except NoResultFound:

__all__.append('get_archive')

################################################################################

class ArchiveFile(object):
    def __init__(self, archive=None, component=None, file=None):
        self.archive = archive
        self.component = component
        # NOTE(review): 'self.file = file' and the fullpath @property header
        # are missing from this excerpt.

        # Pool path layout: <archive>/pool/<component>/<filename>
        return os.path.join(self.archive.path, 'pool', self.component.component_name, self.file.filename)

__all__.append('ArchiveFile')

################################################################################

class BinContents(ORMObject):
    # One path entry of a binary package's contents listing.
    # NOTE(review): __init__ body (attribute assignments) not visible here.
    def __init__(self, file = None, binary = None):

    def properties(self):
        return ['file', 'binary']

__all__.append('BinContents')

################################################################################
def subprocess_setup():
    """Restore the default SIGPIPE action in a freshly forked child.

    Python installs a SIGPIPE handler by default, which is usually not
    what non-Python subprocesses expect; pass this as Popen's preexec_fn.
    """
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
class DBBinary(ORMObject):
    # ORM class for a row of the binaries table (a .deb/.udeb package).
    # NOTE(review): several statements of this class are missing from this
    # excerpt; hedged notes mark the visible gaps.

    def __init__(self, package = None, source = None, version = None, \
        maintainer = None, architecture = None, poolfile = None, \
        binarytype = 'deb', fingerprint=None):
        self.package = package
        # NOTE(review): 'self.source = source' not visible in this excerpt.
        self.version = version
        self.maintainer = maintainer
        self.architecture = architecture
        self.poolfile = poolfile
        self.binarytype = binarytype
        self.fingerprint = fingerprint

    # NOTE(review): property header (pkid) not visible in this excerpt.
        return self.binary_id

    def properties(self):
        return ['package', 'version', 'maintainer', 'source', 'architecture', \
            'poolfile', 'binarytype', 'fingerprint', 'install_date', \
            'suites_count', 'binary_id', 'contents_count', 'extra_sources']

    def not_null_constraints(self):
        # NOTE(review): the continuation line of this list is missing.
        return ['package', 'version', 'maintainer', 'source', 'poolfile', \

    # Dict-like access to the binary's key/value metadata rows.
    metadata = association_proxy('key', 'value')

    def get_component_name(self):
        # Component is derived via the pool file's location row.
        return self.poolfile.location.component.component_name

    def scan_contents(self):
        """
        Yields the contents of the package. Only regular files are yielded and
        the path names are normalized after converting them from either utf-8
        or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the
        package does not contain any regular file.
        """
        fullpath = self.poolfile.fullpath
        # Stream the data member out of the .deb via dpkg-deb; SIGPIPE is
        # reset so dpkg-deb exits quietly if we stop reading early.
        dpkg = Popen(['dpkg-deb', '--fsys-tarfile', fullpath], stdout = PIPE,
            preexec_fn = subprocess_setup)
        tar = TarFile.open(fileobj = dpkg.stdout, mode = 'r|')
        for member in tar.getmembers():
            if not member.isdir():
                name = normpath(member.name)
                # enforce proper utf-8 encoding
                # NOTE(review): the try/yield lines are missing from this
                # excerpt; iso8859-1 is the fallback decoding.
                except UnicodeDecodeError:
                    name = name.decode('iso8859-1').encode('utf-8')

    def read_control(self):
        """
        Reads the control information from a binary.

        @rtype: text
        @return: stanza text of the control section.
        """
        fullpath = self.poolfile.fullpath
        deb_file = open(fullpath, 'r')
        stanza = utils.deb_extract_control(deb_file)
        # NOTE(review): deb_file.close() and the return statement are
        # missing from this excerpt.

    def read_control_fields(self):
        """
        Reads the control information from a binary and return
        it as a dictionary-like object.

        @rtype: TagSection
        @return: fields of the control section as a dictionary.
        """
        stanza = self.read_control()
        return apt_pkg.TagSection(stanza)

__all__.append('DBBinary')

# NOTE(review): presumably decorated with @session_wrapper — not visible.
def get_suites_binary_in(package, session=None):
    """
    Returns list of Suite objects which given C{package} name is in

    @type package: str
    @param package: DBBinary package name to search for

    @rtype: list
    @return: list of Suite objects for the given package
    """
    return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()

__all__.append('get_suites_binary_in')
# NOTE(review): presumably decorated with @session_wrapper — not visible.
def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
    """
    Returns the component name of the newest binary package in suite_list or
    None if no package is found. The result can be optionally filtered by a list
    of architecture names.

    @type package: str
    @param package: DBBinary package name to search for

    @type suite_list: list of str
    @param suite_list: list of suite_name items

    @type arch_list: list of str
    @param arch_list: optional list of arch_string items that defaults to []

    @rtype: str or NoneType
    @return: name of component or None
    """
    # NOTE(review): mutable default argument arch_list=[] — harmless only
    # as long as it is never mutated (it is only read below).
    q = session.query(DBBinary).filter_by(package = package). \
        join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
    if len(arch_list) > 0:
        q = q.join(DBBinary.architecture). \
            filter(Architecture.arch_string.in_(arch_list))
    # Newest version first; take only that one.
    binary = q.order_by(desc(DBBinary.version)).first()
    # NOTE(review): the 'if binary is None: return None' guard is missing
    # from this excerpt.
    return binary.get_component_name()

__all__.append('get_component_by_package_suite')

################################################################################
class BinaryACL(object):
    # Thin row wrapper for the binary_acl table.
    # NOTE(review): __init__ body and __repr__ header not visible here.
    def __init__(self, *args, **kwargs):

        return '<BinaryACL %s>' % self.binary_acl_id

__all__.append('BinaryACL')

################################################################################

class BinaryACLMap(object):
    # Thin row wrapper for the binary_acl_map table.
    # NOTE(review): __init__ body and __repr__ header not visible here.
    def __init__(self, *args, **kwargs):

        return '<BinaryACLMap %s>' % self.binary_acl_map_id

__all__.append('BinaryACLMap')

################################################################################
675 ArchiveDir "%(archivepath)s";
676 OverrideDir "%(overridedir)s";
677 CacheDir "%(cachedir)s";
682 Packages::Compress ". bzip2 gzip";
683 Sources::Compress ". bzip2 gzip";
688 bindirectory "incoming"
693 BinOverride "override.sid.all3";
694 BinCacheDB "packages-accepted.db";
696 FileList "%(filelist)s";
699 Packages::Extensions ".deb .udeb";
702 bindirectory "incoming/"
705 BinOverride "override.sid.all3";
706 SrcOverride "override.sid.all3.src";
707 FileList "%(filelist)s";
class BuildQueue(object):
    # Represents a build queue directory on disk plus its generated apt
    # metadata (Packages/Sources/Release files).
    # NOTE(review): large parts of this class are missing from this
    # excerpt; the visible code below is incomplete and hedged notes mark
    # the gaps.

    def __init__(self, *args, **kwargs):

        return '<BuildQueue %s>' % self.queue_name

    def write_metadata(self, starttime, force=False):
        # Do we write out metafiles?
        if not (force or self.generate_metadata):

        session = DBConn().session().object_session(self)

        # Temp-file handles/names, initialized so cleanup can test them.
        fl_fd = fl_name = ac_fd = ac_name = None

        # All real architectures; 'source' is handled separately by apt.
        arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
        startdir = os.getcwd()

        # Grab files we want to include: anything still within its
        # stay_of_execution window relative to starttime.
        newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
        newer += session.query(BuildQueuePolicyFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueuePolicyFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
        # Write file list with newer files
        (fl_fd, fl_name) = mkstemp()
        # NOTE(review): the surrounding 'for n in newer:' loop is missing
        # from this excerpt.
        os.write(fl_fd, '%s\n' % n.fullpath)

        # Write minimal apt.conf
        # TODO: Remove hardcoding from template
        (ac_fd, ac_name) = mkstemp()
        # NOTE(review): the closing entries of this dict literal are
        # missing from this excerpt.
        os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
            'cachedir': cnf["Dir::Cache"],
            'overridedir': cnf["Dir::Override"],

        # Run apt-ftparchive generate
        # NOTE(review): os.system with interpolated strings throughout this
        # method — shell-injection prone if any config value is untrusted.
        os.chdir(os.path.dirname(ac_name))
        os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))

        # Run apt-ftparchive release
        # TODO: Eww - fix this
        bname = os.path.basename(self.path)

        # We have to remove the Release file otherwise it'll be included in the
        # new Release we generate below.
        os.unlink(os.path.join(bname, 'Release'))

        os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))

        # Crude hack with open and append, but this whole section is and should be redone.
        if self.notautomatic:
            release=open("Release", "a")
            release.write("NotAutomatic: yes\n")

        # Sign the Release file with the configured key(s).
        keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
        if cnf.has_key("Dinstall::SigningPubKeyring"):
            keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]

        os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))

        # Move the files if we got this far
        os.rename('Release', os.path.join(bname, 'Release'))
        os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))

        # Clean up any left behind files
        # NOTE(review): the cleanup code itself is missing from this excerpt.

    def clean_and_update(self, starttime, Logger, dryrun=False):
        """WARNING: This routine commits for you"""
        session = DBConn().session().object_session(self)

        if self.generate_metadata and not dryrun:
            self.write_metadata(starttime)

        # Grab files older than our execution time
        older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
        older += session.query(BuildQueuePolicyFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueuePolicyFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()

        # NOTE(review): the 'for o in older:' loop and its dryrun/try
        # branches are partially missing from this excerpt.
            Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
            Logger.log(["I: Removing %s from the queue" % o.fullpath])
            os.unlink(o.fullpath)
            # If it wasn't there, don't worry
            if e.errno == ENOENT:
            # TODO: Replace with proper logging call
            Logger.log(["E: Could not remove %s" % o.fullpath])

        # Sweep the queue directory for leftover metadata/unreferenced links.
        for f in os.listdir(self.path):
            if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):

            if not self.contains_filename(f):
                fp = os.path.join(self.path, f)
                Logger.log(["I: Would remove unused link %s" % fp])
                Logger.log(["I: Removing unused link %s" % fp])
                Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])

    def contains_filename(self, filename):
        """
        @returns True if filename is supposed to be in the queue; False otherwise
        """
        session = DBConn().session().object_session(self)
        # Checked against both pool-backed and policy-queue-backed files.
        if session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id, filename = filename).count() > 0:
        elif session.query(BuildQueuePolicyFile).filter_by(build_queue = self, filename = filename).count() > 0:

    def add_file_from_pool(self, poolfile):
        """Copies a file into the pool. Assumes that the PoolFile object is
        attached to the same SQLAlchemy session as the Queue object is.

        The caller is responsible for committing after calling this function."""
        poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]

        # Check if we have a file of this name or this ID already
        for f in self.queuefiles:
            if (f.fileid is not None and f.fileid == poolfile.file_id) or \
                (f.poolfile is not None and f.poolfile.filename == poolfile_basename):
                # In this case, update the BuildQueueFile entry so we
                # don't remove it too early
                f.lastused = datetime.now()
                DBConn().session().object_session(poolfile).add(f)

        # Prepare BuildQueueFile object
        qf = BuildQueueFile()
        qf.build_queue_id = self.queue_id
        qf.filename = poolfile_basename

        targetpath = poolfile.fullpath
        queuepath = os.path.join(self.path, poolfile_basename)

        # NOTE(review): the try/copy-vs-symlink branch structure here is
        # partially missing from this excerpt.
            # We need to copy instead of symlink
            utils.copy(targetpath, queuepath)
            # NULL in the fileid field implies a copy
            os.symlink(targetpath, queuepath)
            qf.fileid = poolfile.file_id
        except FileExistsError:
            if not poolfile.identical_to(queuepath):

        # Get the same session as the PoolFile is using and add the qf to it
        DBConn().session().object_session(poolfile).add(qf)

    def add_changes_from_policy_queue(self, policyqueue, changes):
        """
        Copies a changes from a policy queue together with its poolfiles.

        @type policyqueue: PolicyQueue
        @param policyqueue: policy queue to copy the changes from

        @type changes: DBChange
        @param changes: changes to copy to this build queue
        """
        for policyqueuefile in changes.files:
            self.add_file_from_policy_queue(policyqueue, policyqueuefile)
        for poolfile in changes.poolfiles:
            self.add_file_from_pool(poolfile)

    def add_file_from_policy_queue(self, policyqueue, policyqueuefile):
        """
        Copies a file from a policy queue.
        Assumes that the policyqueuefile is attached to the same SQLAlchemy
        session as the Queue object is. The caller is responsible for
        committing after calling this function.

        @type policyqueue: PolicyQueue
        @param policyqueue: policy queue to copy the file from

        @type policyqueuefile: ChangePendingFile
        @param policyqueuefile: file to be added to the build queue
        """
        session = DBConn().session().object_session(policyqueuefile)

        # Is the file already there?
        # NOTE(review): the try: around the .one() call is missing here.
            f = session.query(BuildQueuePolicyFile).filter_by(build_queue=self, file=policyqueuefile).one()
            f.lastused = datetime.now()
        except NoResultFound:
            pass # continue below

        # We have to add the file.
        f = BuildQueuePolicyFile()
        f.file = policyqueuefile
        f.filename = policyqueuefile.filename

        source = os.path.join(policyqueue.path, policyqueuefile.filename)

        # Always copy files from policy queues as they might move around.
        # NOTE(review): the try: line and target assignment are missing.
            utils.copy(source, target)
        except FileExistsError:
            if not policyqueuefile.identical_to(target):

__all__.append('BuildQueue')

# NOTE(review): presumably decorated with @session_wrapper — not visible.
def get_build_queue(queuename, session=None):
    """
    Returns BuildQueue object for given C{queue name}, creating it if it does not
    exist.

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: BuildQueue
    @return: BuildQueue object for the given queue
    """
    q = session.query(BuildQueue).filter_by(queue_name=queuename)
    # NOTE(review): the try/q.one() lines are missing from this excerpt.
    except NoResultFound:

__all__.append('get_build_queue')

################################################################################
class BuildQueueFile(object):
    """
    BuildQueueFile represents a file in a build queue coming from a pool.
    """

    def __init__(self, *args, **kwargs):

        return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)

    # NOTE(review): @property/def fullpath header not visible here.
        return os.path.join(self.buildqueue.path, self.filename)

__all__.append('BuildQueueFile')

################################################################################

class BuildQueuePolicyFile(object):
    """
    BuildQueuePolicyFile represents a file in a build queue that comes from a
    policy queue (and not a pool).
    """

    def __init__(self, *args, **kwargs):

    #def filename(self):
    #    return self.file.filename

    # NOTE(review): @property/def fullpath header not visible here.
        return os.path.join(self.build_queue.path, self.filename)

__all__.append('BuildQueuePolicyFile')

################################################################################

class ChangePendingBinary(object):
    # Thin row wrapper; __init__ body and __repr__ header not visible here.
    def __init__(self, *args, **kwargs):

        return '<ChangePendingBinary %s>' % self.change_pending_binary_id

__all__.append('ChangePendingBinary')

################################################################################

class ChangePendingFile(object):
    # Thin row wrapper; __init__ body and __repr__ header not visible here.
    def __init__(self, *args, **kwargs):

        return '<ChangePendingFile %s>' % self.change_pending_file_id

    def identical_to(self, filename):
        """
        compare size and hash with the given file

        @rtype: bool
        @return: true if the given file has the same size and hash as this object; false otherwise
        """
        st = os.stat(filename)
        # Cheap size check first; only hash when sizes match.
        if self.size != st.st_size:

        f = open(filename, "r")
        sha256sum = apt_pkg.sha256sum(f)
        if sha256sum != self.sha256sum:
        # NOTE(review): the return statements and f.close() are missing
        # from this excerpt.

__all__.append('ChangePendingFile')

################################################################################

class ChangePendingSource(object):
    # Thin row wrapper; __init__ body and __repr__ header not visible here.
    def __init__(self, *args, **kwargs):

        return '<ChangePendingSource %s>' % self.change_pending_source_id

__all__.append('ChangePendingSource')

################################################################################
class Component(ORMObject):
    # ORM class for a row of the component table.
    def __init__(self, component_name = None):
        self.component_name = component_name

    def __eq__(self, val):
        # Allow comparison directly against a component-name string.
        if isinstance(val, str):
            return (self.component_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.component_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        return ['component_name', 'component_id', 'description', \
            'location_count', 'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        return ['component_name']

__all__.append('Component')

# NOTE(review): presumably decorated with @session_wrapper — not visible.
def get_component(component, session=None):
    """
    Returns database id for given C{component}.

    @type component: string
    @param component: The name of the override type

    @rtype: Component
    @return: the database id for the given component
    """
    # Component names are matched case-insensitively by lower-casing first.
    component = component.lower()

    q = session.query(Component).filter_by(component_name=component)
    # NOTE(review): the try/q.one() lines are missing from this excerpt.
    except NoResultFound:

__all__.append('get_component')

# NOTE(review): presumably decorated with @session_wrapper — not visible.
def get_component_names(session=None):
    """
    Returns list of strings of component names.

    @rtype: list
    @return: list of strings of component names
    """
    return [ x.component_name for x in session.query(Component).all() ]

__all__.append('get_component_names')

################################################################################
class DBConfig(object):
    # Thin row wrapper for the config table.
    # NOTE(review): __init__ body and __repr__ header not visible here.
    def __init__(self, *args, **kwargs):

        return '<DBConfig %s>' % self.name

__all__.append('DBConfig')

################################################################################

# NOTE(review): presumably decorated with @session_wrapper — not visible.
def get_or_set_contents_file_id(filename, session=None):
    """
    Returns database id for given filename.

    If no matching file is found, a row is inserted.

    @type filename: string
    @param filename: The filename
    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: int
    @return: the database id for the given component
    """
    q = session.query(ContentFilename).filter_by(filename=filename)
    # NOTE(review): the try: line is missing from this excerpt.
        ret = q.one().cafilename_id
    except NoResultFound:
        # EAFP: insert a new row on a lookup miss.
        cf = ContentFilename()
        cf.filename = filename
        # NOTE(review): the session.add(cf) line is missing here.
        session.commit_or_flush()
        ret = cf.cafilename_id
    # NOTE(review): the final 'return ret' is missing from this excerpt.

__all__.append('get_or_set_contents_file_id')
# NOTE(review): presumably decorated with @session_wrapper — not visible.
def get_contents(suite, overridetype, section=None, session=None):
    """
    Returns contents for a suite / overridetype combination, limiting
    to a section if not None.

    @type suite: Suite
    @param suite: Suite object

    @type overridetype: OverrideType
    @param overridetype: OverrideType object

    @type section: Section
    @param section: Optional section object to limit results to

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: ResultsProxy
    @return: ResultsProxy object set up to return tuples of (filename, section,
    """

    # find me all of the contents for a given suite
    # NOTE(review): several SELECT-list lines of this SQL string are
    # missing from this excerpt.
    contents_q = """SELECT (p.path||'/'||n.file) AS fn,
        FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
        JOIN content_file_names n ON (c.filename=n.id)
        JOIN binaries b ON (b.id=c.binary_pkg)
        JOIN override o ON (o.package=b.package)
        JOIN section s ON (s.id=o.section)
        WHERE o.suite = :suiteid AND o.type = :overridetypeid
        AND b.type=:overridetypename"""

    # Values are bound parameters, not string-interpolated — safe from
    # SQL injection.
    vals = {'suiteid': suite.suite_id,
        'overridetypeid': overridetype.overridetype_id,
        'overridetypename': overridetype.overridetype}

    if section is not None:
        contents_q += " AND s.id = :sectionid"
        vals['sectionid'] = section.section_id

    contents_q += " ORDER BY fn"

    return session.execute(contents_q, vals)

__all__.append('get_contents')

################################################################################
class ContentFilepath(object):
    """ORM class mapping a directory path used in Contents files to its id."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ContentFilepath %s>' % self.filepath
1272 __all__.append('ContentFilepath')
@session_wrapper
def get_or_set_contents_path_id(filepath, session=None):
    """
    Returns database id for given path.

    If no matching file is found, a row is inserted.

    @type filepath: string
    @param filepath: The filepath
    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: int
    @return: the database id for the given path
    """
    q = session.query(ContentFilepath).filter_by(filepath=filepath)

    try:
        ret = q.one().cafilepath_id
    except NoResultFound:
        # Unknown path: insert and flush/commit so the id gets assigned.
        cf = ContentFilepath()
        cf.filepath = filepath
        session.add(cf)
        session.commit_or_flush()
        ret = cf.cafilepath_id

    return ret
1306 __all__.append('get_or_set_contents_path_id')
1308 ################################################################################
class ContentAssociation(object):
    """ORM class linking a binary package to a (path, filename) pair."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ContentAssociation %s>' % self.ca_id
1317 __all__.append('ContentAssociation')
def insert_content_paths(binary_id, fullpaths, session=None):
    """
    Make sure given path is associated with given binary id

    @type binary_id: int
    @param binary_id: the id of the binary
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session. If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code. If not passed, a commit
    will be performed at the end of the function, otherwise the caller is
    responsible for commiting.

    @return: True upon success
    """

    privatetrans = False
    if session is None:
        session = DBConn().session()
        privatetrans = True

    try:
        # Insert paths
        def generate_path_dicts():
            for fullpath in fullpaths:
                # Normalise away a leading "./" so stored paths are relative.
                if fullpath.startswith( './' ):
                    fullpath = fullpath[2:]

                yield {'filename':fullpath, 'id': binary_id }

        for d in generate_path_dicts():
            session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
                             d )

        session.commit()
        if privatetrans:
            session.close()
        return True

    except:
        # Best effort: report the failure but signal it via the return value.
        traceback.print_exc()

        # Only rollback if we set up the session ourself
        if privatetrans:
            session.rollback()
            session.close()

        return False
1370 __all__.append('insert_content_paths')
1372 ################################################################################
class DSCFile(object):
    """ORM class linking a source package to a file listed in its .dsc."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DSCFile %s>' % self.dscfile_id
1381 __all__.append('DSCFile')
@session_wrapper
def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
    """
    Returns a list of DSCFiles which may be empty

    @type dscfile_id: int (optional)
    @param dscfile_id: the dscfile_id of the DSCFiles to find

    @type source_id: int (optional)
    @param source_id: the source id related to the DSCFiles to find

    @type poolfile_id: int (optional)
    @param poolfile_id: the poolfile id related to the DSCFiles to find

    @rtype: list
    @return: Possibly empty list of DSCFiles
    """
    q = session.query(DSCFile)

    # Each filter is optional and independent of the others.
    if dscfile_id is not None:
        q = q.filter_by(dscfile_id=dscfile_id)

    if source_id is not None:
        q = q.filter_by(source_id=source_id)

    if poolfile_id is not None:
        q = q.filter_by(poolfile_id=poolfile_id)

    return q.all()
1414 __all__.append('get_dscfiles')
1416 ################################################################################
class ExternalOverride(ORMObject):
    """ORM class for an external override entry (per-package key/value)."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ExternalOverride %s = %s: %s>' % (self.package, self.key, self.value)
1425 __all__.append('ExternalOverride')
1427 ################################################################################
class PoolFile(ORMObject):
    """A file stored in the pool, with its size and checksum metadata."""

    def __init__(self, filename = None, location = None, filesize = -1, \
        md5sum = None):
        self.filename = filename
        self.location = location
        self.filesize = filesize
        self.md5sum = md5sum

    @property
    def fullpath(self):
        # Resolve via the archive_files mapping to obtain an absolute path.
        session = DBConn().session().object_session(self)
        af = session.query(ArchiveFile).join(Archive).filter(ArchiveFile.file == self).first()
        # NOTE(review): assumes ArchiveFile exposes a .path — confirm against
        # the ArchiveFile definition elsewhere in this file.
        return af.path

    @property
    def component(self):
        # All archive_files rows for a pool file share one component;
        # .one() raises if that invariant is violated.
        session = DBConn().session().object_session(self)
        component_id = session.query(ArchiveFile.component_id).filter(ArchiveFile.file == self) \
                              .group_by(ArchiveFile.component_id).one()
        return session.query(Component).get(component_id)

    @property
    def basename(self):
        return os.path.basename(self.filename)

    def is_valid(self, filesize = -1, md5sum = None):
        # Size is compared numerically since callers may pass a string.
        return self.filesize == long(filesize) and self.md5sum == md5sum

    def properties(self):
        return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
            'sha256sum', 'source', 'binary', 'last_used']

    def not_null_constraints(self):
        return ['filename', 'md5sum']

    def identical_to(self, filename):
        """
        compare size and hash with the given file

        @rtype: bool
        @return: true if the given file has the same size and hash as this object; false otherwise
        """
        st = os.stat(filename)
        if self.filesize != st.st_size:
            return False

        f = open(filename, "r")
        sha256sum = apt_pkg.sha256sum(f)
        if sha256sum != self.sha256sum:
            return False

        return True
1482 __all__.append('PoolFile')
@session_wrapper
def check_poolfile(filename, filesize, md5sum, location_id, session=None):
    """
    Returns a tuple
    (ValidFileFound [boolean], PoolFile object or None)

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @type filesize: int
    @param filesize: the size of the file to check against the DB

    @type md5sum: string
    @param md5sum: the md5sum of the file to check against the DB

    @type location_id: int
    @param location_id: the id of the location to look in

    @rtype: tuple
    @return: Tuple of length 2.
                 - If valid pool file found: (C{True}, C{PoolFile object})
                 - If valid pool file not found:
                     - (C{False}, C{None}) if no file found
                     - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
    """
    poolfile = session.query(Location).get(location_id). \
        files.filter_by(filename=filename).first()
    valid = False
    if poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum):
        valid = True

    return (valid, poolfile)
1518 __all__.append('check_poolfile')
1520 # TODO: the implementation can trivially be inlined at the place where the
1521 # function is called
@session_wrapper
def get_poolfile_by_id(file_id, session=None):
    """
    Returns a PoolFile object or None for the given id

    @type file_id: int
    @param file_id: the id of the file to look for

    @rtype: PoolFile or None
    @return: either the PoolFile object or None
    """
    return session.query(PoolFile).get(file_id)
1536 __all__.append('get_poolfile_by_id')
@session_wrapper
def get_poolfile_like_name(filename, session=None):
    """
    Returns an array of PoolFile objects which are like the given name

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @rtype: array
    @return: array of PoolFile objects
    """

    # TODO: There must be a way of properly using bind parameters with %FOO%
    q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))

    return q.all()
1555 __all__.append('get_poolfile_like_name')
def add_poolfile(filename, datadict, location_id, session=None):
    """
    Add a new file to the pool

    @type filename: string
    @param filename: filename

    @type datadict: dict
    @param datadict: dict with needed data

    @type location_id: int
    @param location_id: database id of the location

    @rtype: PoolFile
    @return: the PoolFile object created
    """
    # Manage our own session/transaction if the caller did not supply one.
    privatetrans = False
    if session is None:
        session = DBConn().session()
        privatetrans = True

    poolfile = PoolFile()
    poolfile.filename = filename
    poolfile.filesize = datadict["size"]
    poolfile.md5sum = datadict["md5sum"]
    poolfile.sha1sum = datadict["sha1sum"]
    poolfile.sha256sum = datadict["sha256sum"]
    poolfile.location_id = location_id

    session.add(poolfile)
    # Flush to get a file id (NB: This is not a commit)
    session.flush()

    if privatetrans:
        session.commit()
        session.close()

    return poolfile
1588 __all__.append('add_poolfile')
1590 ################################################################################
class Fingerprint(ORMObject):
    """ORM class for a GPG key fingerprint known to the archive."""

    def __init__(self, fingerprint = None):
        self.fingerprint = fingerprint

    def properties(self):
        return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
            'binary']

    def not_null_constraints(self):
        return ['fingerprint']
1603 __all__.append('Fingerprint')
@session_wrapper
def get_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr or None
    """
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    try:
        ret = q.one()
    except NoResultFound:
        ret = None

    return ret
1630 __all__.append('get_fingerprint')
@session_wrapper
def get_or_set_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    If no matching fpr is found, a row is inserted.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr
    """
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    try:
        ret = q.one()
    except NoResultFound:
        fingerprint = Fingerprint()
        fingerprint.fingerprint = fpr
        session.add(fingerprint)
        session.commit_or_flush()
        ret = fingerprint

    return ret
1665 __all__.append('get_or_set_fingerprint')
1667 ################################################################################
1669 # Helper routine for Keyring class
def get_ldap_name(entry):
    """
    Extract a display name from an LDAP entry.

    Joins the first value of each of the cn/mn/sn attributes, skipping
    attributes that are missing, empty, or the placeholder "-".

    @type entry: dict
    @param entry: LDAP entry mapping attribute names to value lists

    @rtype: string
    @return: space-joined name components
    """
    name = []
    for k in ["cn", "mn", "sn"]:
        ret = entry.get(k)
        if ret and ret[0] != "" and ret[0] != "-":
            name.append(ret[0])
    return " ".join(name)
1678 ################################################################################
class Keyring(object):
    """In-memory view of a GPG keyring: key id -> uid/name/fingerprint info,
    plus a fingerprint -> key id reverse lookup."""

    gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
                     " --with-colons --fingerprint --fingerprint"

    # NOTE(review): these are class-level (shared across instances) — confirm
    # that is intentional before moving them into __init__.
    keys = {}
    fpr_lookup = {}

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<Keyring %s>' % self.keyring_name

    def de_escape_gpg_str(self, txt):
        # gpg --with-colons escapes bytes as \xNN; decode them back to chars.
        esclist = re.split(r'(\\x..)', txt)
        for x in range(1,len(esclist),2):
            esclist[x] = "%c" % (int(esclist[x][2:],16))
        return "".join(esclist)

    def parse_address(self, uid):
        """parses uid and returns a tuple of real name and email address"""
        (name, address) = email.Utils.parseaddr(uid)
        name = re.sub(r"\s*[(].*[)]", "", name)
        name = self.de_escape_gpg_str(name)
        # Fall back to the raw uid when no real name could be extracted.
        if name == "":
            name = uid
        return (name, address)

    def load_keys(self, keyring):
        """Populate self.keys and self.fpr_lookup from the given keyring file
        by parsing gpg's --with-colons output."""
        if not self.keyring_id:
            raise Exception('Must be initialized with database information')

        k = os.popen(self.gpg_invocation % keyring, "r")
        key = None
        signingkey = False

        for line in k:
            field = line.split(":")
            if field[0] == "pub":
                key = field[4]
                self.keys[key] = {}
                (name, addr) = self.parse_address(field[9])
                if "@" in addr:
                    self.keys[key]["email"] = addr
                    self.keys[key]["name"] = name
                self.keys[key]["fingerprints"] = []
                signingkey = True
            elif key and field[0] == "sub" and len(field) >= 12:
                # Subkey capabilities: only record fingerprints of signing keys.
                signingkey = ("s" in field[11])
            elif key and field[0] == "uid":
                (name, addr) = self.parse_address(field[9])
                if "email" not in self.keys[key] and "@" in addr:
                    self.keys[key]["email"] = addr
                    self.keys[key]["name"] = name
            elif signingkey and field[0] == "fpr":
                self.keys[key]["fingerprints"].append(field[9])
                self.fpr_lookup[field[9]] = key

    def import_users_from_ldap(self, session):
        """Look up uid/name information for the loaded keys in LDAP.

        @rtype: tuple
        @return: (byname, byuid) dictionaries mapping uid -> (keyid, name)
        and keyid -> (uid, name) respectively."""
        import ldap
        cnf = Config()

        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]

        l = ldap.open(LDAPServer)
        l.simple_bind_s("","")
        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
               ["uid", "keyfingerprint", "cn", "mn", "sn"])

        ldap_fin_uid_id = {}

        byuid = {}
        byname = {}

        for i in Attrs:
            entry = i[1]
            uid = entry["uid"][0]
            name = get_ldap_name(entry)
            fingerprints = entry["keyFingerPrint"]
            keyid = None
            for f in fingerprints:
                key = self.fpr_lookup.get(f, None)
                if key not in self.keys:
                    continue
                self.keys[key]["uid"] = uid

                # Only resolve the database uid id once per LDAP entry.
                if keyid != None:
                    continue
                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, name)
                byname[uid] = (keyid, name)

        return (byname, byuid)

    def generate_users_from_keyring(self, format, session):
        """Generate uid entries from the keyring's email addresses.

        @rtype: tuple
        @return: (byname, byuid) dictionaries as in import_users_from_ldap."""
        byuid = {}
        byname = {}
        any_invalid = False
        for x in self.keys.keys():
            if "email" not in self.keys[x]:
                # Key without a usable address: mark with the invalid-uid entry.
                any_invalid = True
                self.keys[x]["uid"] = format % "invalid-uid"
            else:
                uid = format % self.keys[x]["email"]
                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, self.keys[x]["name"])
                byname[uid] = (keyid, self.keys[x]["name"])
                self.keys[x]["uid"] = uid

        if any_invalid:
            uid = format % "invalid-uid"
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, "ungeneratable user id")
            byname[uid] = (keyid, "ungeneratable user id")

        return (byname, byuid)
1800 __all__.append('Keyring')
@session_wrapper
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @rtype: Keyring
    @return: the Keyring object for this keyring
    """
    q = session.query(Keyring).filter_by(keyring_name=keyring)

    try:
        return q.one()
    except NoResultFound:
        return None
1822 __all__.append('get_keyring')
@session_wrapper
def get_active_keyring_paths(session=None):
    """
    @rtype: list
    @return: list of active keyring paths, highest priority first
    """
    return [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).order_by(desc(Keyring.priority)).all() ]
1832 __all__.append('get_active_keyring_paths')
def get_primary_keyring_path(session=None):
    """
    Get the full path to the highest priority active keyring

    @rtype: str or None
    @return: path to the active keyring with the highest priority or None if no
             keyring is configured
    """
    keyrings = get_active_keyring_paths()

    if len(keyrings) > 0:
        return keyrings[0]
    else:
        return None
1850 __all__.append('get_primary_keyring_path')
1852 ################################################################################
class KeyringACLMap(object):
    """ORM class mapping a keyring to the source ACLs it grants."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<KeyringACLMap %s>' % self.keyring_acl_map_id
1861 __all__.append('KeyringACLMap')
1863 ################################################################################
class DBChange(object):
    """ORM class for an uploaded .changes file."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DBChange %s>' % self.changesname

    def clean_from_queue(self):
        """Detach this changes entry from its policy queue and drop its
        file associations."""
        session = DBConn().session().object_session(self)

        # Remove changes_pool_files entries
        self.poolfiles = []

        # Remove changes_pending_files references
        self.files = []

        # Clear out of queue
        self.in_queue = None
        self.approved_for_id = None
1885 __all__.append('DBChange')
@session_wrapper
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return: DBChange object for the given filename (C{None} if not present)
    """
    q = session.query(DBChange).filter_by(changesname=filename)

    try:
        return q.one()
    except NoResultFound:
        return None
1910 __all__.append('get_dbchange')
1912 ################################################################################
class Location(ORMObject):
    """ORM class for a pool location on disk."""

    def __init__(self, path = None, component = None):
        self.path = path
        self.component = component
        # the column 'type' should go away, see comment at mapper
        self.archive_type = 'pool'

    def properties(self):
        return ['path', 'location_id', 'archive_type', 'component', \
            'files_count']

    def not_null_constraints(self):
        return ['path', 'archive_type']
1928 __all__.append('Location')
@session_wrapper
def get_location(location, component=None, archive=None, session=None):
    """
    Returns Location object for the given combination of location, component
    and archive

    @type location: string
    @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}

    @type component: string
    @param component: the component name (if None, no restriction applied)

    @type archive: string
    @param archive: the archive name (if None, no restriction applied)

    @rtype: Location / None
    @return: Either a Location object or None if one can't be found
    """
    q = session.query(Location).filter_by(path=location)

    if archive is not None:
        q = q.join(Archive).filter_by(archive_name=archive)

    if component is not None:
        q = q.join(Component).filter_by(component_name=component)

    try:
        return q.one()
    except NoResultFound:
        return None
1962 __all__.append('get_location')
1964 ################################################################################
class Maintainer(ORMObject):
    """ORM class for a package maintainer (full RFC822 name/address)."""

    def __init__(self, name = None):
        self.name = name

    def properties(self):
        return ['name', 'maintainer_id']

    def not_null_constraints(self):
        return ['name']

    def get_split_maintainer(self):
        # Guard against rows loaded without a name.
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())
1982 __all__.append('Maintainer')
@session_wrapper
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """
    q = session.query(Maintainer).filter_by(name=name)
    try:
        ret = q.one()
    except NoResultFound:
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()
        ret = maintainer

    return ret
2016 __all__.append('get_or_set_maintainer')
@session_wrapper
def get_maintainer(maintainer_id, session=None):
    """
    Return the Maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer
    @return: the Maintainer with this C{maintainer_id}
    """
    return session.query(Maintainer).get(maintainer_id)
2033 __all__.append('get_maintainer')
2035 ################################################################################
class NewComment(object):
    """ORM class for an ftpmaster comment on a package in the NEW queue."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
2044 __all__.append('NewComment')
@session_wrapper
def has_new_comment(package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """
    q = session.query(NewComment)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    return bool(q.count() > 0)
2071 __all__.append('has_new_comment')
@session_wrapper
def get_new_comments(package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """
    q = session.query(NewComment)
    if package is not None: q = q.filter_by(package=package)
    if version is not None: q = q.filter_by(version=version)
    if comment_id is not None: q = q.filter_by(comment_id=comment_id)

    return q.all()
2103 __all__.append('get_new_comments')
2105 ################################################################################
class Override(ORMObject):
    """ORM class for an override entry (section/priority of a package)."""

    def __init__(self, package = None, suite = None, component = None, overridetype = None, \
        section = None, priority = None):
        self.package = package
        self.suite = suite
        self.component = component
        self.overridetype = overridetype
        self.section = section
        self.priority = priority

    def properties(self):
        return ['package', 'suite', 'component', 'overridetype', 'section', \
            'priority']

    def not_null_constraints(self):
        return ['package', 'suite', 'component', 'overridetype', 'section']
2124 __all__.append('Override')
@session_wrapper
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to.  If
                  None, don't limit.  Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
                      limit to.  If None, don't limit.  Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
                         limit to.  If None, don't limit.  Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """
    q = session.query(Override)
    q = q.filter_by(package=package)

    # Each scalar filter value is normalised to a list so IN() can be used.
    if suite is not None:
        if not isinstance(suite, list): suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list): component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list): overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

    return q.all()
2171 __all__.append('get_override')
2174 ################################################################################
class OverrideType(ORMObject):
    """ORM class for the override_type table (deb, udeb, dsc)."""

    def __init__(self, overridetype = None):
        self.overridetype = overridetype

    def properties(self):
        """Attributes shown by ORMObject's generic repr/JSON support."""
        return ['overridetype', 'overridetype_id', 'overrides_count']

    def not_null_constraints(self):
        """Columns that must be set before the object may be written."""
        return ['overridetype']
2186 __all__.append('OverrideType')
@session_wrapper
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: OverrideType
    @return: the OverrideType object for the given override type, or C{None}
    """
    q = session.query(OverrideType).filter_by(overridetype=override_type)

    try:
        return q.one()
    except NoResultFound:
        return None
2211 __all__.append('get_override_type')
2213 ################################################################################
class PolicyQueue(object):
    """ORM class for a policy queue (e.g. NEW, byhand)."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<PolicyQueue %s>' % self.queue_name
2222 __all__.append('PolicyQueue')
@session_wrapper
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(queue_name=queuename)

    try:
        return q.one()
    except NoResultFound:
        return None
2247 __all__.append('get_policy_queue')
@session_wrapper
def get_policy_queue_from_path(pathname, session=None):
    """
    Returns PolicyQueue object for given C{path name}

    @type pathname: string
    @param pathname: The path

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(path=pathname)

    try:
        return q.one()
    except NoResultFound:
        return None
2272 __all__.append('get_policy_queue_from_path')
2274 ################################################################################
class PolicyQueueUpload(object):
    """ORM class for an upload sitting in a policy queue."""

    def __cmp__(self, other):
        # Order by source name, then version; source uploads sort before
        # binary-only ones; the changes filename is the final tie-breaker.
        ret = cmp(self.changes.source, other.changes.source)
        if ret == 0:
            ret = apt_pkg.version_compare(self.changes.version, other.changes.version)
        if ret == 0:
            if self.source is not None and other.source is None:
                ret = -1
            elif self.source is None and other.source is not None:
                ret = 1
        if ret == 0:
            ret = cmp(self.changes.changesname, other.changes.changesname)
        return ret
2290 __all__.append('PolicyQueueUpload')
2292 ################################################################################
class PolicyQueueByhandFile(object):
    """ORM class for a byhand file attached to a policy queue upload."""
    pass
2297 __all__.append('PolicyQueueByhandFile')
2299 ################################################################################
class Priority(ORMObject):
    """ORM class for a package priority (required, important, ...)."""

    def __init__(self, priority = None, level = None):
        self.priority = priority
        self.level = level

    def properties(self):
        return ['priority', 'priority_id', 'level', 'overrides_count']

    def not_null_constraints(self):
        return ['priority', 'level']

    def __eq__(self, val):
        # Allow direct comparison against the priority name string.
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented
2324 __all__.append('Priority')
@session_wrapper
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority
    """
    q = session.query(Priority).filter_by(priority=priority)

    try:
        return q.one()
    except NoResultFound:
        return None
2349 __all__.append('get_priority')
@session_wrapper
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """
    ret = {}
    q = session.query(Priority)
    for x in q.all():
        ret[x.priority] = x.priority_id

    return ret
2371 __all__.append('get_priorities')
2373 ################################################################################
class Section(ORMObject):
    """ORM class for a package section (admin, devel, ...)."""

    def __init__(self, section = None):
        self.section = section

    def properties(self):
        return ['section', 'section_id', 'overrides_count']

    def not_null_constraints(self):
        return ['section']

    def __eq__(self, val):
        # Allow direct comparison against the section name string.
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented
2397 __all__.append('Section')
@session_wrapper
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name
    """
    q = session.query(Section).filter_by(section=section)

    try:
        return q.one()
    except NoResultFound:
        return None
2422 __all__.append('get_section')
@session_wrapper
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """
    ret = {}
    q = session.query(Section)
    for x in q.all():
        ret[x.section] = x.section_id

    return ret
2444 __all__.append('get_sections')
2446 ################################################################################
class SrcContents(ORMObject):
    """ORM class associating a filename with the source package shipping it."""

    def __init__(self, file = None, source = None):
        self.file = file
        self.source = source

    def properties(self):
        return ['file', 'source']
2456 __all__.append('SrcContents')
2458 ################################################################################
class DBSource(ORMObject):
    """ORM class for a source package in the archive."""

    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None, fingerprint = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date
        self.fingerprint = fingerprint

    @property
    def pkid(self):
        return self.source_id

    def properties(self):
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \
            'install_date', 'binaries_count', 'uploaders_count']

    def not_null_constraints(self):
        return ['source', 'version', 'install_date', 'maintainer', \
            'changedby', 'poolfile']

    def read_control_fields(self):
        """
        Reads the control information from a dsc

        @rtype: tuple
        @return: fields is the dsc information in a dictionary form
        """
        fullpath = self.poolfile.fullpath
        contents = open(fullpath, 'r').read()
        signed_file = SignedFile(contents, keyrings=[], require_signature=False)
        fields = apt_pkg.TagSection(signed_file.contents)
        return fields

    metadata = association_proxy('key', 'value')

    def get_component_name(self):
        return self.poolfile.location.component.component_name

    def scan_contents(self):
        """
        Returns a set of names for non directories. The path names are
        normalized after converting them from either utf-8 or iso8859-1
        encoding.
        """
        fullpath = self.poolfile.fullpath
        from daklib.contents import UnpackedSource
        unpacked = UnpackedSource(fullpath)
        fileset = set()
        for name in unpacked.get_all_filenames():
            # enforce proper utf-8 encoding
            try:
                name.decode('utf-8')
            except UnicodeDecodeError:
                name = name.decode('iso8859-1').encode('utf-8')
            fileset.add(name)
        return fileset
2521 __all__.append('DBSource')
@session_wrapper
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
      1. exact match     => 1.0-3
      2. bin-only NMU    => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: list
    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: returns 1 if a source with expected version is found, otherwise 0
    """
    ret = True

    # Strip a binNMU suffix so 1.0-3+b1 matches source version 1.0-3.
    from daklib.regexes import re_bin_only_nmu
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))
        if suite != "any":
            # source must exist in 'suite' or a suite that is enhanced by 'suite'
            s = get_suite(suite, session)
            if s:
                enhances_vcs = session.query(VersionCheck).filter(VersionCheck.suite==s).filter_by(check='Enhances')
                considered_suites = [ vc.reference for vc in enhances_vcs ]
                considered_suites.append(s)

                q = q.filter(DBSource.suites.any(Suite.suite_id.in_([s.suite_id for s in considered_suites])))

        if q.count() > 0:
            continue

        # No source found so return not ok
        ret = False
        break

    return ret
2576 __all__.append('source_exists')
def get_suites_source_in(source, session=None):
    Returns list of Suite objects which given C{source} name is in
    @param source: DBSource package name to search for
    @return: list of Suite objects for the given source
    # Any suite having at least one source association with this name matches.
    return session.query(Suite).filter(Suite.sources.any(source=source)).all()
2592 __all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    Returns list of DBSource objects for given C{source} name and other parameters
    @param source: DBSource package name to search for
    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable
    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect. If True or False, only
    return packages with that dm_upload_allowed setting
    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)
    @return: list of DBSource objects for the given name (may be empty)
    # Start with the mandatory name filter; the optional filters below are
    # only applied when explicitly requested by the caller.
    q = session.query(DBSource).filter_by(source=source)
    if version is not None:
        q = q.filter_by(version=version)
    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)
2627 __all__.append('get_sources_from_name')
2629 # FIXME: This function fails badly if it finds more than 1 source package and
2630 # its implementation is trivial enough to be inlined.
def get_source_in_suite(source, suite, session=None):
    Returns a DBSource object for a combination of C{source} and C{suite}.
    - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
    - B{suite} - a suite name, eg. I{unstable}
    @type source: string
    @param source: source package name
    @param suite: the suite name
    @return: the DBSource object for I{source} in I{suite}
    # Delegate the filtering to Suite.get_sources() so the suite/source join
    # logic lives in one place.
    q = get_suite(suite, session).get_sources(source)
    except NoResultFound:
2656 __all__.append('get_source_in_suite')
def import_metadata_into_db(obj, session=None):
    This routine works on either DBBinary or DBSource objects and imports
    their metadata into the database
    # Both DBBinary and DBSource expose read_control_fields(); each field of
    # the control file becomes one metadata key/value row.
    fields = obj.read_control_fields()
    for k in fields.keys():
            # Most values are plain ASCII and can be stored directly.
            val = str(fields[k])
        except UnicodeEncodeError:
            # Fall back to UTF-8
                val = fields[k].encode('utf-8')
            except UnicodeEncodeError:
                # Finally try iso8859-1
                val = fields[k].encode('iso8859-1')
                # Otherwise we allow the exception to percolate up and we cause
                # a reject as someone is playing silly buggers
        # get_or_set_metadatakey() creates the key row on first use.
        obj.metadata[get_or_set_metadatakey(k, session)] = val
    session.commit_or_flush()
2683 __all__.append('import_metadata_into_db')
2686 ################################################################################
def split_uploaders(uploaders_list):
    """
    Split the Uploaders field into the individual uploaders and yield each of
    them. Beware: email addresses might contain commas.

    @type uploaders_list: string
    @param uploaders_list: the raw Uploaders field value

    @rtype: generator of strings
    @return: each uploader entry, stripped of surrounding whitespace
    """
    # Only split on commas that directly follow a closing '>' (the end of an
    # email address) — display names themselves may legitimately contain
    # commas, so a naive split(',') would break them apart.
    for uploader in re.sub(r">[ ]*,", ">\t", uploaders_list).split("\t"):
        yield uploader.strip()
def add_dsc_to_db(u, filename, session=None):
    """Add a source upload (.dsc and its files) to the database.

    @return: tuple (source, dsc_component, dsc_location_id, pfs) where pfs
    is the list of PoolFile objects touched by this upload.
    """
    entry = u.pkg.files[filename]
    source.source = u.pkg.dsc["source"]
    source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
    source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    # If Changed-By isn't available, fall back to maintainer
    if u.pkg.changes.has_key("changed-by"):
        source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
        source.changedby_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    source.install_date = datetime.now().date()
    dsc_component = entry["component"]
    dsc_location_id = entry["location id"]
    # "dm-upload-allowed: yes" in the .dsc grants Debian Maintainer uploads.
    source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")
    # Set up a new poolfile if necessary
    if not entry.has_key("files id") or not entry["files id"]:
        # Prefix the on-disk name with its pool directory to get the pool path.
        filename = entry["pool name"] + filename
        poolfile = add_poolfile(filename, entry, dsc_location_id, session)
        pfs.append(poolfile)
        entry["files id"] = poolfile.file_id
    source.poolfile_id = entry["files id"]
    # Associate the source with every suite listed in the Distribution field.
    suite_names = u.pkg.changes["distribution"].keys()
    source.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()
    # Add the source files to the DB (files and dsc_files)
    dscfile.source_id = source.source_id
    dscfile.poolfile_id = entry["files id"]
    session.add(dscfile)
    for dsc_file, dentry in u.pkg.dsc_files.items():
        df.source_id = source.source_id
        # If the .orig tarball is already in the pool, it's
        # files id is stored in dsc_files by check_dsc().
        files_id = dentry.get("files id", None)
        # Find the entry in the files hash
        # TODO: Bail out here properly
        for f, e in u.pkg.files.items():
        if files_id is None:
            filename = dfentry["pool name"] + dsc_file
            (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
            # FIXME: needs to check for -1/-2 and or handle exception
            if found and obj is not None:
                files_id = obj.file_id
            # If still not found, add it
            if files_id is None:
                # HACK: Force sha1sum etc into dentry
                dentry["sha1sum"] = dfentry["sha1sum"]
                dentry["sha256sum"] = dfentry["sha256sum"]
                poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
                pfs.append(poolfile)
                files_id = poolfile.file_id
                poolfile = get_poolfile_by_id(files_id, session)
                if poolfile is None:
                    utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
                pfs.append(poolfile)
        df.poolfile_id = files_id
    # Add the src_uploaders to the DB
    session.refresh(source)
    # The maintainer always counts as an uploader.
    source.uploaders = [source.maintainer]
    if u.pkg.dsc.has_key("uploaders"):
        for up in split_uploaders(u.pkg.dsc["uploaders"]):
            source.uploaders.append(get_or_set_maintainer(up, session))
    return source, dsc_component, dsc_location_id, pfs
2794 __all__.append('add_dsc_to_db')
def add_deb_to_db(u, filename, session=None):
    Contrary to what you might expect, this routine deals with both
    debs and udebs. That info is in 'dbtype', whilst 'type' is
    'deb' for both of them
    entry = u.pkg.files[filename]
    bin.package = entry["package"]
    bin.version = entry["version"]
    bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
    bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    bin.arch_id = get_architecture(entry["architecture"], session).arch_id
    # 'dbtype' distinguishes deb from udeb.
    bin.binarytype = entry["dbtype"]
    # Prefix the on-disk name with its pool directory to get the pool path.
    filename = entry["pool name"] + filename
    fullpath = os.path.join(cnf["Dir::Pool"], filename)
    if not entry.get("location id", None):
        entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id
    if entry.get("files id", None):
        # NOTE(review): bin.poolfile_id is read here before it is assigned on
        # the next line — this looks like it should be entry["files id"];
        # confirm intended behavior before relying on 'poolfile' here.
        poolfile = get_poolfile_by_id(bin.poolfile_id)
        bin.poolfile_id = entry["files id"]
        poolfile = add_poolfile(filename, entry, entry["location id"], session)
        bin.poolfile_id = entry["files id"] = poolfile.file_id
    # Look up the source package this binary claims to be built from.
    bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
    # If we couldn't find anything and the upload contains Arch: source,
    # fall back to trying the source package, source version uploaded
    # This maintains backwards compatibility with previous dak behaviour
    # and deals with slightly broken binary debs which don't properly
    # declare their source package name
    if len(bin_sources) == 0:
        if u.pkg.changes["architecture"].has_key("source") \
           and u.pkg.dsc.has_key("source") and u.pkg.dsc.has_key("version"):
            bin_sources = get_sources_from_name(u.pkg.dsc["source"], u.pkg.dsc["version"], session=session)
    # If we couldn't find a source here, we reject
    # TODO: Fix this so that it doesn't kill process-upload and instead just
    # performs a reject. To be honest, we should probably spot this
    # *much* earlier than here
    if len(bin_sources) != 1:
        raise NoSourceFieldError("Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
                                  (bin.package, bin.version, entry["architecture"],
                                   filename, bin.binarytype, u.pkg.changes["fingerprint"]))
    bin.source_id = bin_sources[0].source_id
    # Each Built-Using entry must resolve to exactly one archived source.
    if entry.has_key("built-using"):
        for srcname, version in entry["built-using"]:
            exsources = get_sources_from_name(srcname, version, session=session)
            if len(exsources) != 1:
                raise NoSourceFieldError("Unable to find source package (%s = %s) in Built-Using for %s (%s), %s, file %s, type %s, signed by %s" % \
                                          (srcname, version, bin.package, bin.version, entry["architecture"],
                                           filename, bin.binarytype, u.pkg.changes["fingerprint"]))
            bin.extra_sources.append(exsources[0])
    # Add and flush object so it has an ID
    # Associate the binary with every suite listed in the Distribution field.
    suite_names = u.pkg.changes["distribution"].keys()
    bin.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()
    # Deal with contents - disabled for now
    #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
    #  print "REJECT\nCould not determine contents of package %s" % bin.package
    #  session.rollback()
    #  raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
    return bin, poolfile
2879 __all__.append('add_deb_to_db')
2881 ################################################################################
# ACL row controlling who may upload source packages.
class SourceACL(object):
    def __init__(self, *args, **kwargs):
        return '<SourceACL %s>' % self.source_acl_id
2890 __all__.append('SourceACL')
2892 ################################################################################
# A source package format (e.g. "1.0", "3.0 (quilt)") known to the archive.
class SrcFormat(object):
    def __init__(self, *args, **kwargs):
        return '<SrcFormat %s>' % (self.format_name)
2901 __all__.append('SrcFormat')
2903 ################################################################################
# Mapping of human-readable labels to Suite attribute names, used when
# rendering a suite's details as text.
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
2921 # Why the heck don't we have any UNIQUE constraints in table suite?
2922 # TODO: Add UNIQUE constraints for appropriate columns.
2923 class Suite(ORMObject):
2924 def __init__(self, suite_name = None, version = None):
2925 self.suite_name = suite_name
2926 self.version = version
2928 def properties(self):
2929 return ['suite_name', 'version', 'sources_count', 'binaries_count', \
2932 def not_null_constraints(self):
2933 return ['suite_name']
2935 def __eq__(self, val):
2936 if isinstance(val, str):
2937 return (self.suite_name == val)
2938 # This signals to use the normal comparison operator
2939 return NotImplemented
2941 def __ne__(self, val):
2942 if isinstance(val, str):
2943 return (self.suite_name != val)
2944 # This signals to use the normal comparison operator
2945 return NotImplemented
2949 for disp, field in SUITE_FIELDS:
2950 val = getattr(self, field, None)
2952 ret.append("%s: %s" % (disp, val))
2954 return "\n".join(ret)
    def get_architectures(self, skipsrc=False, skipall=False):
        Returns list of Architecture objects
        @type skipsrc: boolean
        @param skipsrc: Whether to skip returning the 'source' architecture entry
        @type skipall: boolean
        @param skipall: Whether to skip returning the 'all' architecture entry
        @return: list of Architecture objects for the given name (may be empty)
        # with_parent() restricts the query to architectures linked to this
        # suite via the suite_architectures association.
        q = object_session(self).query(Architecture).with_parent(self)
            q = q.filter(Architecture.arch_string != 'source')
            q = q.filter(Architecture.arch_string != 'all')
        # Results are returned sorted by architecture name.
        return q.order_by(Architecture.arch_string).all()
    def get_sources(self, source):
        Returns a query object representing DBSource that is part of C{suite}.
        - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
        @type source: string
        @param source: source package name
        @rtype: sqlalchemy.orm.query.Query
        @return: a query of DBSource
        # A query (not a result list) is returned so callers can refine it
        # further (e.g. add version filters) before executing.
        session = object_session(self)
        return session.query(DBSource).filter_by(source = source). \
    def get_overridesuite(self):
        # The suite whose overrides apply: this suite itself unless an
        # explicit overridesuite is configured, in which case look it up.
        if self.overridesuite is None:
        return object_session(self).query(Suite).filter_by(suite_name=self.overridesuite).one()
3005 return os.path.join(self.archive.path, 'dists', self.suite_name)
3007 __all__.append('Suite')
def get_suite(suite, session=None):
    Returns Suite object for given C{suite name}.
    @param suite: The name of the suite
    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)
    @return: Suite object for the requested suite name (None if not present)
    # Suite names are expected to be unique, so a single-row lookup suffices.
    q = session.query(Suite).filter_by(suite_name=suite)
    except NoResultFound:
3034 ################################################################################
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    Returns list of Architecture objects for given C{suite} name. The list is
    empty if suite does not exist.
    @param suite: Suite name to search for
    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry
    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry
    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)
    @return: list of Architecture objects for the given name (may be empty)
    # Delegate to Suite.get_architectures(); an unknown suite makes
    # get_suite() return None, triggering the AttributeError below.
    return get_suite(suite, session).get_architectures(skipsrc, skipall)
    except AttributeError:
3068 ################################################################################
3070 class Uid(ORMObject):
3071 def __init__(self, uid = None, name = None):
3075 def __eq__(self, val):
3076 if isinstance(val, str):
3077 return (self.uid == val)
3078 # This signals to use the normal comparison operator
3079 return NotImplemented
3081 def __ne__(self, val):
3082 if isinstance(val, str):
3083 return (self.uid != val)
3084 # This signals to use the normal comparison operator
3085 return NotImplemented
3087 def properties(self):
3088 return ['uid', 'name', 'fingerprint']
3090 def not_null_constraints(self):
3093 __all__.append('Uid')
def get_or_set_uid(uidname, session=None):
    Returns uid object for given uidname.
    If no matching uidname is found, a row is inserted.
    @type uidname: string
    @param uidname: The uid to add
    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.
    @return: the uid object for the given uidname
    # Look for an existing row first; insert only on NoResultFound.
    q = session.query(Uid).filter_by(uid=uidname)
    except NoResultFound:
        session.commit_or_flush()
def get_uid_from_fingerprint(fpr, session=None):
    # Resolve the Uid owning the key with the given fingerprint string.
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)
    except NoResultFound:
3141 ################################################################################
# A block entry preventing uploads of a given source package.
class UploadBlock(object):
    def __init__(self, *args, **kwargs):
        return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
3150 __all__.append('UploadBlock')
3152 ################################################################################
# A metadata key name (control-file field name) shared by binary and
# source metadata tables.
class MetadataKey(ORMObject):
    def __init__(self, key = None):
    def properties(self):
    def not_null_constraints(self):
3164 __all__.append('MetadataKey')
def get_or_set_metadatakey(keyname, session=None):
    Returns MetadataKey object for given keyname.
    If no matching keyname is found, a row is inserted.
    @type keyname: string
    @param keyname: The keyname to add
    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.
    @return: the metadatakey object for the given keyname
    # Look for an existing row first; insert only on NoResultFound.
    q = session.query(MetadataKey).filter_by(key=keyname)
    except NoResultFound:
        ret = MetadataKey(keyname)
        session.commit_or_flush()
3198 ################################################################################
# One key/value metadata entry attached to a binary package.
class BinaryMetadata(ORMObject):
    def __init__(self, key = None, value = None, binary = None):
        self.binary = binary
    def properties(self):
        return ['binary', 'key', 'value']
    def not_null_constraints(self):
3214 ################################################################################
# One key/value metadata entry attached to a source package.
class SourceMetadata(ORMObject):
    def __init__(self, key = None, value = None, source = None):
        self.source = source
    def properties(self):
        return ['source', 'key', 'value']
    def not_null_constraints(self):
3230 ################################################################################
# A version-check relation between two suites (e.g. 'Enhances',
# 'MustBeNewerThan') used when validating uploads.
class VersionCheck(ORMObject):
    def __init__(self, *args, **kwargs):
    def properties(self):
        #return ['suite_id', 'check', 'reference_id']
    def not_null_constraints(self):
        return ['suite', 'check', 'reference']
3243 __all__.append('VersionCheck')
def get_version_checks(suite_name, check = None, session = None):
    # Return the VersionCheck rows for a suite, optionally restricted to a
    # single check type (e.g. 'Enhances').
    suite = get_suite(suite_name, session)
    # Make sure that what we return is iterable so that list comprehensions
    # involving this don't cause a traceback
    q = session.query(VersionCheck).filter_by(suite=suite)
        q = q.filter_by(check=check)
3259 ################################################################################
3261 class DBConn(object):
3263 database module init.
3267 def __init__(self, *args, **kwargs):
3268 self.__dict__ = self.__shared_state
3270 if not getattr(self, 'initialised', False):
3271 self.initialised = True
3272 self.debug = kwargs.has_key('debug')
3275 def __setuptables(self):
3282 'binaries_metadata',
3286 'build_queue_files',
3287 'build_queue_policy_files',
3292 'changes_pending_binaries',
3293 'changes_pending_files',
3294 'changes_pending_source',
3295 'changes_pending_files_map',
3296 'changes_pending_source_files',
3297 'changes_pool_files',
3299 'external_overrides',
3300 'extra_src_references',
3302 'files_archive_map',
3310 # TODO: the maintainer column in table override should be removed.
3314 'policy_queue_upload',
3315 'policy_queue_upload_binaries_map',
3316 'policy_queue_byhand_file',
3327 'suite_architectures',
3328 'suite_build_queue_copy',
3329 'suite_src_formats',
3336 'almost_obsolete_all_associations',
3337 'almost_obsolete_src_associations',
3338 'any_associations_source',
3339 'bin_associations_binaries',
3340 'binaries_suite_arch',
3343 'newest_all_associations',
3344 'newest_any_associations',
3346 'newest_src_association',
3347 'obsolete_all_associations',
3348 'obsolete_any_associations',
3349 'obsolete_any_by_all_associations',
3350 'obsolete_src_associations',
3352 'src_associations_bin',
3353 'src_associations_src',
3354 'suite_arch_by_name',
3357 for table_name in tables:
3358 table = Table(table_name, self.db_meta, \
3359 autoload=True, useexisting=True)
3360 setattr(self, 'tbl_%s' % table_name, table)
3362 for view_name in views:
3363 view = Table(view_name, self.db_meta, autoload=True)
3364 setattr(self, 'view_%s' % view_name, view)
3366 def __setupmappers(self):
3367 mapper(Architecture, self.tbl_architecture,
3368 properties = dict(arch_id = self.tbl_architecture.c.id,
3369 suites = relation(Suite, secondary=self.tbl_suite_architectures,
3370 order_by=self.tbl_suite.c.suite_name,
3371 backref=backref('architectures', order_by=self.tbl_architecture.c.arch_string))),
3372 extension = validator)
3374 mapper(Archive, self.tbl_archive,
3375 properties = dict(archive_id = self.tbl_archive.c.id,
3376 archive_name = self.tbl_archive.c.name))
3378 mapper(ArchiveFile, self.tbl_files_archive_map,
3379 properties = dict(archive = relation(Archive, backref='files'),
3380 component = relation(Component),
3381 file = relation(PoolFile, backref='archives')))
3383 mapper(BuildQueue, self.tbl_build_queue,
3384 properties = dict(queue_id = self.tbl_build_queue.c.id,
3385 suite = relation(Suite, primaryjoin=(self.tbl_build_queue.c.suite_id==self.tbl_suite.c.id))))
3387 mapper(BuildQueueFile, self.tbl_build_queue_files,
3388 properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
3389 poolfile = relation(PoolFile, backref='buildqueueinstances')))
3391 mapper(BuildQueuePolicyFile, self.tbl_build_queue_policy_files,
3393 build_queue = relation(BuildQueue, backref='policy_queue_files'),
3394 file = relation(ChangePendingFile, lazy='joined')))
3396 mapper(DBBinary, self.tbl_binaries,
3397 properties = dict(binary_id = self.tbl_binaries.c.id,
3398 package = self.tbl_binaries.c.package,
3399 version = self.tbl_binaries.c.version,
3400 maintainer_id = self.tbl_binaries.c.maintainer,
3401 maintainer = relation(Maintainer),
3402 source_id = self.tbl_binaries.c.source,
3403 source = relation(DBSource, backref='binaries'),
3404 arch_id = self.tbl_binaries.c.architecture,
3405 architecture = relation(Architecture),
3406 poolfile_id = self.tbl_binaries.c.file,
3407 poolfile = relation(PoolFile),
3408 binarytype = self.tbl_binaries.c.type,
3409 fingerprint_id = self.tbl_binaries.c.sig_fpr,
3410 fingerprint = relation(Fingerprint),
3411 install_date = self.tbl_binaries.c.install_date,
3412 suites = relation(Suite, secondary=self.tbl_bin_associations,
3413 backref=backref('binaries', lazy='dynamic')),
3414 extra_sources = relation(DBSource, secondary=self.tbl_extra_src_references,
3415 backref=backref('extra_binary_references', lazy='dynamic')),
3416 key = relation(BinaryMetadata, cascade='all',
3417 collection_class=attribute_mapped_collection('key'))),
3418 extension = validator)
3420 mapper(BinaryACL, self.tbl_binary_acl,
3421 properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))
3423 mapper(BinaryACLMap, self.tbl_binary_acl_map,
3424 properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
3425 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
3426 architecture = relation(Architecture)))
3428 mapper(Component, self.tbl_component,
3429 properties = dict(component_id = self.tbl_component.c.id,
3430 component_name = self.tbl_component.c.name),
3431 extension = validator)
3433 mapper(DBConfig, self.tbl_config,
3434 properties = dict(config_id = self.tbl_config.c.id))
3436 mapper(DSCFile, self.tbl_dsc_files,
3437 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
3438 source_id = self.tbl_dsc_files.c.source,
3439 source = relation(DBSource),
3440 poolfile_id = self.tbl_dsc_files.c.file,
3441 poolfile = relation(PoolFile)))
3443 mapper(ExternalOverride, self.tbl_external_overrides,
3445 suite_id = self.tbl_external_overrides.c.suite,
3446 suite = relation(Suite),
3447 component_id = self.tbl_external_overrides.c.component,
3448 component = relation(Component)))
3450 mapper(PoolFile, self.tbl_files,
3451 properties = dict(file_id = self.tbl_files.c.id,
3452 filesize = self.tbl_files.c.size),
3453 extension = validator)
3455 mapper(Fingerprint, self.tbl_fingerprint,
3456 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
3457 uid_id = self.tbl_fingerprint.c.uid,
3458 uid = relation(Uid),
3459 keyring_id = self.tbl_fingerprint.c.keyring,
3460 keyring = relation(Keyring),
3461 source_acl = relation(SourceACL),
3462 binary_acl = relation(BinaryACL)),
3463 extension = validator)
3465 mapper(Keyring, self.tbl_keyrings,
3466 properties = dict(keyring_name = self.tbl_keyrings.c.name,
3467 keyring_id = self.tbl_keyrings.c.id))
3469 mapper(DBChange, self.tbl_changes,
3470 properties = dict(change_id = self.tbl_changes.c.id,
3471 poolfiles = relation(PoolFile,
3472 secondary=self.tbl_changes_pool_files,
3473 backref="changeslinks"),
3474 seen = self.tbl_changes.c.seen,
3475 source = self.tbl_changes.c.source,
3476 binaries = self.tbl_changes.c.binaries,
3477 architecture = self.tbl_changes.c.architecture,
3478 distribution = self.tbl_changes.c.distribution,
3479 urgency = self.tbl_changes.c.urgency,
3480 maintainer = self.tbl_changes.c.maintainer,
3481 changedby = self.tbl_changes.c.changedby,
3482 date = self.tbl_changes.c.date,
3483 version = self.tbl_changes.c.version,
3484 files = relation(ChangePendingFile,
3485 secondary=self.tbl_changes_pending_files_map,
3486 backref="changesfile"),
3487 in_queue_id = self.tbl_changes.c.in_queue,
3488 in_queue = relation(PolicyQueue,
3489 primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
3490 approved_for_id = self.tbl_changes.c.approved_for))
3492 mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
3493 properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))
3495 mapper(ChangePendingFile, self.tbl_changes_pending_files,
3496 properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
3497 filename = self.tbl_changes_pending_files.c.filename,
3498 size = self.tbl_changes_pending_files.c.size,
3499 md5sum = self.tbl_changes_pending_files.c.md5sum,
3500 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
3501 sha256sum = self.tbl_changes_pending_files.c.sha256sum))
3503 mapper(ChangePendingSource, self.tbl_changes_pending_source,
3504 properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
3505 change = relation(DBChange),
3506 maintainer = relation(Maintainer,
3507 primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
3508 changedby = relation(Maintainer,
3509 primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
3510 fingerprint = relation(Fingerprint),
3511 source_files = relation(ChangePendingFile,
3512 secondary=self.tbl_changes_pending_source_files,
3513 backref="pending_sources")))
3516 mapper(KeyringACLMap, self.tbl_keyring_acl_map,
3517 properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
3518 keyring = relation(Keyring, backref="keyring_acl_map"),
3519 architecture = relation(Architecture)))
3521 mapper(Location, self.tbl_location,
3522 properties = dict(location_id = self.tbl_location.c.id,
3523 component_id = self.tbl_location.c.component,
3524 component = relation(Component, backref='location'),
3525 archive_id = self.tbl_location.c.archive,
3526 archive = relation(Archive),
3527 # FIXME: the 'type' column is old cruft and
3528 # should be removed in the future.
3529 archive_type = self.tbl_location.c.type),
3530 extension = validator)
3532 mapper(Maintainer, self.tbl_maintainer,
3533 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
3534 maintains_sources = relation(DBSource, backref='maintainer',
3535 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
3536 changed_sources = relation(DBSource, backref='changedby',
3537 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
3538 extension = validator)
3540 mapper(NewComment, self.tbl_new_comments,
3541 properties = dict(comment_id = self.tbl_new_comments.c.id))
3543 mapper(Override, self.tbl_override,
3544 properties = dict(suite_id = self.tbl_override.c.suite,
3545 suite = relation(Suite, \
3546 backref=backref('overrides', lazy='dynamic')),
3547 package = self.tbl_override.c.package,
3548 component_id = self.tbl_override.c.component,
3549 component = relation(Component, \
3550 backref=backref('overrides', lazy='dynamic')),
3551 priority_id = self.tbl_override.c.priority,
3552 priority = relation(Priority, \
3553 backref=backref('overrides', lazy='dynamic')),
3554 section_id = self.tbl_override.c.section,
3555 section = relation(Section, \
3556 backref=backref('overrides', lazy='dynamic')),
3557 overridetype_id = self.tbl_override.c.type,
3558 overridetype = relation(OverrideType, \
3559 backref=backref('overrides', lazy='dynamic'))))
3561 mapper(OverrideType, self.tbl_override_type,
3562 properties = dict(overridetype = self.tbl_override_type.c.type,
3563 overridetype_id = self.tbl_override_type.c.id))
3565 mapper(PolicyQueue, self.tbl_policy_queue,
3566 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))
3568 mapper(PolicyQueueUpload, self.tbl_policy_queue_upload,
3570 changes = relation(DBChange),
3571 policy_queue = relation(PolicyQueue, backref='uploads'),
3572 target_suite = relation(Suite),
3573 source = relation(DBSource),
3574 binaries = relation(DBBinary, secondary=self.tbl_policy_queue_upload_binaries_map),
3577 mapper(PolicyQueueByhandFile, self.tbl_policy_queue_byhand_file,
3579 upload = relation(PolicyQueueUpload, backref='byhand'),
3583 mapper(Priority, self.tbl_priority,
3584 properties = dict(priority_id = self.tbl_priority.c.id))
3586 mapper(Section, self.tbl_section,
3587 properties = dict(section_id = self.tbl_section.c.id,
3588 section=self.tbl_section.c.section))
3590 mapper(DBSource, self.tbl_source,
3591 properties = dict(source_id = self.tbl_source.c.id,
3592 version = self.tbl_source.c.version,
3593 maintainer_id = self.tbl_source.c.maintainer,
3594 poolfile_id = self.tbl_source.c.file,
3595 poolfile = relation(PoolFile),
3596 fingerprint_id = self.tbl_source.c.sig_fpr,
3597 fingerprint = relation(Fingerprint),
3598 changedby_id = self.tbl_source.c.changedby,
3599 srcfiles = relation(DSCFile,
3600 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
3601 suites = relation(Suite, secondary=self.tbl_src_associations,
3602 backref=backref('sources', lazy='dynamic')),
3603 uploaders = relation(Maintainer,
3604 secondary=self.tbl_src_uploaders),
3605 key = relation(SourceMetadata, cascade='all',
3606 collection_class=attribute_mapped_collection('key'))),
3607 extension = validator)
3609 mapper(SourceACL, self.tbl_source_acl,
3610 properties = dict(source_acl_id = self.tbl_source_acl.c.id))
3612 mapper(SrcFormat, self.tbl_src_format,
3613 properties = dict(src_format_id = self.tbl_src_format.c.id,
3614 format_name = self.tbl_src_format.c.format_name))
3616 mapper(Suite, self.tbl_suite,
3617 properties = dict(suite_id = self.tbl_suite.c.id,
3618 policy_queue = relation(PolicyQueue),
3619 copy_queues = relation(BuildQueue,
3620 secondary=self.tbl_suite_build_queue_copy),
3621 srcformats = relation(SrcFormat, secondary=self.tbl_suite_src_formats,
3622 backref=backref('suites', lazy='dynamic')),
3623 archive = relation(Archive, backref='suites')),
3624 extension = validator)
3626 mapper(Uid, self.tbl_uid,
3627 properties = dict(uid_id = self.tbl_uid.c.id,
3628 fingerprint = relation(Fingerprint)),
3629 extension = validator)
3631 mapper(UploadBlock, self.tbl_upload_blocks,
3632 properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
3633 fingerprint = relation(Fingerprint, backref="uploadblocks"),
3634 uid = relation(Uid, backref="uploadblocks")))
3636 mapper(BinContents, self.tbl_bin_contents,
3638 binary = relation(DBBinary,
3639 backref=backref('contents', lazy='dynamic', cascade='all')),
3640 file = self.tbl_bin_contents.c.file))
3642 mapper(SrcContents, self.tbl_src_contents,
3644 source = relation(DBSource,
3645 backref=backref('contents', lazy='dynamic', cascade='all')),
3646 file = self.tbl_src_contents.c.file))
3648 mapper(MetadataKey, self.tbl_metadata_keys,
3650 key_id = self.tbl_metadata_keys.c.key_id,
3651 key = self.tbl_metadata_keys.c.key))
3653 mapper(BinaryMetadata, self.tbl_binaries_metadata,
3655 binary_id = self.tbl_binaries_metadata.c.bin_id,
3656 binary = relation(DBBinary),
3657 key_id = self.tbl_binaries_metadata.c.key_id,
3658 key = relation(MetadataKey),
3659 value = self.tbl_binaries_metadata.c.value))
3661 mapper(SourceMetadata, self.tbl_source_metadata,
3663 source_id = self.tbl_source_metadata.c.src_id,
3664 source = relation(DBSource),
3665 key_id = self.tbl_source_metadata.c.key_id,
3666 key = relation(MetadataKey),
3667 value = self.tbl_source_metadata.c.value))
3669 mapper(VersionCheck, self.tbl_version_check,
3671 suite_id = self.tbl_version_check.c.suite,
3672 suite = relation(Suite, primaryjoin=self.tbl_version_check.c.suite==self.tbl_suite.c.id),
3673 reference_id = self.tbl_version_check.c.reference,
3674 reference = relation(Suite, primaryjoin=self.tbl_version_check.c.reference==self.tbl_suite.c.id, lazy='joined')))
3676 ## Connection functions
3677 def __createconn(self):
# Build the PostgreSQL connection string from the dak configuration
# (DB::* keys), create the SQLAlchemy engine/metadata/session factory,
# and set up the table and mapper definitions.
#
# NOTE(review): this excerpt is elided — e.g. `cnf = Config()` after the
# import, the `else:` branch before the Unix-socket connstr below, the
# `try:` matching the `except OperationalError` near the end, and the
# sessionmaker keyword arguments are not visible here; confirm against
# the full file before editing.
3678 from config import Config
# Preferred form: a libpq service name (looked up in pg_service.conf).
3680 if cnf.has_key("DB::Service"):
3681 connstr = "postgresql://service=%s" % cnf["DB::Service"]
# Otherwise, an explicit host means a TCP/IP connection ...
3682 elif cnf.has_key("DB::Host"):
3684 connstr = "postgresql://%s" % cnf["DB::Host"]
# ... with an optional port ("-1" means "use the default port").
3685 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
3686 connstr += ":%s" % cnf["DB::Port"]
3687 connstr += "/%s" % cnf["DB::Name"]
# Fallback (presumably the elided `else:`): local Unix-socket
# connection, database name only; port passed as a query parameter.
3690 connstr = "postgresql:///%s" % cnf["DB::Name"]
3691 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
3692 connstr += "?port=%s" % cnf["DB::Port"]
# Engine options: SQL echo follows the debug flag; pool sizing is
# tunable via DB::PoolSize / DB::MaxOverflow.
3694 engine_args = { 'echo': self.debug }
3695 if cnf.has_key('DB::PoolSize'):
3696 engine_args['pool_size'] = int(cnf['DB::PoolSize'])
3697 if cnf.has_key('DB::MaxOverflow'):
3698 engine_args['max_overflow'] = int(cnf['DB::MaxOverflow'])
# SQLAlchemy 0.6 only: allow disabling psycopg2's native unicode
# handling via DB::Unicode=false.
3699 if sa_major_version == '0.6' and cnf.has_key('DB::Unicode') and \
3700 cnf['DB::Unicode'] == 'false':
3701 engine_args['use_native_unicode'] = False
3703 # Monkey patch a new dialect in in order to support service= syntax
3704 import sqlalchemy.dialects.postgresql
3705 from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
3706 class PGDialect_psycopg2_dak(PGDialect_psycopg2):
3707 def create_connect_args(self, url):
# For service URLs, hand psycopg2 a raw "service=NAME" DSN
# instead of letting SQLAlchemy parse host/db out of the URL.
# (21 == len('postgresql://service='), the prefix stripped here.)
3708 if str(url).startswith('postgresql://service='):
3710 servicename = str(url)[21:]
3711 return (['service=%s' % servicename], {})
# Anything else: defer to the stock psycopg2 dialect.
3713 return PGDialect_psycopg2.create_connect_args(self, url)
# Replace the registered postgresql dialect globally with our subclass.
3715 sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak
# Create engine, bind metadata, build the session factory, then
# install table and mapper definitions.
3718 self.db_pg = create_engine(connstr, **engine_args)
3719 self.db_meta = MetaData()
3720 self.db_meta.bind = self.db_pg
3721 self.db_smaker = sessionmaker(bind=self.db_pg,
3725 self.__setuptables()
3726 self.__setupmappers()
# NOTE(review): the opening `try:` for this handler is elided above.
# A failed connection aborts the program via utils.fubar().
3728 except OperationalError as e:
3730 utils.fubar("Cannot connect to database (%s)" % str(e))
# Remember the creating pid so session() can detect use after fork.
3732 self.pid = os.getpid()
# Hand out a new ORM session, rebuilding the connection first if the
# process has forked since the engine was created (DB connections must
# not be shared across fork boundaries).
# NOTE(review): excerpt is elided — the docstring's triple-quote
# delimiters, the reconnect statements under the pid check, the
# presumable `if work_mem > 0:` guard around the SET LOCAL, and the
# final `return session` are not visible here.
3734 def session(self, work_mem = 0):
3736 Returns a new session object. If a work_mem parameter is provided a new
3737 transaction is started and the work_mem parameter is set for this
3738 transaction. The work_mem parameter is measured in MB. A default value
3739 will be used if the parameter is not set.
3741 # reinitialize DBConn in new processes
3742 if self.pid != os.getpid():
3745 session = self.db_smaker()
# Scope the work_mem override to the current transaction only.
3747 session.execute("SET LOCAL work_mem TO '%d MB'" % work_mem)
# Export DBConn as part of this module's public API.
3750 __all__.append('DBConn')