5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
38 from os.path import normpath
45 from daklib.gpg import SignedFile
52 import simplejson as json
54 from datetime import datetime, timedelta
55 from errno import ENOENT
56 from tempfile import mkstemp, mkdtemp
57 from subprocess import Popen, PIPE
58 from tarfile import TarFile
60 from inspect import getargspec
63 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
65 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
66 backref, MapperExtension, EXT_CONTINUE, object_mapper, clear_mappers
67 from sqlalchemy import types as sqltypes
68 from sqlalchemy.orm.collections import attribute_mapped_collection
69 from sqlalchemy.ext.associationproxy import association_proxy
71 # Don't remove this, we re-export the exceptions to scripts which import us
72 from sqlalchemy.exc import *
73 from sqlalchemy.orm.exc import NoResultFound
75 # Only import Config until Queue stuff is changed to store its config
77 from config import Config
78 from textutils import fix_maintainer
79 from dak_exceptions import DBUpdateError, NoSourceFieldError, FileExistsError
81 # suppress some deprecation warnings in squeeze related to sqlalchemy
83 warnings.filterwarnings('ignore', \
84 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
86 warnings.filterwarnings('ignore', \
87 "Predicate of partial index .* ignored during reflection", \
91 ################################################################################
# Patch in support for the debversion field type so that it works during
# reflection.
# NOTE(review): the extract had the try/except and else branches elided;
# reconstructed below.

try:
    # that is for sqlalchemy 0.6
    UserDefinedType = sqltypes.UserDefinedType
except AttributeError:
    # this one for sqlalchemy 0.5
    UserDefinedType = sqltypes.TypeEngine

class DebVersion(UserDefinedType):
    """Column type for PostgreSQL's 'debversion' type (Debian version
    strings with dpkg comparison semantics)."""

    def get_col_spec(self):
        # Name emitted in DDL for this column type.
        return "DEBVERSION"

    def bind_processor(self, dialect):
        # No conversion needed when sending values to the database.
        return None

    # ' = None' is needed for sqlalchemy 0.5:
    def result_processor(self, dialect, coltype = None):
        # No conversion needed when reading values back.
        return None

sa_major_version = sqlalchemy.__version__[0:3]
if sa_major_version in ["0.5", "0.6", "0.7"]:
    # Make reflection recognise the debversion type.
    from sqlalchemy.databases import postgres
    postgres.ischema_names['debversion'] = DebVersion
else:
    raise Exception("dak only ported to SQLA versions 0.5 to 0.7. See daklib/dbconn.py")

__all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
125 ################################################################################
def session_wrapper(fn):
    """
    Wrapper around common ".., session=None):" handling. If the wrapped
    function is called without passing 'session', we create a local one
    and destroy it when the function ends.

    Also attaches a commit_or_flush method to the session; if we created a
    local session, this is a synonym for session.commit(), otherwise it is a
    synonym for session.flush().
    """
    # NOTE(review): the extract had the else-branches, try/finally and the
    # session.close() elided; reconstructed below.

    def wrapped(*args, **kwargs):
        private_transaction = False

        # Find the session object
        session = kwargs.get('session')

        if session is None:
            if len(args) <= len(getargspec(fn)[0]) - 1:
                # No session specified as last argument or in kwargs
                private_transaction = True
                session = kwargs['session'] = DBConn().session()
            else:
                # Session is last argument in args
                session = args[-1]
                if session is None:
                    args = list(args)
                    session = args[-1] = DBConn().session()
                    private_transaction = True

        if private_transaction:
            session.commit_or_flush = session.commit
        else:
            session.commit_or_flush = session.flush

        try:
            return fn(*args, **kwargs)
        finally:
            if private_transaction:
                # We created a session; close it.
                session.close()

    wrapped.__doc__ = fn.__doc__
    wrapped.func_name = fn.func_name

    return wrapped

__all__.append('session_wrapper')
176 ################################################################################
class ORMObject(object):
    """
    ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
    derived classes must implement the properties() method.
    """
    # NOTE(review): several structural lines (method headers, continue
    # statements, branch bodies) were elided from the extract; reconstructed
    # below from the surviving statements.

    def properties(self):
        """
        This method should be implemented by all derived classes and returns a
        list of the important properties. The properties 'created' and
        'modified' will be added automatically. A suffix '_count' should be
        added to properties that are lists or query objects. The most important
        property name should be returned as the first element in the list
        because it is used by repr().
        """
        return []

    def json(self):
        """
        Returns a JSON representation of the object based on the properties
        returned from the properties() method.
        """
        data = {}
        # add created and modified
        all_properties = self.properties() + ['created', 'modified']
        for property in all_properties:
            # check for list or query
            if property[-6:] == '_count':
                real_property = property[:-6]
                if not hasattr(self, real_property):
                    continue
                value = getattr(self, real_property)
                if hasattr(value, '__len__'):
                    # list
                    value = len(value)
                elif hasattr(value, 'count'):
                    # query (but not during validation)
                    if self.in_validation:
                        continue
                    value = value.count()
                else:
                    raise KeyError('Do not understand property %s.' % property)
            else:
                if not hasattr(self, property):
                    continue
                value = getattr(self, property)
            # skip empty values
            if value is None:
                continue
            elif isinstance(value, ORMObject):
                # use repr() for ORMObject types
                value = repr(value)
            else:
                # we want a string for all other types because json cannot
                # encode everything
                value = str(value)
            data[property] = value
        return json.dumps(data)

    def classname(self):
        """
        Returns the name of the class.
        """
        return type(self).__name__

    def __repr__(self):
        """
        Returns a short string representation of the object using the first
        element from the properties() method.
        """
        primary_property = self.properties()[0]
        value = getattr(self, primary_property)
        return '<%s %s>' % (self.classname(), str(value))

    def __str__(self):
        """
        Returns a human readable form of the object using the properties()
        method.
        """
        return '<%s %s>' % (self.classname(), self.json())

    def not_null_constraints(self):
        """
        Returns a list of properties that must be not NULL. Derived classes
        should override this method if needed.
        """
        return []

    validation_message = \
        "Validation failed because property '%s' must not be empty in object\n%s"

    # Guards json() against issuing queries while a flush is in progress.
    in_validation = False

    def validate(self):
        """
        This function validates the not NULL constraints as returned by
        not_null_constraints(). It raises the DBUpdateError exception if
        validation fails.
        """
        for property in self.not_null_constraints():
            # TODO: It is a bit awkward that the mapper configuration allow
            # directly setting the numeric _id columns. We should get rid of it
            # in the long run.
            if hasattr(self, property + '_id') and \
                getattr(self, property + '_id') is not None:
                continue
            if not hasattr(self, property) or getattr(self, property) is None:
                # str() might lead to races due to a 2nd flush
                self.in_validation = True
                message = self.validation_message % (property, str(self))
                self.in_validation = False
                raise DBUpdateError(message)

    @classmethod
    @session_wrapper
    def get(cls, primary_key, session = None):
        """
        This is a support function that allows getting an object by its primary
        key.

        Architecture.get(3[, session])

        instead of the more verbose

        session.query(Architecture).get(3)
        """
        return session.query(cls).get(primary_key)

    def session(self, replace = False):
        """
        Returns the current session that is associated with the object. May
        return None is object is in detached state.
        """
        return object_session(self)

    def clone(self, session = None):
        """
        Clones the current object in a new session and returns the new clone. A
        fresh session is created if the optional session parameter is not
        provided. The function will fail if a session is provided and has
        unflushed changes.

        RATIONALE: SQLAlchemy's session is not thread safe. This method clones
        an existing object to allow several threads to work with their own
        instances of an ORMObject.

        WARNING: Only persistent (committed) objects can be cloned. Changes
        made to the original object that are not committed yet will get lost.
        The session of the new object will always be rolled back to avoid
        resource leaks.
        """

        if self.session() is None:
            raise RuntimeError( \
                'Method clone() failed for detached object:\n%s' % self)
        self.session().flush()
        mapper = object_mapper(self)
        primary_key = mapper.primary_key_from_instance(self)
        object_class = self.__class__
        if session is None:
            session = DBConn().session()
        elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
            raise RuntimeError( \
                'Method clone() failed due to unflushed changes in session.')
        new_object = session.query(object_class).get(primary_key)
        # rolled back, as promised by the docstring, to avoid resource leaks
        session.rollback()
        if new_object is None:
            raise RuntimeError( \
                'Method clone() failed for non-persistent object:\n%s' % self)
        return new_object

__all__.append('ORMObject')
353 ################################################################################
class Validator(MapperExtension):
    """
    This class calls the validate() method for each instance for the
    'before_update' and 'before_insert' events. A global object validator is
    used for configuring the individual mappers.
    """

    def before_update(self, mapper, connection, instance):
        instance.validate()
        return EXT_CONTINUE

    def before_insert(self, mapper, connection, instance):
        instance.validate()
        return EXT_CONTINUE

# Shared extension instance passed to the individual mapper() calls.
validator = Validator()
372 ################################################################################
class Architecture(ORMObject):
    """ORM class for one hardware architecture row."""

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Support comparing directly against a plain architecture-name
        # string; any other operand falls back to the default machinery.
        if not isinstance(val, str):
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        # Mirror of __eq__ for string operands.
        if not isinstance(val, str):
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # 'arch_string' first: ORMObject.__repr__ shows the first entry.
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']

__all__.append('Architecture')
@session_wrapper
def get_architecture(architecture, session=None):
    """
    Returns database id for given C{architecture}.

    @type architecture: string
    @param architecture: The name of the architecture

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Architecture
    @return: Architecture object for the given arch (None if not present)
    """

    q = session.query(Architecture).filter_by(arch_string=architecture)

    try:
        return q.one()
    except NoResultFound:
        # No such architecture in the database.
        return None

__all__.append('get_architecture')
424 # TODO: should be removed because the implementation is too trivial
# TODO: should be removed because the implementation is too trivial
@session_wrapper
def get_architecture_suites(architecture, session=None):
    """
    Returns list of Suite objects for given C{architecture} name

    @type architecture: str
    @param architecture: Architecture name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Suite objects for the given name (may be empty)
    """

    return get_architecture(architecture, session).suites

__all__.append('get_architecture_suites')
445 ################################################################################
class Archive(object):
    """Represents one archive known to the database (see get_archive())."""

    def __init__(self, *args, **kwargs):
        # Attributes are filled in by the SQLAlchemy mapper.
        pass

    def __repr__(self):
        return '<Archive %s>' % self.archive_name
454 __all__.append('Archive')
@session_wrapper
def get_archive(archive, session=None):
    """
    returns database id for given C{archive}.

    @type archive: string
    @param archive: the name of the archive

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Archive
    @return: Archive object for the given name (None if not present)
    """
    # Archive names are stored lower-case.
    archive = archive.lower()

    q = session.query(Archive).filter_by(archive_name=archive)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_archive')
483 ################################################################################
class ArchiveFile(object):
    """Associates a pool file with an archive and a component."""

    def __init__(self, archive=None, component=None, file=None):
        self.archive = archive
        self.component = component
        self.file = file

    # NOTE(review): the property header was elided from the extract; the
    # name 'path' is reconstructed and should be confirmed against callers.
    @property
    def path(self):
        # Full on-disk path of the file inside the archive pool.
        return os.path.join(self.archive.path, 'pool', self.component.component_name, self.file.filename)
494 __all__.append('ArchiveFile')
496 ################################################################################
class BinContents(ORMObject):
    """One path contained in a binary package (contents listing entry)."""

    def __init__(self, file = None, binary = None):
        self.file = file
        self.binary = binary

    def properties(self):
        # Used by ORMObject.json()/__repr__.
        return ['file', 'binary']

__all__.append('BinContents')
508 ################################################################################
def subprocess_setup():
    """Restore the default SIGPIPE action before exec'ing a child.

    Python installs a SIGPIPE handler by default; that is usually not
    what non-Python subprocesses expect, so reset it here (used as a
    Popen preexec_fn).
    """
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
class DBBinary(ORMObject):
    # ORM class for one binary package (row in the binaries table).
    # NOTE(review): this extract has interior lines elided; several method
    # bodies below are visibly incomplete (missing assignments, property
    # headers and try/close statements).

    def __init__(self, package = None, source = None, version = None, \
        maintainer = None, architecture = None, poolfile = None, \
        binarytype = 'deb', fingerprint=None):
        self.package = package
        self.version = version
        self.maintainer = maintainer
        self.architecture = architecture
        self.poolfile = poolfile
        self.binarytype = binarytype
        self.fingerprint = fingerprint

        # NOTE(review): the property/method header this return belongs to
        # is missing from the extract.
        return self.binary_id

    def properties(self):
        # Property list consumed by ORMObject.json()/__repr__; the '_count'
        # suffix marks list/query properties.
        return ['package', 'version', 'maintainer', 'source', 'architecture', \
            'poolfile', 'binarytype', 'fingerprint', 'install_date', \
            'suites_count', 'binary_id', 'contents_count', 'extra_sources']

    def not_null_constraints(self):
        # NOTE(review): the continuation line of this list is elided.
        return ['package', 'version', 'maintainer', 'source', 'poolfile', \

    # Proxy to the binary's key/value metadata rows.
    metadata = association_proxy('key', 'value')

    def get_component_name(self):
        # Component is resolved via the pool file's location.
        return self.poolfile.location.component.component_name

    def scan_contents(self):
        """
        Yields the contents of the package. Only regular files are yielded and
        the path names are normalized after converting them from either utf-8
        or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the
        package does not contain any regular file.
        """
        fullpath = self.poolfile.fullpath
        # Stream the package's filesystem tarball out of dpkg-deb.
        dpkg = Popen(['dpkg-deb', '--fsys-tarfile', fullpath], stdout = PIPE,
            preexec_fn = subprocess_setup)
        tar = TarFile.open(fileobj = dpkg.stdout, mode = 'r|')
        for member in tar.getmembers():
            if not member.isdir():
                name = normpath(member.name)
                # enforce proper utf-8 encoding
                # NOTE(review): the matching try/yield lines are elided here.
                except UnicodeDecodeError:
                    name = name.decode('iso8859-1').encode('utf-8')

    def read_control(self):
        """
        Reads the control information from a binary.

        @rtype: text
        @return: stanza text of the control section.
        """
        fullpath = self.poolfile.fullpath
        deb_file = open(fullpath, 'r')
        stanza = utils.deb_extract_control(deb_file)

    def read_control_fields(self):
        """
        Reads the control information from a binary and return

        @rtype: dict
        @return: fields of the control section as a dictionary.
        """
        stanza = self.read_control()
        return apt_pkg.TagSection(stanza)

__all__.append('DBBinary')
@session_wrapper
def get_suites_binary_in(package, session=None):
    """
    Returns list of Suite objects which given C{package} name is in

    @type package: str
    @param package: DBBinary package name to search for

    @rtype: list
    @return: list of Suite objects for the given package
    """

    return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()

__all__.append('get_suites_binary_in')
@session_wrapper
def get_component_by_package_suite(package, suite_list, arch_list=None, session=None):
    """
    Returns the component name of the newest binary package in suite_list or
    None if no package is found. The result can be optionally filtered by a list
    of architecture names.

    @type package: str
    @param package: DBBinary package name to search for

    @type suite_list: list of str
    @param suite_list: list of suite_name items

    @type arch_list: list of str
    @param arch_list: optional list of arch_string items that defaults to []

    @rtype: str or NoneType
    @return: name of component or None
    """
    # Use a None sentinel instead of a shared mutable default ([]).
    if arch_list is None:
        arch_list = []

    q = session.query(DBBinary).filter_by(package = package). \
        join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
    if len(arch_list) > 0:
        q = q.join(DBBinary.architecture). \
            filter(Architecture.arch_string.in_(arch_list))
    # Newest version wins.
    binary = q.order_by(desc(DBBinary.version)).first()
    if binary is None:
        return None
    else:
        return binary.get_component_name()

__all__.append('get_component_by_package_suite')
648 ################################################################################
class BinaryACL(object):
    """ACL entry controlling binary uploads."""

    def __init__(self, *args, **kwargs):
        # Attributes are filled in by the SQLAlchemy mapper.
        pass

    def __repr__(self):
        return '<BinaryACL %s>' % self.binary_acl_id
657 __all__.append('BinaryACL')
659 ################################################################################
class BinaryACLMap(object):
    """Mapping row associating a BinaryACL with its subject."""

    def __init__(self, *args, **kwargs):
        # Attributes are filled in by the SQLAlchemy mapper.
        pass

    def __repr__(self):
        return '<BinaryACLMap %s>' % self.binary_acl_map_id
668 __all__.append('BinaryACLMap')
670 ################################################################################
675 ArchiveDir "%(archivepath)s";
676 OverrideDir "%(overridedir)s";
677 CacheDir "%(cachedir)s";
682 Packages::Compress ". bzip2 gzip";
683 Sources::Compress ". bzip2 gzip";
688 bindirectory "incoming"
693 BinOverride "override.sid.all3";
694 BinCacheDB "packages-accepted.db";
696 FileList "%(filelist)s";
699 Packages::Extensions ".deb .udeb";
702 bindirectory "incoming/"
705 BinOverride "override.sid.all3";
706 SrcOverride "override.sid.all3.src";
707 FileList "%(filelist)s";
class BuildQueue(object):
    # Represents a build queue directory (e.g. buildd incoming) whose
    # Packages/Sources/Release metadata is generated with apt-ftparchive.
    # NOTE(review): this extract has many interior lines elided; several
    # method bodies below are visibly incomplete (missing try/except
    # headers, loop headers, returns and cleanup code).

    def __init__(self, *args, **kwargs):
        # NOTE(review): the __repr__ header this return belongs to is
        # missing from the extract.
        return '<BuildQueue %s>' % self.queue_name

    def write_metadata(self, starttime, force=False):
        # Regenerates Packages/Sources/Release for this queue directory.
        # Do we write out metafiles?
        if not (force or self.generate_metadata):

        session = DBConn().session().object_session(self)

        fl_fd = fl_name = ac_fd = ac_name = None
        # All known architectures except 'source', for the Release file.
        arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
        startdir = os.getcwd()

        # Grab files we want to include
        newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
        newer += session.query(BuildQueuePolicyFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueuePolicyFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
        # Write file list with newer files
        (fl_fd, fl_name) = mkstemp()
        os.write(fl_fd, '%s\n' % n.fullpath)

        # Write minimal apt.conf
        # TODO: Remove hardcoding from template
        (ac_fd, ac_name) = mkstemp()
        os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
                                            'cachedir': cnf["Dir::Cache"],
                                            'overridedir': cnf["Dir::Override"],

        # Run apt-ftparchive generate
        os.chdir(os.path.dirname(ac_name))
        os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))

        # Run apt-ftparchive release
        # TODO: Eww - fix this
        bname = os.path.basename(self.path)

        # We have to remove the Release file otherwise it'll be included in the
        os.unlink(os.path.join(bname, 'Release'))

        os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))

        # Crude hack with open and append, but this whole section is and should be redone.
        if self.notautomatic:
            release=open("Release", "a")
            release.write("NotAutomatic: yes\n")

        keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
        if cnf.has_key("Dinstall::SigningPubKeyring"):
            keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]

        os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))

        # Move the files if we got this far
        os.rename('Release', os.path.join(bname, 'Release'))
        os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))

        # Clean up any left behind files

    def clean_and_update(self, starttime, Logger, dryrun=False):
        """WARNING: This routine commits for you"""
        session = DBConn().session().object_session(self)

        if self.generate_metadata and not dryrun:
            self.write_metadata(starttime)

        # Grab files older than our execution time
        older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
        older += session.query(BuildQueuePolicyFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueuePolicyFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()

        # NOTE(review): the loop over 'older' and its dryrun/try branches
        # are partially elided in this extract.
        Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
        Logger.log(["I: Removing %s from the queue" % o.fullpath])
        os.unlink(o.fullpath)
        # If it wasn't there, don't worry
        if e.errno == ENOENT:
        # TODO: Replace with proper logging call
        Logger.log(["E: Could not remove %s" % o.fullpath])

        # Remove apt-ftparchive output that no longer corresponds to a
        # queue entry.
        for f in os.listdir(self.path):
            if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):

            if not self.contains_filename(f):
                fp = os.path.join(self.path, f)
                Logger.log(["I: Would remove unused link %s" % fp])
                Logger.log(["I: Removing unused link %s" % fp])
        Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])

    def contains_filename(self, filename):
        """
        @returns True if filename is supposed to be in the queue; False otherwise
        """
        session = DBConn().session().object_session(self)
        if session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id, filename = filename).count() > 0:
        elif session.query(BuildQueuePolicyFile).filter_by(build_queue = self, filename = filename).count() > 0:

    def add_file_from_pool(self, poolfile):
        """Copies a file into the pool. Assumes that the PoolFile object is
        attached to the same SQLAlchemy session as the Queue object is.

        The caller is responsible for committing after calling this function."""
        poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]

        # Check if we have a file of this name or this ID already
        for f in self.queuefiles:
            if (f.fileid is not None and f.fileid == poolfile.file_id) or \
               (f.poolfile is not None and f.poolfile.filename == poolfile_basename):
                # In this case, update the BuildQueueFile entry so we
                # don't remove it too early
                f.lastused = datetime.now()
                DBConn().session().object_session(poolfile).add(f)

        # Prepare BuildQueueFile object
        qf = BuildQueueFile()
        qf.build_queue_id = self.queue_id
        qf.filename = poolfile_basename

        targetpath = poolfile.fullpath
        queuepath = os.path.join(self.path, poolfile_basename)

        # NOTE(review): the copy-vs-symlink branching (try/except) is
        # partially elided here.
        # We need to copy instead of symlink
        utils.copy(targetpath, queuepath)
        # NULL in the fileid field implies a copy
        os.symlink(targetpath, queuepath)
        qf.fileid = poolfile.file_id
        except FileExistsError:
        if not poolfile.identical_to(queuepath):

        # Get the same session as the PoolFile is using and add the qf to it
        DBConn().session().object_session(poolfile).add(qf)

    def add_changes_from_policy_queue(self, policyqueue, changes):
        """
        Copies a changes from a policy queue together with its poolfiles.

        @type policyqueue: PolicyQueue
        @param policyqueue: policy queue to copy the changes from

        @type changes: DBChange
        @param changes: changes to copy to this build queue
        """
        for policyqueuefile in changes.files:
            self.add_file_from_policy_queue(policyqueue, policyqueuefile)
        for poolfile in changes.poolfiles:
            self.add_file_from_pool(poolfile)

    def add_file_from_policy_queue(self, policyqueue, policyqueuefile):
        """
        Copies a file from a policy queue.
        Assumes that the policyqueuefile is attached to the same SQLAlchemy
        session as the Queue object is. The caller is responsible for
        committing after calling this function.

        @type policyqueue: PolicyQueue
        @param policyqueue: policy queue to copy the file from

        @type policyqueuefile: ChangePendingFile
        @param policyqueuefile: file to be added to the build queue
        """
        session = DBConn().session().object_session(policyqueuefile)

        # Is the file already there?
        f = session.query(BuildQueuePolicyFile).filter_by(build_queue=self, file=policyqueuefile).one()
        f.lastused = datetime.now()
        except NoResultFound:
            pass # continue below

        # We have to add the file.
        f = BuildQueuePolicyFile()
        f.file = policyqueuefile
        f.filename = policyqueuefile.filename

        source = os.path.join(policyqueue.path, policyqueuefile.filename)

        # NOTE(review): the copy target assignment and try header are
        # elided here.
        # Always copy files from policy queues as they might move around.
        utils.copy(source, target)
        except FileExistsError:
        if not policyqueuefile.identical_to(target):

__all__.append('BuildQueue')
@session_wrapper
def get_build_queue(queuename, session=None):
    """
    Returns BuildQueue object for given C{queue name}, creating it if it does not
    exist.

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: BuildQueue
    @return: BuildQueue object for the given queue
    """

    q = session.query(BuildQueue).filter_by(queue_name=queuename)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_build_queue')
1010 ################################################################################
class BuildQueueFile(object):
    """
    BuildQueueFile represents a file in a build queue coming from a pool.
    """

    def __init__(self, *args, **kwargs):
        # Attributes are filled in by the SQLAlchemy mapper.
        pass

    def __repr__(self):
        return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)

    @property
    def fullpath(self):
        # Absolute path of the file inside the build queue directory.
        return os.path.join(self.buildqueue.path, self.filename)
1028 __all__.append('BuildQueueFile')
1030 ################################################################################
class BuildQueuePolicyFile(object):
    """
    BuildQueuePolicyFile represents a file in a build queue that comes from a
    policy queue (and not a pool).
    """

    def __init__(self, *args, **kwargs):
        # Attributes are filled in by the SQLAlchemy mapper.
        pass

    @property
    def fullpath(self):
        # Absolute path of the file inside the build queue directory.
        return os.path.join(self.build_queue.path, self.filename)
1049 __all__.append('BuildQueuePolicyFile')
1051 ################################################################################
class ChangePendingBinary(object):
    """A binary belonging to a .changes upload pending in a policy queue."""

    def __init__(self, *args, **kwargs):
        # Attributes are filled in by the SQLAlchemy mapper.
        pass

    def __repr__(self):
        return '<ChangePendingBinary %s>' % self.change_pending_binary_id
1060 __all__.append('ChangePendingBinary')
1062 ################################################################################
class ChangePendingFile(object):
    """A file belonging to a .changes upload pending in a policy queue."""

    def __init__(self, *args, **kwargs):
        # Attributes are filled in by the SQLAlchemy mapper.
        pass

    def __repr__(self):
        return '<ChangePendingFile %s>' % self.change_pending_file_id

    def identical_to(self, filename):
        """
        compare size and hash with the given file

        @rtype: boolean
        @return: true if the given file has the same size and hash as this object; false otherwise
        """
        st = os.stat(filename)
        if self.size != st.st_size:
            # Cheap size check first; no need to hash.
            return False

        f = open(filename, "r")
        try:
            sha256sum = apt_pkg.sha256sum(f)
        finally:
            # Close the handle even if hashing fails (the original leaked it).
            f.close()
        if sha256sum != self.sha256sum:
            return False

        return True
1089 __all__.append('ChangePendingFile')
1091 ################################################################################
class ChangePendingSource(object):
    """A source belonging to a .changes upload pending in a policy queue."""

    def __init__(self, *args, **kwargs):
        # Attributes are filled in by the SQLAlchemy mapper.
        pass

    def __repr__(self):
        return '<ChangePendingSource %s>' % self.change_pending_source_id
1100 __all__.append('ChangePendingSource')
1102 ################################################################################
class Component(ORMObject):
    """ORM class for one archive component row."""

    def __init__(self, component_name = None):
        self.component_name = component_name

    def __eq__(self, val):
        # Allow comparing directly against a plain component-name string;
        # other operand types defer to the default comparison machinery.
        if not isinstance(val, str):
            return NotImplemented
        return self.component_name == val

    def __ne__(self, val):
        # Mirror of __eq__ for string operands.
        if not isinstance(val, str):
            return NotImplemented
        return self.component_name != val

    def properties(self):
        # 'component_name' first: ORMObject.__repr__ shows the first entry.
        return ['component_name', 'component_id', 'description', \
            'location_count', 'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        return ['component_name']

__all__.append('Component')
@session_wrapper
def get_component(component, session=None):
    """
    Returns database id for given C{component}.

    @type component: string
    @param component: The name of the component

    @rtype: int
    @return: the database id for the given component
    """
    # Component names are stored lower-case.
    component = component.lower()

    q = session.query(Component).filter_by(component_name=component)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_component')
@session_wrapper
def get_component_names(session=None):
    """
    Returns list of strings of component names.

    @rtype: list
    @return: list of strings of component names
    """

    return [ x.component_name for x in session.query(Component).all() ]

__all__.append('get_component_names')
1166 ################################################################################
class DBConfig(object):
    """A name/value configuration row stored in the database."""

    def __init__(self, *args, **kwargs):
        # Attributes are filled in by the SQLAlchemy mapper.
        pass

    def __repr__(self):
        return '<DBConfig %s>' % self.name
1175 __all__.append('DBConfig')
1177 ################################################################################
@session_wrapper
def get_or_set_contents_file_id(filename, session=None):
    """
    Returns database id for given filename.

    If no matching file is found, a row is inserted.

    @type filename: string
    @param filename: The filename
    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: int
    @return: the database id for the given filename
    """

    q = session.query(ContentFilename).filter_by(filename=filename)

    try:
        ret = q.one().cafilename_id
    except NoResultFound:
        # Not there yet: insert and pick up the generated id.
        cf = ContentFilename()
        cf.filename = filename
        session.add(cf)
        session.commit_or_flush()
        ret = cf.cafilename_id

    return ret

__all__.append('get_or_set_contents_file_id')
def get_contents(suite, overridetype, section=None, session=None):
    """
    Returns contents for a suite / overridetype combination, limiting
    to a section if not None.

    @param suite: Suite object

    @type overridetype: OverrideType
    @param overridetype: OverrideType object

    @type section: Section
    @param section: Optional section object to limit results to

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: ResultsProxy
    @return: ResultsProxy object set up to return tuples of (filename, section,
    ...)
    """

    # find me all of the contents for a given suite
    # NOTE(review): the extract elided lines here — the remaining SELECT
    # column list is missing, so the query text below is incomplete.
    contents_q = """SELECT (p.path||'/'||n.file) AS fn,
               FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
               JOIN content_file_names n ON (c.filename=n.id)
               JOIN binaries b ON (b.id=c.binary_pkg)
               JOIN override o ON (o.package=b.package)
               JOIN section s ON (s.id=o.section)
               WHERE o.suite = :suiteid AND o.type = :overridetypeid
               AND b.type=:overridetypename"""

    vals = {'suiteid': suite.suite_id,
            'overridetypeid': overridetype.overridetype_id,
            'overridetypename': overridetype.overridetype}

    if section is not None:
        # Narrow to a single section when requested.
        contents_q += " AND s.id = :sectionid"
        vals['sectionid'] = section.section_id

    contents_q += " ORDER BY fn"

    return session.execute(contents_q, vals)

__all__.append('get_contents')
1263 ################################################################################
class ContentFilepath(object):
    """ORM class for one row of the content_file_paths table."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ContentFilepath %s>' % self.filepath
1272 __all__.append('ContentFilepath')
def get_or_set_contents_path_id(filepath, session=None):
    """
    Returns database id for given path, inserting a new row if the path is
    not yet known.

    @type filepath: string
    @param filepath: The filepath

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: int
    @return: the database id for the given path
    """
    query = session.query(ContentFilepath).filter_by(filepath=filepath)

    try:
        return query.one().cafilepath_id
    except NoResultFound:
        # Unknown path: insert it and hand back the freshly assigned id.
        cf = ContentFilepath()
        cf.filepath = filepath
        session.add(cf)
        session.commit_or_flush()
        return cf.cafilepath_id
1306 __all__.append('get_or_set_contents_path_id')
1308 ################################################################################
class ContentAssociation(object):
    """ORM class linking a binary package to one of its content entries."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ContentAssociation %s>' % self.ca_id
1317 __all__.append('ContentAssociation')
def insert_content_paths(binary_id, fullpaths, session=None):
    """
    Make sure given path is associated with given binary id

    @type binary_id: int
    @param binary_id: the id of the binary
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session. If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code. If not passed, a commit
    will be performed at the end of the function, otherwise the caller is
    responsible for commiting.

    @return: True upon success
    """

    privatetrans = False
    if session is None:
        session = DBConn().session()
        privatetrans = True

    try:
        def generate_path_dicts():
            # Strip a leading './' so all stored paths are relative.
            for fullpath in fullpaths:
                if fullpath.startswith( './' ):
                    fullpath = fullpath[2:]

                yield {'filename':fullpath, 'id': binary_id }

        for d in generate_path_dicts():
            session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
                             d )

        session.commit()
        if privatetrans:
            session.close()
        return True

    except:
        traceback.print_exc()

        # Only rollback if we set up the session ourself
        if privatetrans:
            session.rollback()
            session.close()

        return False
1370 __all__.append('insert_content_paths')
1372 ################################################################################
class DSCFile(object):
    """ORM class for one row of the dsc_files table."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DSCFile %s>' % self.dscfile_id
1381 __all__.append('DSCFile')
def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
    """
    Returns a list of DSCFiles which may be empty

    @type dscfile_id: int (optional)
    @param dscfile_id: the dscfile_id of the DSCFiles to find

    @type source_id: int (optional)
    @param source_id: the source id related to the DSCFiles to find

    @type poolfile_id: int (optional)
    @param poolfile_id: the poolfile id related to the DSCFiles to find

    @rtype: list
    @return: Possibly empty list of DSCFiles
    """
    q = session.query(DSCFile)

    # Each filter is optional; apply only those that were supplied.
    if dscfile_id is not None:
        q = q.filter_by(dscfile_id=dscfile_id)
    if source_id is not None:
        q = q.filter_by(source_id=source_id)
    if poolfile_id is not None:
        q = q.filter_by(poolfile_id=poolfile_id)

    return q.all()
1414 __all__.append('get_dscfiles')
1416 ################################################################################
class ExternalOverride(ORMObject):
    """ORM class for one row of the external_overrides table."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ExternalOverride %s = %s: %s>' % (self.package, self.key, self.value)
1425 __all__.append('ExternalOverride')
1427 ################################################################################
class PoolFile(ORMObject):
    """
    A file in the pool (one row of the files table).  Mapped attributes such
    as file_id, sha1sum, sha256sum and last_used are provided by the
    SQLAlchemy mapper.
    """
    def __init__(self, filename = None, location = None, filesize = -1, \
        md5sum = None):
        self.filename = filename
        self.location = location
        self.filesize = filesize
        self.md5sum = md5sum

    @property
    def fullpath(self):
        # Resolve the on-disk path via the archive this file belongs to.
        session = DBConn().session().object_session(self)
        af = session.query(ArchiveFile).join(Archive).filter(ArchiveFile.file == self).first()
        return af.path

    @property
    def basename(self):
        return os.path.basename(self.filename)

    def is_valid(self, filesize = -1, md5sum = None):
        """Return True if the given size and md5sum match this pool file."""
        return self.filesize == long(filesize) and self.md5sum == md5sum

    def properties(self):
        return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
            'sha256sum', 'source', 'binary', 'last_used']

    def not_null_constraints(self):
        return ['filename', 'md5sum']

    def identical_to(self, filename):
        """
        compare size and hash with the given file

        @rtype: bool
        @return: true if the given file has the same size and hash as this object; false otherwise
        """
        st = os.stat(filename)
        if self.filesize != st.st_size:
            return False

        # FIX: previously the file handle was opened and never closed,
        # leaking a descriptor on every call.
        f = open(filename, "r")
        try:
            sha256sum = apt_pkg.sha256sum(f)
        finally:
            f.close()
        if sha256sum != self.sha256sum:
            return False

        return True
1475 __all__.append('PoolFile')
def check_poolfile(filename, filesize, md5sum, location_id, session=None):
    """
    Returns a tuple
     (ValidFileFound [boolean], PoolFile object or None)

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @type filesize: int
    @param filesize: the size of the file to check against the DB

    @type md5sum: string
    @param md5sum: the md5sum of the file to check against the DB

    @type location_id: int
    @param location_id: the id of the location to look in

    @rtype: tuple
    @return: Tuple of length 2.
        - If valid pool file found: (C{True}, C{PoolFile object})
        - If valid pool file not found:
            - (C{False}, C{None}) if no file found
            - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
    """
    location = session.query(Location).get(location_id)
    poolfile = location.files.filter_by(filename=filename).first()

    # The file counts as valid only if both size and checksum agree.
    valid = bool(poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum))

    return (valid, poolfile)
1511 __all__.append('check_poolfile')
1513 # TODO: the implementation can trivially be inlined at the place where the
1514 # function is called
def get_poolfile_by_id(file_id, session=None):
    """
    Returns a PoolFile objects or None for the given id

    @type file_id: int
    @param file_id: the id of the file to look for

    @rtype: PoolFile or None
    @return: either the PoolFile object or None
    """
    return session.query(PoolFile).get(file_id)
1529 __all__.append('get_poolfile_by_id')
def get_poolfile_like_name(filename, session=None):
    """
    Returns an array of PoolFile objects which are like the given name

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @rtype: array
    @return: array of PoolFile objects
    """
    # TODO: There must be a way of properly using bind parameters with %FOO%
    pattern = PoolFile.filename.like('%%/%s' % filename)
    return session.query(PoolFile).filter(pattern).all()
1548 __all__.append('get_poolfile_like_name')
def add_poolfile(filename, datadict, location_id, session=None):
    """
    Add a new file to the pool

    @type filename: string
    @param filename: filename

    @type datadict: dict
    @param datadict: dict with needed data

    @type location_id: int
    @param location_id: database id of the location

    @rtype: PoolFile
    @return: the PoolFile object created
    """
    poolfile = PoolFile()
    poolfile.filename = filename
    poolfile.location_id = location_id
    # Copy the checksums and size from the supplied dict.
    for src_key, attr in (("size", "filesize"), ("md5sum", "md5sum"),
                          ("sha1sum", "sha1sum"), ("sha256sum", "sha256sum")):
        setattr(poolfile, attr, datadict[src_key])

    session.add(poolfile)
    # Flush to get a file id (NB: This is not a commit)
    session.flush()

    return poolfile
1581 __all__.append('add_poolfile')
1583 ################################################################################
class Fingerprint(ORMObject):
    """ORM class for one row of the fingerprint table."""
    def __init__(self, fingerprint = None):
        self.fingerprint = fingerprint

    def properties(self):
        return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
            'binary_acl_map_count', 'source_acl', 'binary_acl']

    def not_null_constraints(self):
        return ['fingerprint']
1596 __all__.append('Fingerprint')
def get_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr or None
    """
    query = session.query(Fingerprint).filter_by(fingerprint=fpr)

    try:
        return query.one()
    except NoResultFound:
        return None
1623 __all__.append('get_fingerprint')
def get_or_set_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    If no matching fpr is found, a row is inserted.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr
    """
    query = session.query(Fingerprint).filter_by(fingerprint=fpr)

    try:
        return query.one()
    except NoResultFound:
        # Not known yet: insert a new row and return it.
        fingerprint = Fingerprint()
        fingerprint.fingerprint = fpr
        session.add(fingerprint)
        session.commit_or_flush()
        return fingerprint
1658 __all__.append('get_or_set_fingerprint')
1660 ################################################################################
# Helper routine for Keyring class
def get_ldap_name(entry):
    """Build a display name from an LDAP entry's cn/mn/sn attributes,
    skipping empty values and the '-' placeholder."""
    parts = []
    for attr in ["cn", "mn", "sn"]:
        values = entry.get(attr)
        if values and values[0] != "" and values[0] != "-":
            parts.append(values[0])
    return " ".join(parts)
1671 ################################################################################
class Keyring(object):
    """
    ORM class for one row of the keyrings table, plus helpers to parse a
    GPG keyring and map its keys to archive uids.
    """
    gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
                     " --with-colons --fingerprint --fingerprint"

    # NOTE(review): these look like class-level (shared) caches filled by
    # load_keys -- confirm whether per-instance state was intended.
    keys = {}
    fpr_lookup = {}

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<Keyring %s>' % self.keyring_name

    def de_escape_gpg_str(self, txt):
        """Decode the \\xNN escape sequences gpg emits in colon output."""
        esclist = re.split(r'(\\x..)', txt)
        for x in range(1,len(esclist),2):
            esclist[x] = "%c" % (int(esclist[x][2:],16))
        return "".join(esclist)

    def parse_address(self, uid):
        """parses uid and returns a tuple of real name and email address"""
        (name, address) = email.Utils.parseaddr(uid)
        # Drop any parenthesised comment and de-escape gpg quoting.
        name = re.sub(r"\s*[(].*[)]", "", name)
        name = self.de_escape_gpg_str(name)
        if name == "":
            name = uid
        return (name, address)

    def load_keys(self, keyring):
        if not self.keyring_id:
            raise Exception('Must be initialized with database information')

        k = os.popen(self.gpg_invocation % keyring, "r")
        key = None
        signingkey = False

        for line in k:
            field = line.split(":")
            if field[0] == "pub":
                key = field[4]
                self.keys[key] = {}
                (name, addr) = self.parse_address(field[9])
                if "@" in addr:
                    self.keys[key]["email"] = addr
                    self.keys[key]["name"] = name
                self.keys[key]["fingerprints"] = []
                signingkey = True
            elif key and field[0] == "sub" and len(field) >= 12:
                signingkey = ("s" in field[11])
            elif key and field[0] == "uid":
                (name, addr) = self.parse_address(field[9])
                if "email" not in self.keys[key] and "@" in addr:
                    self.keys[key]["email"] = addr
                    self.keys[key]["name"] = name
            elif signingkey and field[0] == "fpr":
                self.keys[key]["fingerprints"].append(field[9])
                self.fpr_lookup[field[9]] = key

    def import_users_from_ldap(self, session):
        cnf = Config()

        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]

        l = ldap.open(LDAPServer)
        l.simple_bind_s("","")
        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
               ["uid", "keyfingerprint", "cn", "mn", "sn"])

        ldap_fin_uid_id = {}

        byuid = {}
        byname = {}

        for i in Attrs:
            entry = i[1]
            uid = entry["uid"][0]
            name = get_ldap_name(entry)
            fingerprints = entry["keyFingerPrint"]
            keyid = None
            for f in fingerprints:
                key = self.fpr_lookup.get(f, None)
                if key not in self.keys:
                    continue
                self.keys[key]["uid"] = uid

                # Only resolve the database uid once per LDAP entry.
                if keyid is not None:
                    continue
                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, name)
                byname[uid] = (keyid, name)

        return (byname, byuid)

    def generate_users_from_keyring(self, format, session):
        byuid = {}
        byname = {}
        any_invalid = False
        for x in self.keys.keys():
            if "email" not in self.keys[x]:
                # Key without a usable address: mark it and handle below.
                any_invalid = True
                self.keys[x]["uid"] = format % "invalid-uid"
            else:
                uid = format % self.keys[x]["email"]
                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, self.keys[x]["name"])
                byname[uid] = (keyid, self.keys[x]["name"])
                self.keys[x]["uid"] = uid

        if any_invalid:
            uid = format % "invalid-uid"
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, "ungeneratable user id")
            byname[uid] = (keyid, "ungeneratable user id")

        return (byname, byuid)
1793 __all__.append('Keyring')
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @rtype: Keyring
    @return: the Keyring object for this keyring
    """
    query = session.query(Keyring).filter_by(keyring_name=keyring)

    try:
        return query.one()
    except NoResultFound:
        return None
1815 __all__.append('get_keyring')
def get_active_keyring_paths(session=None):
    """
    @rtype: list
    @return: list of active keyring paths, highest priority first
    """
    rows = session.query(Keyring).filter(Keyring.active == True).order_by(desc(Keyring.priority)).all()
    return [ x.keyring_name for x in rows ]
1825 __all__.append('get_active_keyring_paths')
def get_primary_keyring_path(session=None):
    """
    Get the full path to the highest priority active keyring

    @rtype: str or None
    @return: path to the active keyring with the highest priority or None if no
             keyring is configured
    """
    keyrings = get_active_keyring_paths()
    # get_active_keyring_paths returns highest priority first.
    return keyrings[0] if keyrings else None
1843 __all__.append('get_primary_keyring_path')
1845 ################################################################################
class KeyringACLMap(object):
    """ORM class for one row of the keyring_acl_map table."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<KeyringACLMap %s>' % self.keyring_acl_map_id
1854 __all__.append('KeyringACLMap')
1856 ################################################################################
class DBChange(object):
    """ORM class for one row of the changes table (an uploaded .changes)."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DBChange %s>' % self.changesname

    def clean_from_queue(self):
        session = DBConn().session().object_session(self)

        # Remove changes_pool_files entries
        self.poolfiles = []

        # Remove changes_pending_files references
        self.files = []

        # Clear out of queue
        self.in_queue = None
        self.approved_for_id = None
1878 __all__.append('DBChange')
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return: DBChange object for the given filename (C{None} if not present)
    """
    # FIX: the docstring previously referred to a garbled "C(unknown)"
    # instead of the filename parameter.
    q = session.query(DBChange).filter_by(changesname=filename)

    try:
        return q.one()
    except NoResultFound:
        return None
1903 __all__.append('get_dbchange')
1905 ################################################################################
class Location(ORMObject):
    """ORM class for one row of the location table (a pool directory)."""
    def __init__(self, path = None, component = None):
        self.path = path
        self.component = component
        # the column 'type' should go away, see comment at mapper
        self.archive_type = 'pool'

    def properties(self):
        return ['path', 'location_id', 'archive_type', 'component', \
            'files_count']

    def not_null_constraints(self):
        return ['path', 'archive_type']
1921 __all__.append('Location')
def get_location(location, component=None, archive=None, session=None):
    """
    Returns Location object for the given combination of location, component
    and archive

    @type location: string
    @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}

    @type component: string
    @param component: the component name (if None, no restriction applied)

    @type archive: string
    @param archive: the archive name (if None, no restriction applied)

    @rtype: Location / None
    @return: Either a Location object or None if one can't be found
    """
    q = session.query(Location).filter_by(path=location)

    # Optional narrowing by archive and/or component.
    if archive is not None:
        q = q.join(Archive).filter_by(archive_name=archive)
    if component is not None:
        q = q.join(Component).filter_by(component_name=component)

    try:
        return q.one()
    except NoResultFound:
        return None
1955 __all__.append('get_location')
1957 ################################################################################
class Maintainer(ORMObject):
    """ORM class for one row of the maintainer table."""
    def __init__(self, name = None):
        self.name = name

    def properties(self):
        return ['name', 'maintainer_id']

    def not_null_constraints(self):
        return ['name']

    def get_split_maintainer(self):
        """Split the stored name into (name, email, ...) via fix_maintainer;
        returns empty strings when no name is set."""
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')
        return fix_maintainer(self.name.strip())
1975 __all__.append('Maintainer')
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """
    query = session.query(Maintainer).filter_by(name=name)

    try:
        return query.one()
    except NoResultFound:
        # Not known yet: insert a new row and return it.
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()
        return maintainer
2009 __all__.append('get_or_set_maintainer')
def get_maintainer(maintainer_id, session=None):
    """
    Return the name of the maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer
    @return: the Maintainer with this C{maintainer_id}
    """
    return session.query(Maintainer).get(maintainer_id)
2026 __all__.append('get_maintainer')
2028 ################################################################################
class NewComment(object):
    """ORM class for one row of the new_comments table."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
2037 __all__.append('NewComment')
def has_new_comment(package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """
    q = session.query(NewComment).filter_by(package=package, version=version)

    return bool(q.count() > 0)
2064 __all__.append('has_new_comment')
def get_new_comments(package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """
    q = session.query(NewComment)

    # Every filter is optional.
    if package is not None:
        q = q.filter_by(package=package)
    if version is not None:
        q = q.filter_by(version=version)
    if comment_id is not None:
        q = q.filter_by(comment_id=comment_id)

    return q.all()
2096 __all__.append('get_new_comments')
2098 ################################################################################
class Override(ORMObject):
    """ORM class for one row of the override table."""
    def __init__(self, package = None, suite = None, component = None, overridetype = None, \
        section = None, priority = None):
        self.package = package
        self.suite = suite
        self.component = component
        self.overridetype = overridetype
        self.section = section
        self.priority = priority

    def properties(self):
        return ['package', 'suite', 'component', 'overridetype', 'section', \
            'priority']

    def not_null_constraints(self):
        return ['package', 'suite', 'component', 'overridetype', 'section']
2117 __all__.append('Override')
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to. If
                  None, don't limit. Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
                      limit to. If None, don't limit. Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
                         limit to. If None, don't limit. Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """
    q = session.query(Override).filter_by(package=package)

    # Each restriction accepts either a single name or a list of names.
    if suite is not None:
        if not isinstance(suite, list):
            suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list):
            component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list):
            overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

    return q.all()
2164 __all__.append('get_override')
2167 ################################################################################
class OverrideType(ORMObject):
    """ORM class for one row of the override_type table."""
    def __init__(self, overridetype = None):
        self.overridetype = overridetype

    def properties(self):
        return ['overridetype', 'overridetype_id', 'overrides_count']

    def not_null_constraints(self):
        return ['overridetype']
2179 __all__.append('OverrideType')
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: OverrideType
    @return: the OverrideType object for the given override type
    """
    query = session.query(OverrideType).filter_by(overridetype=override_type)

    try:
        return query.one()
    except NoResultFound:
        return None
2204 __all__.append('get_override_type')
2206 ################################################################################
class PolicyQueue(object):
    """ORM class for one row of the policy_queue table."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<PolicyQueue %s>' % self.queue_name
2215 __all__.append('PolicyQueue')
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    query = session.query(PolicyQueue).filter_by(queue_name=queuename)

    try:
        return query.one()
    except NoResultFound:
        return None
2240 __all__.append('get_policy_queue')
def get_policy_queue_from_path(pathname, session=None):
    """
    Returns PolicyQueue object for given C{path name}

    @type pathname: string
    @param pathname: The path

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    # FIX: docstring previously documented the parameter as "queuename",
    # but the actual parameter is the queue's path, "pathname".
    q = session.query(PolicyQueue).filter_by(path=pathname)

    try:
        return q.one()
    except NoResultFound:
        return None
2265 __all__.append('get_policy_queue_from_path')
2267 ################################################################################
class PolicyQueueUpload(object):
    """ORM class for an upload sitting in a policy queue."""
    def __cmp__(self, other):
        # Order by source name, then version; source uploads sort before
        # binary-only ones; the changes file name is the final tie-breaker.
        ret = cmp(self.changes.source, other.changes.source)
        if ret == 0:
            ret = apt_pkg.version_compare(self.changes.version, other.changes.version)
        if ret == 0:
            if self.source is not None and other.source is None:
                ret = -1
            elif self.source is None and other.source is not None:
                ret = 1
        if ret == 0:
            ret = cmp(self.changes.changesname, other.changes.changesname)
        return ret
2283 __all__.append('PolicyQueueUpload')
2285 ################################################################################
class PolicyQueueByhandFile(object):
    """ORM class for a BYHAND file attached to a policy queue upload;
    all attributes come from the SQLAlchemy mapper."""
    pass
2290 __all__.append('PolicyQueueByhandFile')
2292 ################################################################################
class Priority(ORMObject):
    """ORM class for one row of the priority table."""
    def __init__(self, priority = None, level = None):
        self.priority = priority
        self.level = level

    def properties(self):
        return ['priority', 'priority_id', 'level', 'overrides_count']

    def not_null_constraints(self):
        return ['priority', 'level']

    def __eq__(self, val):
        # Support direct comparison against a priority name string;
        # anything else falls back to the default comparison machinery.
        if not isinstance(val, str):
            return NotImplemented
        return (self.priority == val)

    def __ne__(self, val):
        if not isinstance(val, str):
            return NotImplemented
        return (self.priority != val)
2317 __all__.append('Priority')
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority
    """
    query = session.query(Priority).filter_by(priority=priority)

    try:
        return query.one()
    except NoResultFound:
        return None
2342 __all__.append('get_priority')
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """
    return dict((x.priority, x.priority_id)
                for x in session.query(Priority).all())
2364 __all__.append('get_priorities')
2366 ################################################################################
class Section(ORMObject):
    """ORM class for one row of the section table."""
    def __init__(self, section = None):
        self.section = section

    def properties(self):
        return ['section', 'section_id', 'overrides_count']

    def not_null_constraints(self):
        return ['section']

    def __eq__(self, val):
        # Support direct comparison against a section name string;
        # anything else falls back to the default comparison machinery.
        if not isinstance(val, str):
            return NotImplemented
        return (self.section == val)

    def __ne__(self, val):
        if not isinstance(val, str):
            return NotImplemented
        return (self.section != val)
2390 __all__.append('Section')
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name
    """
    query = session.query(Section).filter_by(section=section)

    try:
        return query.one()
    except NoResultFound:
        return None
2415 __all__.append('get_section')
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """
    return dict((x.section, x.section_id)
                for x in session.query(Section).all())
2437 __all__.append('get_sections')
2439 ################################################################################
class SrcContents(ORMObject):
    """ORM class for one row of the src_contents table."""
    def __init__(self, file = None, source = None):
        self.file = file
        self.source = source

    def properties(self):
        return ['file', 'source']
2449 __all__.append('SrcContents')
2451 ################################################################################
class DBSource(ORMObject):
    """ORM class for one row of the source table (a source package)."""
    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None, fingerprint = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date
        self.fingerprint = fingerprint

    @property
    def pkid(self):
        return self.source_id

    def properties(self):
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \
            'install_date', 'binaries_count', 'uploaders_count']

    def not_null_constraints(self):
        return ['source', 'version', 'install_date', 'maintainer', \
            'changedby', 'poolfile']

    def read_control_fields(self):
        '''
        Reads the control information from a dsc

        @rtype: tuple
        @return: fields is the dsc information in a dictionary form
        '''
        fullpath = self.poolfile.fullpath
        # FIX: close the .dsc file handle instead of leaking it.
        fd = open(fullpath, 'r')
        try:
            contents = fd.read()
        finally:
            fd.close()
        signed_file = SignedFile(contents, keyrings=[], require_signature=False)
        fields = apt_pkg.TagSection(signed_file.contents)
        return fields

    metadata = association_proxy('key', 'value')

    def get_component_name(self):
        return self.poolfile.location.component.component_name

    def scan_contents(self):
        '''
        Returns a set of names for non directories. The path names are
        normalized after converting them from either utf-8 or iso8859-1
        encoding.
        '''
        fullpath = self.poolfile.fullpath
        from daklib.contents import UnpackedSource
        unpacked = UnpackedSource(fullpath)
        fileset = set()
        for name in unpacked.get_all_filenames():
            # enforce proper utf-8 encoding
            try:
                name.decode('utf-8')
            except UnicodeDecodeError:
                name = name.decode('iso8859-1').encode('utf-8')
            fileset.add(name)
        return fileset
2514 __all__.append('DBSource')
def source_exists(source, source_version, suites = ("any",), session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
      1. exact match => 1.0-3
      2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: sequence
    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: returns true if a source with expected version is found, otherwise false
    """
    # FIX: default changed from the mutable list ["any"] to an immutable
    # tuple; the sequence is only iterated, so behaviour is unchanged while
    # avoiding the shared-mutable-default pitfall.
    ret = True

    from daklib.regexes import re_bin_only_nmu
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))
        if suite != "any":
            # source must exist in 'suite' or a suite that is enhanced by 'suite'
            s = get_suite(suite, session)
            if s:
                enhances_vcs = session.query(VersionCheck).filter(VersionCheck.suite==s).filter_by(check='Enhances')
                considered_suites = [ vc.reference for vc in enhances_vcs ]
                considered_suites.append(s)

                q = q.filter(DBSource.suites.any(Suite.suite_id.in_([s.suite_id for s in considered_suites])))

        if q.count() > 0:
            continue

        # No source found so return not ok
        ret = False

    return ret
2569 __all__.append('source_exists')
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """
    # Suite.sources.any() pushes the membership test down into a single
    # SQL EXISTS clause instead of loading the sources collection.
    q = session.query(Suite).filter(Suite.sources.any(source=source))
    return q.all()

__all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @type source: str
    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect. If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBSource objects for the given name (may be empty)
    """
    query = session.query(DBSource).filter_by(source=source)

    # Narrow the query only for the filters the caller actually supplied.
    if version is not None:
        query = query.filter_by(version=version)
    if dm_upload_allowed is not None:
        query = query.filter_by(dm_upload_allowed=dm_upload_allowed)

    return query.all()

__all__.append('get_sources_from_name')
# FIXME: This function fails badly if it finds more than 1 source package and
# its implementation is trivial enough to be inlined.
def get_source_in_suite(source, suite, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite: string
    @param suite: the suite name

    @rtype: DBSource or None
    @return: the source for I{source} in I{suite}, or None when absent
    """
    q = get_suite(suite, session).get_sources(source)
    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_source_in_suite')
def import_metadata_into_db(obj, session=None):
    """
    This routine works on either DBBinary or DBSource objects and imports
    their metadata into the database
    """
    fields = obj.read_control_fields()
    for k in fields.keys():
        try:
            # Plain ASCII first.
            val = str(fields[k])
        except UnicodeEncodeError:
            # Fall back to UTF-8
            try:
                val = fields[k].encode('utf-8')
            except UnicodeEncodeError:
                # Finally try iso8859-1
                val = fields[k].encode('iso8859-1')
                # Otherwise we allow the exception to percolate up and we cause
                # a reject as someone is playing silly buggers

        obj.metadata[get_or_set_metadatakey(k, session)] = val

    session.commit_or_flush()

__all__.append('import_metadata_into_db')
2679 ################################################################################
def split_uploaders(uploaders_list):
    """
    Split the Uploaders field into the individual uploaders and yield each of
    them. Beware: email addresses might contain commas.
    """
    # Only a comma that directly follows a closing angle bracket terminates
    # an entry; commas inside display names ("Doe, John <j@d.org>") do not.
    marked = re.sub(">[ ]*,", ">\t", uploaders_list)
    for entry in marked.split("\t"):
        yield entry.strip()
def add_dsc_to_db(u, filename, session=None):
    # Import the .dsc of upload 'u' into the database: fills the DBSource
    # row, its pool files and dsc_files entries, suites and uploaders.
    # NOTE(review): this residue has lines elided (e.g. the DBSource()/
    # DSCFile() constructions, 'pfs = []', session.add()/flush() calls,
    # an 'else:' in the changed-by branch, and the body of the inner
    # u.pkg.files loop) — verify against version control.
    entry = u.pkg.files[filename]
    source.source = u.pkg.dsc["source"]
    source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
    source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    # If Changed-By isn't available, fall back to maintainer
    if u.pkg.changes.has_key("changed-by"):
        source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
        # NOTE(review): an 'else:' almost certainly preceded the next line.
        source.changedby_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    source.install_date = datetime.now().date()

    dsc_component = entry["component"]
    dsc_location_id = entry["location id"]

    source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")

    # Set up a new poolfile if necessary
    if not entry.has_key("files id") or not entry["files id"]:
        # Pool path is <pool name> + the bare filename.
        filename = entry["pool name"] + filename
        poolfile = add_poolfile(filename, entry, dsc_location_id, session)
        pfs.append(poolfile)
        entry["files id"] = poolfile.file_id

    source.poolfile_id = entry["files id"]

    suite_names = u.pkg.changes["distribution"].keys()
    source.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Add the source files to the DB (files and dsc_files)
    dscfile.source_id = source.source_id
    dscfile.poolfile_id = entry["files id"]
    session.add(dscfile)

    for dsc_file, dentry in u.pkg.dsc_files.items():
        df.source_id = source.source_id

        # If the .orig tarball is already in the pool, it's
        # files id is stored in dsc_files by check_dsc().
        files_id = dentry.get("files id", None)

        # Find the entry in the files hash
        # TODO: Bail out here properly
        for f, e in u.pkg.files.items():
            # NOTE(review): loop body elided in this residue.

        if files_id is None:
            filename = dfentry["pool name"] + dsc_file

            (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
            # FIXME: needs to check for -1/-2 and or handle exception
            if found and obj is not None:
                files_id = obj.file_id

        # If still not found, add it
        if files_id is None:
            # HACK: Force sha1sum etc into dentry
            dentry["sha1sum"] = dfentry["sha1sum"]
            dentry["sha256sum"] = dfentry["sha256sum"]
            poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
            pfs.append(poolfile)
            files_id = poolfile.file_id
        # NOTE(review): upstream wraps the following lookup in an 'else:'.
        poolfile = get_poolfile_by_id(files_id, session)
        if poolfile is None:
            utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
        pfs.append(poolfile)

        df.poolfile_id = files_id

    # Add the src_uploaders to the DB
    session.refresh(source)
    source.uploaders = [source.maintainer]
    if u.pkg.dsc.has_key("uploaders"):
        for up in split_uploaders(u.pkg.dsc["uploaders"]):
            source.uploaders.append(get_or_set_maintainer(up, session))

    return source, dsc_component, dsc_location_id, pfs

__all__.append('add_dsc_to_db')
def add_deb_to_db(u, filename, session=None):
    """
    Contrary to what you might expect, this routine deals with both
    debs and udebs. That info is in 'dbtype', whilst 'type' is
    'deb' for both of them
    """
    # NOTE(review): lines are elided in this residue (e.g. 'bin = DBBinary()',
    # a Config() lookup for cnf, an 'else:' before the add_poolfile branch,
    # and session.add()/flush() calls) — verify against version control.
    entry = u.pkg.files[filename]

    bin.package = entry["package"]
    bin.version = entry["version"]
    bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
    bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    bin.arch_id = get_architecture(entry["architecture"], session).arch_id
    bin.binarytype = entry["dbtype"]

    # Resolve the pool location / poolfile of the .deb itself.
    filename = entry["pool name"] + filename
    fullpath = os.path.join(cnf["Dir::Pool"], filename)
    if not entry.get("location id", None):
        entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id

    if entry.get("files id", None):
        poolfile = get_poolfile_by_id(bin.poolfile_id)
        bin.poolfile_id = entry["files id"]
        # NOTE(review): an 'else:' almost certainly preceded the next lines.
        poolfile = add_poolfile(filename, entry, entry["location id"], session)
        bin.poolfile_id = entry["files id"] = poolfile.file_id

    # Find the source the binary was built from.
    bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)

    # If we couldn't find anything and the upload contains Arch: source,
    # fall back to trying the source package, source version uploaded
    # This maintains backwards compatibility with previous dak behaviour
    # and deals with slightly broken binary debs which don't properly
    # declare their source package name
    if len(bin_sources) == 0:
        if u.pkg.changes["architecture"].has_key("source") \
           and u.pkg.dsc.has_key("source") and u.pkg.dsc.has_key("version"):
            bin_sources = get_sources_from_name(u.pkg.dsc["source"], u.pkg.dsc["version"], session=session)

    # If we couldn't find a source here, we reject
    # TODO: Fix this so that it doesn't kill process-upload and instead just
    #       performs a reject.  To be honest, we should probably spot this
    #       *much* earlier than here
    if len(bin_sources) != 1:
        raise NoSourceFieldError("Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
                                  (bin.package, bin.version, entry["architecture"],
                                   filename, bin.binarytype, u.pkg.changes["fingerprint"]))

    bin.source_id = bin_sources[0].source_id

    if entry.has_key("built-using"):
        for srcname, version in entry["built-using"]:
            exsources = get_sources_from_name(srcname, version, session=session)
            if len(exsources) != 1:
                raise NoSourceFieldError("Unable to find source package (%s = %s) in Built-Using for %s (%s), %s, file %s, type %s, signed by %s" % \
                                          (srcname, version, bin.package, bin.version, entry["architecture"],
                                           filename, bin.binarytype, u.pkg.changes["fingerprint"]))

            bin.extra_sources.append(exsources[0])

    # Add and flush object so it has an ID
    suite_names = u.pkg.changes["distribution"].keys()
    bin.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Deal with contents - disabled for now
    #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
    #if not contents:
    #    print "REJECT\nCould not determine contents of package %s" % bin.package
    #    session.rollback()
    #    raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)

    return bin, poolfile

__all__.append('add_deb_to_db')
2874 ################################################################################
class SourceACL(object):
    """ACL entry controlling what source uploads a fingerprint may perform."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SourceACL %s>' % self.source_acl_id
2883 __all__.append('SourceACL')
2885 ################################################################################
class SrcFormat(object):
    """A source package format known to the archive, e.g. '3.0 (quilt)'."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SrcFormat %s>' % (self.format_name)
2894 __all__.append('SrcFormat')
2896 ################################################################################
# (display name, Suite attribute) pairs rendered by Suite.details().
# NOTE(review): an entry appears to be elided from this residue between
# 'Origin' and 'Description' (upstream has ('Label', 'label')) — verify.
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
# Why the heck don't we have any UNIQUE constraints in table suite?
# TODO: Add UNIQUE constraints for appropriate columns.
class Suite(ORMObject):
    # A distribution suite (row of table 'suite').
    # NOTE(review): structural lines are elided throughout this residue —
    # the end of properties(), the 'def details(self):' header, the
    # skipsrc/skipall if-guards in get_architectures(), the trailing
    # filter in get_sources(), the return/else in get_overridesuite(),
    # and the '@property / def path(self):' header.  Verify against VCS.

    def __init__(self, suite_name = None, version = None):
        self.suite_name = suite_name
        self.version = version

    def properties(self):
        # Attribute names exposed by ORMObject introspection.
        # NOTE(review): the continuation after the backslash is elided.
        return ['suite_name', 'version', 'sources_count', 'binaries_count', \

    def not_null_constraints(self):
        return ['suite_name']

    def __eq__(self, val):
        # Allow comparing a Suite directly against its name string.
        if isinstance(val, str):
            return (self.suite_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.suite_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    # NOTE(review): the following loop belongs to an elided
    # 'def details(self):' which also initialises 'ret'.
        for disp, field in SUITE_FIELDS:
            val = getattr(self, field, None)
            ret.append("%s: %s" % (disp, val))
        return "\n".join(ret)

    def get_architectures(self, skipsrc=False, skipall=False):
        """
        Returns list of Architecture objects

        @type skipsrc: boolean
        @param skipsrc: Whether to skip returning the 'source' architecture entry

        @type skipall: boolean
        @param skipall: Whether to skip returning the 'all' architecture entry

        @rtype: list
        @return: list of Architecture objects for the given name (may be empty)
        """
        q = object_session(self).query(Architecture).with_parent(self)
        # NOTE(review): 'if skipsrc:' / 'if skipall:' guards are elided here.
        q = q.filter(Architecture.arch_string != 'source')
        q = q.filter(Architecture.arch_string != 'all')
        return q.order_by(Architecture.arch_string).all()

    def get_sources(self, source):
        """
        Returns a query object representing DBSource that is part of C{suite}.

          - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}

        @type source: string
        @param source: source package name

        @rtype: sqlalchemy.orm.query.Query
        @return: a query of DBSource
        """
        session = object_session(self)
        # NOTE(review): the continuation restricting the query to this
        # suite is elided after the backslash.
        return session.query(DBSource).filter_by(source = source). \

    def get_overridesuite(self):
        # A suite may delegate its override data to another suite.
        if self.overridesuite is None:
            # NOTE(review): 'return self' and an 'else:' are elided here.
        return object_session(self).query(Suite).filter_by(suite_name=self.overridesuite).one()

    # NOTE(review): '@property' and 'def path(self):' headers elided.
        return os.path.join(self.archive.path, 'dists', self.suite_name)

__all__.append('Suite')
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Suite
    @return: Suite object for the requested suite name (None if not present)
    """
    q = session.query(Suite).filter_by(suite_name=suite)
    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_suite')
3027 ################################################################################
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name. The list is
    empty if suite does not exist.

    @type suite: str
    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Architecture objects for the given name (may be empty)
    """
    try:
        return get_suite(suite, session).get_architectures(skipsrc, skipall)
    except AttributeError:
        # get_suite() returned None: unknown suite name.
        return []

__all__.append('get_suite_architectures')
3061 ################################################################################
class Uid(ORMObject):
    """A key uid (row of table 'uid')."""

    def __init__(self, uid = None, name = None):
        self.uid = uid
        self.name = name

    def __eq__(self, val):
        # Allow comparing a Uid directly against its uid string.
        if isinstance(val, str):
            return (self.uid == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.uid != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        return ['uid', 'name', 'fingerprint']

    def not_null_constraints(self):
        return ['uid']

__all__.append('Uid')
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: Uid
    @return: the uid object for the given uidname
    """
    q = session.query(Uid).filter_by(uid=uidname)

    try:
        ret = q.one()
    except NoResultFound:
        # Not present yet: insert and make the new row visible.
        new_uid = Uid()
        new_uid.uid = uidname
        session.add(new_uid)
        session.commit_or_flush()
        ret = new_uid

    return ret

__all__.append('get_or_set_uid')
def get_uid_from_fingerprint(fpr, session=None):
    """Return the Uid owning fingerprint C{fpr}, or None if unknown."""
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_uid_from_fingerprint')
3134 ################################################################################
class UploadBlock(object):
    """Marks a source package as blocked from upload (table 'upload_blocks')."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
3143 __all__.append('UploadBlock')
3145 ################################################################################
class MetadataKey(ORMObject):
    """A key name for binary/source metadata (table 'metadata_keys')."""

    def __init__(self, key = None):
        self.key = key

    def properties(self):
        return ['key']

    def not_null_constraints(self):
        return ['key']

__all__.append('MetadataKey')
def get_or_set_metadatakey(keyname, session=None):
    """
    Returns MetadataKey object for given keyname.

    If no matching keyname is found, a row is inserted.

    @type keyname: string
    @param keyname: The keyname to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: MetadataKey
    @return: the metadatakey object for the given keyname
    """
    # Docstring fixed: it previously documented a parameter 'uidname'
    # (copy-paste from get_or_set_uid) that does not exist here.
    q = session.query(MetadataKey).filter_by(key=keyname)

    try:
        ret = q.one()
    except NoResultFound:
        # Insert the missing key and make it visible to this session.
        ret = MetadataKey(keyname)
        session.add(ret)
        session.commit_or_flush()

    return ret

__all__.append('get_or_set_metadatakey')
3191 ################################################################################
class BinaryMetadata(ORMObject):
    """One key/value metadata pair attached to a binary package."""

    def __init__(self, key = None, value = None, binary = None):
        self.key = key
        self.value = value
        self.binary = binary

    def properties(self):
        return ['binary', 'key', 'value']

    def not_null_constraints(self):
        return ['value']

__all__.append('BinaryMetadata')
3207 ################################################################################
class SourceMetadata(ORMObject):
    """One key/value metadata pair attached to a source package."""

    def __init__(self, key = None, value = None, source = None):
        self.key = key
        self.value = value
        self.source = source

    def properties(self):
        return ['source', 'key', 'value']

    def not_null_constraints(self):
        return ['value']

__all__.append('SourceMetadata')
3223 ################################################################################
class VersionCheck(ORMObject):
    """A version ordering constraint between two suites (table 'version_check')."""

    def __init__(self, *args, **kwargs):
        pass

    def properties(self):
        #return ['suite_id', 'check', 'reference_id']
        return ['check']

    def not_null_constraints(self):
        return ['suite', 'check', 'reference']

__all__.append('VersionCheck')
def get_version_checks(suite_name, check = None, session = None):
    """Return VersionCheck rows for C{suite_name}, optionally limited to C{check}."""
    suite = get_suite(suite_name, session)
    if suite is None:
        # Make sure that what we return is iterable so that list comprehensions
        # involving this don't cause a traceback
        return []

    q = session.query(VersionCheck).filter_by(suite=suite)
    if check:
        q = q.filter_by(check=check)
    return q.all()

__all__.append('get_version_checks')
3252 ################################################################################
3254 class DBConn(object):
3256 database module init.
3260 def __init__(self, *args, **kwargs):
3261 self.__dict__ = self.__shared_state
3263 if not getattr(self, 'initialised', False):
3264 self.initialised = True
3265 self.debug = kwargs.has_key('debug')
3268 def __setuptables(self):
3275 'binaries_metadata',
3279 'build_queue_files',
3280 'build_queue_policy_files',
3285 'changes_pending_binaries',
3286 'changes_pending_files',
3287 'changes_pending_source',
3288 'changes_pending_files_map',
3289 'changes_pending_source_files',
3290 'changes_pool_files',
3292 'external_overrides',
3293 'extra_src_references',
3295 'files_archive_map',
3303 # TODO: the maintainer column in table override should be removed.
3307 'policy_queue_upload',
3308 'policy_queue_upload_binaries_map',
3309 'policy_queue_byhand_file',
3320 'suite_architectures',
3321 'suite_build_queue_copy',
3322 'suite_src_formats',
3329 'almost_obsolete_all_associations',
3330 'almost_obsolete_src_associations',
3331 'any_associations_source',
3332 'bin_associations_binaries',
3333 'binaries_suite_arch',
3336 'newest_all_associations',
3337 'newest_any_associations',
3339 'newest_src_association',
3340 'obsolete_all_associations',
3341 'obsolete_any_associations',
3342 'obsolete_any_by_all_associations',
3343 'obsolete_src_associations',
3345 'src_associations_bin',
3346 'src_associations_src',
3347 'suite_arch_by_name',
3350 for table_name in tables:
3351 table = Table(table_name, self.db_meta, \
3352 autoload=True, useexisting=True)
3353 setattr(self, 'tbl_%s' % table_name, table)
3355 for view_name in views:
3356 view = Table(view_name, self.db_meta, autoload=True)
3357 setattr(self, 'view_%s' % view_name, view)
3359 def __setupmappers(self):
3360 mapper(Architecture, self.tbl_architecture,
3361 properties = dict(arch_id = self.tbl_architecture.c.id,
3362 suites = relation(Suite, secondary=self.tbl_suite_architectures,
3363 order_by=self.tbl_suite.c.suite_name,
3364 backref=backref('architectures', order_by=self.tbl_architecture.c.arch_string))),
3365 extension = validator)
3367 mapper(Archive, self.tbl_archive,
3368 properties = dict(archive_id = self.tbl_archive.c.id,
3369 archive_name = self.tbl_archive.c.name))
3371 mapper(ArchiveFile, self.tbl_files_archive_map,
3372 properties = dict(archive = relation(Archive, backref='files'),
3373 component = relation(Component),
3374 file = relation(PoolFile, backref='archives')))
3376 mapper(BuildQueue, self.tbl_build_queue,
3377 properties = dict(queue_id = self.tbl_build_queue.c.id,
3378 suite = relation(Suite, primaryjoin=(self.tbl_build_queue.c.suite_id==self.tbl_suite.c.id))))
3380 mapper(BuildQueueFile, self.tbl_build_queue_files,
3381 properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
3382 poolfile = relation(PoolFile, backref='buildqueueinstances')))
3384 mapper(BuildQueuePolicyFile, self.tbl_build_queue_policy_files,
3386 build_queue = relation(BuildQueue, backref='policy_queue_files'),
3387 file = relation(ChangePendingFile, lazy='joined')))
3389 mapper(DBBinary, self.tbl_binaries,
3390 properties = dict(binary_id = self.tbl_binaries.c.id,
3391 package = self.tbl_binaries.c.package,
3392 version = self.tbl_binaries.c.version,
3393 maintainer_id = self.tbl_binaries.c.maintainer,
3394 maintainer = relation(Maintainer),
3395 source_id = self.tbl_binaries.c.source,
3396 source = relation(DBSource, backref='binaries'),
3397 arch_id = self.tbl_binaries.c.architecture,
3398 architecture = relation(Architecture),
3399 poolfile_id = self.tbl_binaries.c.file,
3400 poolfile = relation(PoolFile, backref=backref('binary', uselist = False)),
3401 binarytype = self.tbl_binaries.c.type,
3402 fingerprint_id = self.tbl_binaries.c.sig_fpr,
3403 fingerprint = relation(Fingerprint),
3404 install_date = self.tbl_binaries.c.install_date,
3405 suites = relation(Suite, secondary=self.tbl_bin_associations,
3406 backref=backref('binaries', lazy='dynamic')),
3407 extra_sources = relation(DBSource, secondary=self.tbl_extra_src_references,
3408 backref=backref('extra_binary_references', lazy='dynamic')),
3409 key = relation(BinaryMetadata, cascade='all',
3410 collection_class=attribute_mapped_collection('key'))),
3411 extension = validator)
3413 mapper(BinaryACL, self.tbl_binary_acl,
3414 properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))
3416 mapper(BinaryACLMap, self.tbl_binary_acl_map,
3417 properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
3418 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
3419 architecture = relation(Architecture)))
3421 mapper(Component, self.tbl_component,
3422 properties = dict(component_id = self.tbl_component.c.id,
3423 component_name = self.tbl_component.c.name),
3424 extension = validator)
3426 mapper(DBConfig, self.tbl_config,
3427 properties = dict(config_id = self.tbl_config.c.id))
3429 mapper(DSCFile, self.tbl_dsc_files,
3430 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
3431 source_id = self.tbl_dsc_files.c.source,
3432 source = relation(DBSource),
3433 poolfile_id = self.tbl_dsc_files.c.file,
3434 poolfile = relation(PoolFile)))
3436 mapper(ExternalOverride, self.tbl_external_overrides,
3438 suite_id = self.tbl_external_overrides.c.suite,
3439 suite = relation(Suite),
3440 component_id = self.tbl_external_overrides.c.component,
3441 component = relation(Component)))
3443 mapper(PoolFile, self.tbl_files,
3444 properties = dict(file_id = self.tbl_files.c.id,
3445 filesize = self.tbl_files.c.size),
3446 extension = validator)
3448 mapper(Fingerprint, self.tbl_fingerprint,
3449 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
3450 uid_id = self.tbl_fingerprint.c.uid,
3451 uid = relation(Uid),
3452 keyring_id = self.tbl_fingerprint.c.keyring,
3453 keyring = relation(Keyring),
3454 source_acl = relation(SourceACL),
3455 binary_acl = relation(BinaryACL)),
3456 extension = validator)
3458 mapper(Keyring, self.tbl_keyrings,
3459 properties = dict(keyring_name = self.tbl_keyrings.c.name,
3460 keyring_id = self.tbl_keyrings.c.id))
3462 mapper(DBChange, self.tbl_changes,
3463 properties = dict(change_id = self.tbl_changes.c.id,
3464 poolfiles = relation(PoolFile,
3465 secondary=self.tbl_changes_pool_files,
3466 backref="changeslinks"),
3467 seen = self.tbl_changes.c.seen,
3468 source = self.tbl_changes.c.source,
3469 binaries = self.tbl_changes.c.binaries,
3470 architecture = self.tbl_changes.c.architecture,
3471 distribution = self.tbl_changes.c.distribution,
3472 urgency = self.tbl_changes.c.urgency,
3473 maintainer = self.tbl_changes.c.maintainer,
3474 changedby = self.tbl_changes.c.changedby,
3475 date = self.tbl_changes.c.date,
3476 version = self.tbl_changes.c.version,
3477 files = relation(ChangePendingFile,
3478 secondary=self.tbl_changes_pending_files_map,
3479 backref="changesfile"),
3480 in_queue_id = self.tbl_changes.c.in_queue,
3481 in_queue = relation(PolicyQueue,
3482 primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
3483 approved_for_id = self.tbl_changes.c.approved_for))
3485 mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
3486 properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))
3488 mapper(ChangePendingFile, self.tbl_changes_pending_files,
3489 properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
3490 filename = self.tbl_changes_pending_files.c.filename,
3491 size = self.tbl_changes_pending_files.c.size,
3492 md5sum = self.tbl_changes_pending_files.c.md5sum,
3493 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
3494 sha256sum = self.tbl_changes_pending_files.c.sha256sum))
3496 mapper(ChangePendingSource, self.tbl_changes_pending_source,
3497 properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
3498 change = relation(DBChange),
3499 maintainer = relation(Maintainer,
3500 primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
3501 changedby = relation(Maintainer,
3502 primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
3503 fingerprint = relation(Fingerprint),
3504 source_files = relation(ChangePendingFile,
3505 secondary=self.tbl_changes_pending_source_files,
3506 backref="pending_sources")))
3509 mapper(KeyringACLMap, self.tbl_keyring_acl_map,
3510 properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
3511 keyring = relation(Keyring, backref="keyring_acl_map"),
3512 architecture = relation(Architecture)))
3514 mapper(Location, self.tbl_location,
3515 properties = dict(location_id = self.tbl_location.c.id,
3516 component_id = self.tbl_location.c.component,
3517 component = relation(Component, backref='location'),
3518 archive_id = self.tbl_location.c.archive,
3519 archive = relation(Archive),
3520 # FIXME: the 'type' column is old cruft and
3521 # should be removed in the future.
3522 archive_type = self.tbl_location.c.type),
3523 extension = validator)
3525 mapper(Maintainer, self.tbl_maintainer,
3526 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
3527 maintains_sources = relation(DBSource, backref='maintainer',
3528 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
3529 changed_sources = relation(DBSource, backref='changedby',
3530 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
3531 extension = validator)
3533 mapper(NewComment, self.tbl_new_comments,
3534 properties = dict(comment_id = self.tbl_new_comments.c.id))
3536 mapper(Override, self.tbl_override,
3537 properties = dict(suite_id = self.tbl_override.c.suite,
3538 suite = relation(Suite, \
3539 backref=backref('overrides', lazy='dynamic')),
3540 package = self.tbl_override.c.package,
3541 component_id = self.tbl_override.c.component,
3542 component = relation(Component, \
3543 backref=backref('overrides', lazy='dynamic')),
3544 priority_id = self.tbl_override.c.priority,
3545 priority = relation(Priority, \
3546 backref=backref('overrides', lazy='dynamic')),
3547 section_id = self.tbl_override.c.section,
3548 section = relation(Section, \
3549 backref=backref('overrides', lazy='dynamic')),
3550 overridetype_id = self.tbl_override.c.type,
3551 overridetype = relation(OverrideType, \
3552 backref=backref('overrides', lazy='dynamic'))))
3554 mapper(OverrideType, self.tbl_override_type,
3555 properties = dict(overridetype = self.tbl_override_type.c.type,
3556 overridetype_id = self.tbl_override_type.c.id))
3558 mapper(PolicyQueue, self.tbl_policy_queue,
3559 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))
3561 mapper(PolicyQueueUpload, self.tbl_policy_queue_upload,
3563 changes = relation(DBChange),
3564 policy_queue = relation(PolicyQueue, backref='uploads'),
3565 target_suite = relation(Suite),
3566 source = relation(DBSource),
3567 binaries = relation(DBBinary, secondary=self.tbl_policy_queue_upload_binaries_map),
3570 mapper(PolicyQueueByhandFile, self.tbl_policy_queue_byhand_file,
3572 upload = relation(PolicyQueueUpload, backref='byhand'),
3576 mapper(Priority, self.tbl_priority,
3577 properties = dict(priority_id = self.tbl_priority.c.id))
3579 mapper(Section, self.tbl_section,
3580 properties = dict(section_id = self.tbl_section.c.id,
3581 section=self.tbl_section.c.section))
3583 mapper(DBSource, self.tbl_source,
3584 properties = dict(source_id = self.tbl_source.c.id,
3585 version = self.tbl_source.c.version,
3586 maintainer_id = self.tbl_source.c.maintainer,
3587 poolfile_id = self.tbl_source.c.file,
3588 poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
3589 fingerprint_id = self.tbl_source.c.sig_fpr,
3590 fingerprint = relation(Fingerprint),
3591 changedby_id = self.tbl_source.c.changedby,
3592 srcfiles = relation(DSCFile,
3593 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
3594 suites = relation(Suite, secondary=self.tbl_src_associations,
3595 backref=backref('sources', lazy='dynamic')),
3596 uploaders = relation(Maintainer,
3597 secondary=self.tbl_src_uploaders),
3598 key = relation(SourceMetadata, cascade='all',
3599 collection_class=attribute_mapped_collection('key'))),
3600 extension = validator)
3602 mapper(SourceACL, self.tbl_source_acl,
3603 properties = dict(source_acl_id = self.tbl_source_acl.c.id))
3605 mapper(SrcFormat, self.tbl_src_format,
3606 properties = dict(src_format_id = self.tbl_src_format.c.id,
3607 format_name = self.tbl_src_format.c.format_name))
3609 mapper(Suite, self.tbl_suite,
3610 properties = dict(suite_id = self.tbl_suite.c.id,
3611 policy_queue = relation(PolicyQueue),
3612 copy_queues = relation(BuildQueue,
3613 secondary=self.tbl_suite_build_queue_copy),
3614 srcformats = relation(SrcFormat, secondary=self.tbl_suite_src_formats,
3615 backref=backref('suites', lazy='dynamic')),
3616 archive = relation(Archive, backref='suites')),
3617 extension = validator)
3619 mapper(Uid, self.tbl_uid,
3620 properties = dict(uid_id = self.tbl_uid.c.id,
3621 fingerprint = relation(Fingerprint)),
3622 extension = validator)
3624 mapper(UploadBlock, self.tbl_upload_blocks,
3625 properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
3626 fingerprint = relation(Fingerprint, backref="uploadblocks"),
3627 uid = relation(Uid, backref="uploadblocks")))
3629 mapper(BinContents, self.tbl_bin_contents,
3631 binary = relation(DBBinary,
3632 backref=backref('contents', lazy='dynamic', cascade='all')),
3633 file = self.tbl_bin_contents.c.file))
3635 mapper(SrcContents, self.tbl_src_contents,
3637 source = relation(DBSource,
3638 backref=backref('contents', lazy='dynamic', cascade='all')),
3639 file = self.tbl_src_contents.c.file))
3641 mapper(MetadataKey, self.tbl_metadata_keys,
3643 key_id = self.tbl_metadata_keys.c.key_id,
3644 key = self.tbl_metadata_keys.c.key))
3646 mapper(BinaryMetadata, self.tbl_binaries_metadata,
3648 binary_id = self.tbl_binaries_metadata.c.bin_id,
3649 binary = relation(DBBinary),
3650 key_id = self.tbl_binaries_metadata.c.key_id,
3651 key = relation(MetadataKey),
3652 value = self.tbl_binaries_metadata.c.value))
3654 mapper(SourceMetadata, self.tbl_source_metadata,
3656 source_id = self.tbl_source_metadata.c.src_id,
3657 source = relation(DBSource),
3658 key_id = self.tbl_source_metadata.c.key_id,
3659 key = relation(MetadataKey),
3660 value = self.tbl_source_metadata.c.value))
3662 mapper(VersionCheck, self.tbl_version_check,
3664 suite_id = self.tbl_version_check.c.suite,
3665 suite = relation(Suite, primaryjoin=self.tbl_version_check.c.suite==self.tbl_suite.c.id),
3666 reference_id = self.tbl_version_check.c.reference,
3667 reference = relation(Suite, primaryjoin=self.tbl_version_check.c.reference==self.tbl_suite.c.id, lazy='joined')))
3669 ## Connection functions
3670 def __createconn(self):
3671 from config import Config
3673 if cnf.has_key("DB::Service"):
3674 connstr = "postgresql://service=%s" % cnf["DB::Service"]
3675 elif cnf.has_key("DB::Host"):
3677 connstr = "postgresql://%s" % cnf["DB::Host"]
3678 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
3679 connstr += ":%s" % cnf["DB::Port"]
3680 connstr += "/%s" % cnf["DB::Name"]
3683 connstr = "postgresql:///%s" % cnf["DB::Name"]
3684 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
3685 connstr += "?port=%s" % cnf["DB::Port"]
3687 engine_args = { 'echo': self.debug }
3688 if cnf.has_key('DB::PoolSize'):
3689 engine_args['pool_size'] = int(cnf['DB::PoolSize'])
3690 if cnf.has_key('DB::MaxOverflow'):
3691 engine_args['max_overflow'] = int(cnf['DB::MaxOverflow'])
3692 if sa_major_version == '0.6' and cnf.has_key('DB::Unicode') and \
3693 cnf['DB::Unicode'] == 'false':
3694 engine_args['use_native_unicode'] = False
3696 # Monkey patch a new dialect in in order to support service= syntax
3697 import sqlalchemy.dialects.postgresql
3698 from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
3699 class PGDialect_psycopg2_dak(PGDialect_psycopg2):
3700 def create_connect_args(self, url):
3701 if str(url).startswith('postgresql://service='):
3703 servicename = str(url)[21:]
3704 return (['service=%s' % servicename], {})
3706 return PGDialect_psycopg2.create_connect_args(self, url)
3708 sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak
3711 self.db_pg = create_engine(connstr, **engine_args)
3712 self.db_meta = MetaData()
3713 self.db_meta.bind = self.db_pg
3714 self.db_smaker = sessionmaker(bind=self.db_pg,
3718 self.__setuptables()
3719 self.__setupmappers()
3721 except OperationalError as e:
3723 utils.fubar("Cannot connect to database (%s)" % str(e))
3725 self.pid = os.getpid()
3727 def session(self, work_mem = 0):
3729 Returns a new session object. If a work_mem parameter is provided a new
3730 transaction is started and the work_mem parameter is set for this
3731 transaction. The work_mem parameter is measured in MB. A default value
3732 will be used if the parameter is not set.
3734 # reinitialize DBConn in new processes
3735 if self.pid != os.getpid():
3738 session = self.db_smaker()
3740 session.execute("SET LOCAL work_mem TO '%d MB'" % work_mem)
# Export DBConn as part of the module's public API.
__all__.append('DBConn')