5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
37 from os.path import normpath
48 import simplejson as json
50 from datetime import datetime, timedelta
51 from errno import ENOENT
52 from tempfile import mkstemp, mkdtemp
53 from subprocess import Popen, PIPE
54 from tarfile import TarFile
56 from inspect import getargspec
59 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
61 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
62 backref, MapperExtension, EXT_CONTINUE, object_mapper, clear_mappers
63 from sqlalchemy import types as sqltypes
64 from sqlalchemy.orm.collections import attribute_mapped_collection
65 from sqlalchemy.ext.associationproxy import association_proxy
67 # Don't remove this, we re-export the exceptions to scripts which import us
68 from sqlalchemy.exc import *
69 from sqlalchemy.orm.exc import NoResultFound
71 # Only import Config until Queue stuff is changed to store its config
73 from config import Config
74 from textutils import fix_maintainer
75 from dak_exceptions import DBUpdateError, NoSourceFieldError
77 # suppress some deprecation warnings in squeeze related to sqlalchemy
79 warnings.filterwarnings('ignore', \
80 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
84 ################################################################################
86 # Patch in support for the debversion field type so that it works during
90 # that is for sqlalchemy 0.6
91 UserDefinedType = sqltypes.UserDefinedType
93 # this one for sqlalchemy 0.5
94 UserDefinedType = sqltypes.TypeEngine
class DebVersion(UserDefinedType):
    """Column type mapping PostgreSQL's 'debversion' type to plain strings."""

    def get_col_spec(self):
        # Column type name as it appears in the database schema.
        return 'DEBVERSION'

    def bind_processor(self, dialect):
        # Values go to the database unconverted.
        return None

    # ' = None' is needed for sqlalchemy 0.5:
    def result_processor(self, dialect, coltype = None):
        # Values come back from the database unconverted.
        return None
107 sa_major_version = sqlalchemy.__version__[0:3]
108 if sa_major_version in ["0.5", "0.6"]:
109 from sqlalchemy.databases import postgres
110 postgres.ischema_names['debversion'] = DebVersion
112 raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
114 ################################################################################
116 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
118 ################################################################################
120 def session_wrapper(fn):
122 Wrapper around common ".., session=None):" handling. If the wrapped
123 function is called without passing 'session', we create a local one
124 and destroy it when the function ends.
126 Also attaches a commit_or_flush method to the session; if we created a
127 local session, this is a synonym for session.commit(), otherwise it is a
128 synonym for session.flush().
131 def wrapped(*args, **kwargs):
132 private_transaction = False
134 # Find the session object
135 session = kwargs.get('session')
138 if len(args) <= len(getargspec(fn)[0]) - 1:
139 # No session specified as last argument or in kwargs
140 private_transaction = True
141 session = kwargs['session'] = DBConn().session()
143 # Session is last argument in args
147 session = args[-1] = DBConn().session()
148 private_transaction = True
150 if private_transaction:
151 session.commit_or_flush = session.commit
153 session.commit_or_flush = session.flush
156 return fn(*args, **kwargs)
158 if private_transaction:
159 # We created a session; close it.
162 wrapped.__doc__ = fn.__doc__
163 wrapped.func_name = fn.func_name
167 __all__.append('session_wrapper')
169 ################################################################################
171 class ORMObject(object):
173 ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
174 derived classes must implement the properties() method.
177 def properties(self):
179 This method should be implemented by all derived classes and returns a
180 list of the important properties. The properties 'created' and
181 'modified' will be added automatically. A suffix '_count' should be
182 added to properties that are lists or query objects. The most important
183 property name should be returned as the first element in the list
184 because it is used by repr().
190 Returns a JSON representation of the object based on the properties
191 returned from the properties() method.
194 # add created and modified
195 all_properties = self.properties() + ['created', 'modified']
196 for property in all_properties:
197 # check for list or query
198 if property[-6:] == '_count':
199 real_property = property[:-6]
200 if not hasattr(self, real_property):
202 value = getattr(self, real_property)
203 if hasattr(value, '__len__'):
206 elif hasattr(value, 'count'):
208 value = value.count()
210 raise KeyError('Do not understand property %s.' % property)
212 if not hasattr(self, property):
215 value = getattr(self, property)
219 elif isinstance(value, ORMObject):
220 # use repr() for ORMObject types
223 # we want a string for all other types because json cannot
226 data[property] = value
227 return json.dumps(data)
231 Returns the name of the class.
233 return type(self).__name__
237 Returns a short string representation of the object using the first
238 element from the properties() method.
240 primary_property = self.properties()[0]
241 value = getattr(self, primary_property)
242 return '<%s %s>' % (self.classname(), str(value))
246 Returns a human readable form of the object using the properties()
249 return '<%s %s>' % (self.classname(), self.json())
251 def not_null_constraints(self):
253 Returns a list of properties that must be not NULL. Derived classes
254 should override this method if needed.
258 validation_message = \
259 "Validation failed because property '%s' must not be empty in object\n%s"
263 This function validates the not NULL constraints as returned by
264 not_null_constraints(). It raises the DBUpdateError exception if
267 for property in self.not_null_constraints():
268 # TODO: It is a bit awkward that the mapper configuration allow
269 # directly setting the numeric _id columns. We should get rid of it
271 if hasattr(self, property + '_id') and \
272 getattr(self, property + '_id') is not None:
274 if not hasattr(self, property) or getattr(self, property) is None:
275 raise DBUpdateError(self.validation_message % \
276 (property, str(self)))
280 def get(cls, primary_key, session = None):
282 This is a support function that allows getting an object by its primary
285 Architecture.get(3[, session])
287 instead of the more verbose
289 session.query(Architecture).get(3)
291 return session.query(cls).get(primary_key)
293 def session(self, replace = False):
295 Returns the current session that is associated with the object. May
296 return None is object is in detached state.
299 return object_session(self)
301 def clone(self, session = None):
303 Clones the current object in a new session and returns the new clone. A
304 fresh session is created if the optional session parameter is not
305 provided. The function will fail if a session is provided and has
308 RATIONALE: SQLAlchemy's session is not thread safe. This method clones
309 an existing object to allow several threads to work with their own
310 instances of an ORMObject.
312 WARNING: Only persistent (committed) objects can be cloned. Changes
313 made to the original object that are not committed yet will get lost.
314 The session of the new object will always be rolled back to avoid
318 if self.session() is None:
319 raise RuntimeError( \
320 'Method clone() failed for detached object:\n%s' % self)
321 self.session().flush()
322 mapper = object_mapper(self)
323 primary_key = mapper.primary_key_from_instance(self)
324 object_class = self.__class__
326 session = DBConn().session()
327 elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
328 raise RuntimeError( \
329 'Method clone() failed due to unflushed changes in session.')
330 new_object = session.query(object_class).get(primary_key)
332 if new_object is None:
333 raise RuntimeError( \
334 'Method clone() failed for non-persistent object:\n%s' % self)
337 __all__.append('ORMObject')
339 ################################################################################
class Validator(MapperExtension):
    """
    This class calls the validate() method for each instance for the
    'before_update' and 'before_insert' events. A global object validator is
    used for configuring the individual mappers.
    """

    def before_update(self, mapper, connection, instance):
        instance.validate()
        return EXT_CONTINUE

    def before_insert(self, mapper, connection, instance):
        instance.validate()
        return EXT_CONTINUE

validator = Validator()
358 ################################################################################
class Architecture(ORMObject):
    """ORM class for a Debian architecture (e.g. 'amd64', 'source')."""

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Allow comparing an Architecture directly against a plain name string.
        if isinstance(val, str):
            return (self.arch_string == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        # Mirror of __eq__ for string comparisons.
        if isinstance(val, str):
            return (self.arch_string != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']
383 __all__.append('Architecture')
@session_wrapper
def get_architecture(architecture, session=None):
    """
    Returns the Architecture object for the given C{architecture} name.

    @type architecture: string
    @param architecture: The name of the architecture

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Architecture
    @return: Architecture object for the given arch (None if not present)
    """
    q = session.query(Architecture).filter_by(arch_string=architecture)

    try:
        return q.one()
    except NoResultFound:
        return None
408 __all__.append('get_architecture')
# TODO: should be removed because the implementation is too trivial
@session_wrapper
def get_architecture_suites(architecture, session=None):
    """
    Returns list of Suite objects for given C{architecture} name

    @type architecture: str
    @param architecture: Architecture name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Suite objects for the given name (may be empty)
    """
    # Delegates the lookup and simply follows the 'suites' relation.
    return get_architecture(architecture, session).suites
429 __all__.append('get_architecture_suites')
431 ################################################################################
class Archive(object):
    """ORM class for the 'archive' table; attributes are set by the mapper."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<Archive %s>' % self.archive_name
440 __all__.append('Archive')
@session_wrapper
def get_archive(archive, session=None):
    """
    Returns the Archive object for the given C{archive} name. Matching is
    done on the lower-cased name.

    @type archive: string
    @param archive: the name of the archive

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Archive
    @return: Archive object for the given name (None if not present)
    """
    archive = archive.lower()

    q = session.query(Archive).filter_by(archive_name=archive)

    try:
        return q.one()
    except NoResultFound:
        return None
467 __all__.append('get_archive')
469 ################################################################################
class BinContents(ORMObject):
    """Associates one contained file path with one binary package."""

    def __init__(self, file = None, binary = None):
        self.file = file
        self.binary = binary

    def properties(self):
        return ['file', 'binary']
479 __all__.append('BinContents')
481 ################################################################################
483 class DBBinary(ORMObject):
484 def __init__(self, package = None, source = None, version = None, \
485 maintainer = None, architecture = None, poolfile = None, \
487 self.package = package
489 self.version = version
490 self.maintainer = maintainer
491 self.architecture = architecture
492 self.poolfile = poolfile
493 self.binarytype = binarytype
497 return self.binary_id
499 def properties(self):
500 return ['package', 'version', 'maintainer', 'source', 'architecture', \
501 'poolfile', 'binarytype', 'fingerprint', 'install_date', \
502 'suites_count', 'binary_id', 'contents_count', 'extra_sources']
504 def not_null_constraints(self):
505 return ['package', 'version', 'maintainer', 'source', 'poolfile', \
508 metadata = association_proxy('key', 'value')
510 def get_component_name(self):
511 return self.poolfile.location.component.component_name
513 def scan_contents(self):
515 Yields the contents of the package. Only regular files are yielded and
516 the path names are normalized after converting them from either utf-8
517 or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the
518 package does not contain any regular file.
520 fullpath = self.poolfile.fullpath
521 dpkg = Popen(['dpkg-deb', '--fsys-tarfile', fullpath], stdout = PIPE)
522 tar = TarFile.open(fileobj = dpkg.stdout, mode = 'r|')
523 for member in tar.getmembers():
524 if not member.isdir():
525 name = normpath(member.name)
526 # enforce proper utf-8 encoding
529 except UnicodeDecodeError:
530 name = name.decode('iso8859-1').encode('utf-8')
536 def read_control(self):
538 Reads the control information from a binary.
541 @return: stanza text of the control section.
544 fullpath = self.poolfile.fullpath
545 deb_file = open(fullpath, 'r')
546 stanza = apt_inst.debExtractControl(deb_file)
551 def read_control_fields(self):
553 Reads the control information from a binary and return
557 @return: fields of the control section as a dictionary.
560 stanza = self.read_control()
561 return apt_pkg.TagSection(stanza)
563 __all__.append('DBBinary')
@session_wrapper
def get_suites_binary_in(package, session=None):
    """
    Returns list of Suite objects which given C{package} name is in

    @type package: str
    @param package: DBBinary package name to search for

    @rtype: list
    @return: list of Suite objects for the given package
    """
    return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()
579 __all__.append('get_suites_binary_in')
@session_wrapper
def get_component_by_package_suite(package, suite_list, arch_list=None, session=None):
    """
    Returns the component name of the newest binary package in suite_list or
    None if no package is found. The result can be optionally filtered by a list
    of architecture names.

    @type package: str
    @param package: DBBinary package name to search for

    @type suite_list: list of str
    @param suite_list: list of suite_name items

    @type arch_list: list of str
    @param arch_list: optional list of arch_string items that defaults to []

    @rtype: str or NoneType
    @return: name of component or None
    """
    # FIX: the default used to be the mutable literal [] which is shared
    # between calls; use None as sentinel instead. Callers see no change.
    if arch_list is None:
        arch_list = []
    q = session.query(DBBinary).filter_by(package = package). \
        join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
    if len(arch_list) > 0:
        q = q.join(DBBinary.architecture). \
            filter(Architecture.arch_string.in_(arch_list))
    # Newest version wins; None when no matching binary exists.
    binary = q.order_by(desc(DBBinary.version)).first()
    if binary is None:
        return None
    return binary.get_component_name()
612 __all__.append('get_component_by_package_suite')
614 ################################################################################
class BinaryACL(object):
    """ORM class for the binary ACL table; attributes come from the mapper."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BinaryACL %s>' % self.binary_acl_id
623 __all__.append('BinaryACL')
625 ################################################################################
class BinaryACLMap(object):
    """ORM class for the binary ACL map table; attributes come from the mapper."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BinaryACLMap %s>' % self.binary_acl_map_id
634 __all__.append('BinaryACLMap')
636 ################################################################################
641 ArchiveDir "%(archivepath)s";
642 OverrideDir "%(overridedir)s";
643 CacheDir "%(cachedir)s";
648 Packages::Compress ". bzip2 gzip";
649 Sources::Compress ". bzip2 gzip";
654 bindirectory "incoming"
659 BinOverride "override.sid.all3";
660 BinCacheDB "packages-accepted.db";
662 FileList "%(filelist)s";
665 Packages::Extensions ".deb .udeb";
668 bindirectory "incoming/"
671 BinOverride "override.sid.all3";
672 SrcOverride "override.sid.all3.src";
673 FileList "%(filelist)s";
677 class BuildQueue(object):
678 def __init__(self, *args, **kwargs):
682 return '<BuildQueue %s>' % self.queue_name
684 def write_metadata(self, starttime, force=False):
685 # Do we write out metafiles?
686 if not (force or self.generate_metadata):
689 session = DBConn().session().object_session(self)
691 fl_fd = fl_name = ac_fd = ac_name = None
693 arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
694 startdir = os.getcwd()
697 # Grab files we want to include
698 newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
699 # Write file list with newer files
700 (fl_fd, fl_name) = mkstemp()
702 os.write(fl_fd, '%s\n' % n.fullpath)
707 # Write minimal apt.conf
708 # TODO: Remove hardcoding from template
709 (ac_fd, ac_name) = mkstemp()
710 os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
712 'cachedir': cnf["Dir::Cache"],
713 'overridedir': cnf["Dir::Override"],
717 # Run apt-ftparchive generate
718 os.chdir(os.path.dirname(ac_name))
719 os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
721 # Run apt-ftparchive release
722 # TODO: Eww - fix this
723 bname = os.path.basename(self.path)
727 # We have to remove the Release file otherwise it'll be included in the
730 os.unlink(os.path.join(bname, 'Release'))
734 os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
736 # Crude hack with open and append, but this whole section is and should be redone.
737 if self.notautomatic:
738 release=open("Release", "a")
739 release.write("NotAutomatic: yes")
744 keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
745 if cnf.has_key("Dinstall::SigningPubKeyring"):
746 keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
748 os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
750 # Move the files if we got this far
751 os.rename('Release', os.path.join(bname, 'Release'))
753 os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
755 # Clean up any left behind files
782 def clean_and_update(self, starttime, Logger, dryrun=False):
783 """WARNING: This routine commits for you"""
784 session = DBConn().session().object_session(self)
786 if self.generate_metadata and not dryrun:
787 self.write_metadata(starttime)
789 # Grab files older than our execution time
790 older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
796 Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
798 Logger.log(["I: Removing %s from the queue" % o.fullpath])
799 os.unlink(o.fullpath)
802 # If it wasn't there, don't worry
803 if e.errno == ENOENT:
806 # TODO: Replace with proper logging call
807 Logger.log(["E: Could not remove %s" % o.fullpath])
814 for f in os.listdir(self.path):
815 if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
819 r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
820 except NoResultFound:
821 fp = os.path.join(self.path, f)
823 Logger.log(["I: Would remove unused link %s" % fp])
825 Logger.log(["I: Removing unused link %s" % fp])
829 Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
831 def add_file_from_pool(self, poolfile):
832 """Copies a file into the pool. Assumes that the PoolFile object is
833 attached to the same SQLAlchemy session as the Queue object is.
835 The caller is responsible for committing after calling this function."""
836 poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
838 # Check if we have a file of this name or this ID already
839 for f in self.queuefiles:
840 if f.fileid is not None and f.fileid == poolfile.file_id or \
841 f.poolfile.filename == poolfile_basename:
842 # In this case, update the BuildQueueFile entry so we
843 # don't remove it too early
844 f.lastused = datetime.now()
845 DBConn().session().object_session(poolfile).add(f)
848 # Prepare BuildQueueFile object
849 qf = BuildQueueFile()
850 qf.build_queue_id = self.queue_id
851 qf.lastused = datetime.now()
852 qf.filename = poolfile_basename
854 targetpath = poolfile.fullpath
855 queuepath = os.path.join(self.path, poolfile_basename)
859 # We need to copy instead of symlink
861 utils.copy(targetpath, queuepath)
862 # NULL in the fileid field implies a copy
865 os.symlink(targetpath, queuepath)
866 qf.fileid = poolfile.file_id
870 # Get the same session as the PoolFile is using and add the qf to it
871 DBConn().session().object_session(poolfile).add(qf)
876 __all__.append('BuildQueue')
@session_wrapper
def get_build_queue(queuename, session=None):
    """
    Returns the BuildQueue object for the given C{queue name}.

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: BuildQueue
    @return: BuildQueue object for the given queue (None if not present)
    """
    q = session.query(BuildQueue).filter_by(queue_name=queuename)

    try:
        return q.one()
    except NoResultFound:
        return None
902 __all__.append('get_build_queue')
904 ################################################################################
class BuildQueueFile(object):
    """A single file living in a build queue directory."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)

    @property
    def fullpath(self):
        # Absolute path: queue directory plus our file name.
        return os.path.join(self.buildqueue.path, self.filename)
918 __all__.append('BuildQueueFile')
920 ################################################################################
class ChangePendingBinary(object):
    """ORM class for a pending binary change; attributes come from the mapper."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ChangePendingBinary %s>' % self.change_pending_binary_id
929 __all__.append('ChangePendingBinary')
931 ################################################################################
class ChangePendingFile(object):
    """ORM class for a pending changed file; attributes come from the mapper."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ChangePendingFile %s>' % self.change_pending_file_id
940 __all__.append('ChangePendingFile')
942 ################################################################################
class ChangePendingSource(object):
    """ORM class for a pending source change; attributes come from the mapper."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ChangePendingSource %s>' % self.change_pending_source_id
951 __all__.append('ChangePendingSource')
953 ################################################################################
class Component(ORMObject):
    """ORM class for an archive component ('main', 'contrib', ...)."""

    def __init__(self, component_name = None):
        self.component_name = component_name

    def __eq__(self, val):
        # Allow comparing a Component directly against a plain name string.
        if isinstance(val, str):
            return (self.component_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        # Mirror of __eq__ for string comparisons.
        if isinstance(val, str):
            return (self.component_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        return ['component_name', 'component_id', 'description', \
            'location_count', 'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        return ['component_name']
979 __all__.append('Component')
@session_wrapper
def get_component(component, session=None):
    """
    Returns the Component object for the given C{component} name. Matching is
    done on the lower-cased name.

    @type component: string
    @param component: The name of the component

    @rtype: Component
    @return: Component object for the given name (None if not present)
    """
    component = component.lower()

    q = session.query(Component).filter_by(component_name=component)

    try:
        return q.one()
    except NoResultFound:
        return None
1002 __all__.append('get_component')
1004 ################################################################################
class DBConfig(object):
    """ORM class for a configuration row; attributes come from the mapper."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DBConfig %s>' % self.name
1013 __all__.append('DBConfig')
1015 ################################################################################
@session_wrapper
def get_or_set_contents_file_id(filename, session=None):
    """
    Returns database id for given filename.

    If no matching file is found, a row is inserted.

    @type filename: string
    @param filename: The filename
    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: int
    @return: the database id for the given filename
    """
    q = session.query(ContentFilename).filter_by(filename=filename)

    try:
        ret = q.one().cafilename_id
    except NoResultFound:
        # Not there yet: insert a new row and flush/commit so the id exists.
        cf = ContentFilename()
        cf.filename = filename
        session.add(cf)
        session.commit_or_flush()
        ret = cf.cafilename_id

    return ret
1048 __all__.append('get_or_set_contents_file_id')
1051 def get_contents(suite, overridetype, section=None, session=None):
1053 Returns contents for a suite / overridetype combination, limiting
1054 to a section if not None.
1057 @param suite: Suite object
1059 @type overridetype: OverrideType
1060 @param overridetype: OverrideType object
1062 @type section: Section
1063 @param section: Optional section object to limit results to
1065 @type session: SQLAlchemy
1066 @param session: Optional SQL session object (a temporary one will be
1067 generated if not supplied)
1069 @rtype: ResultsProxy
1070 @return: ResultsProxy object set up to return tuples of (filename, section,
1074 # find me all of the contents for a given suite
1075 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
1079 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
1080 JOIN content_file_names n ON (c.filename=n.id)
1081 JOIN binaries b ON (b.id=c.binary_pkg)
1082 JOIN override o ON (o.package=b.package)
1083 JOIN section s ON (s.id=o.section)
1084 WHERE o.suite = :suiteid AND o.type = :overridetypeid
1085 AND b.type=:overridetypename"""
1087 vals = {'suiteid': suite.suite_id,
1088 'overridetypeid': overridetype.overridetype_id,
1089 'overridetypename': overridetype.overridetype}
1091 if section is not None:
1092 contents_q += " AND s.id = :sectionid"
1093 vals['sectionid'] = section.section_id
1095 contents_q += " ORDER BY fn"
1097 return session.execute(contents_q, vals)
1099 __all__.append('get_contents')
1101 ################################################################################
class ContentFilepath(object):
    """ORM class for a contents file path; attributes come from the mapper."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ContentFilepath %s>' % self.filepath
1110 __all__.append('ContentFilepath')
@session_wrapper
def get_or_set_contents_path_id(filepath, session=None):
    """
    Returns database id for given path.

    If no matching file is found, a row is inserted.

    @type filepath: string
    @param filepath: The filepath

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: int
    @return: the database id for the given path
    """
    q = session.query(ContentFilepath).filter_by(filepath=filepath)

    try:
        ret = q.one().cafilepath_id
    except NoResultFound:
        # Not there yet: insert a new row and flush/commit so the id exists.
        cf = ContentFilepath()
        cf.filepath = filepath
        session.add(cf)
        session.commit_or_flush()
        ret = cf.cafilepath_id

    return ret
1144 __all__.append('get_or_set_contents_path_id')
1146 ################################################################################
class ContentAssociation(object):
    """ORM class for a content association row; attributes come from the mapper."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ContentAssociation %s>' % self.ca_id
1155 __all__.append('ContentAssociation')
1157 def insert_content_paths(binary_id, fullpaths, session=None):
1159 Make sure given path is associated with given binary id
1161 @type binary_id: int
1162 @param binary_id: the id of the binary
1163 @type fullpaths: list
1164 @param fullpaths: the list of paths of the file being associated with the binary
1165 @type session: SQLAlchemy session
1166 @param session: Optional SQLAlchemy session. If this is passed, the caller
1167 is responsible for ensuring a transaction has begun and committing the
1168 results or rolling back based on the result code. If not passed, a commit
1169 will be performed at the end of the function, otherwise the caller is
1170 responsible for commiting.
1172 @return: True upon success
1175 privatetrans = False
1177 session = DBConn().session()
1182 def generate_path_dicts():
1183 for fullpath in fullpaths:
1184 if fullpath.startswith( './' ):
1185 fullpath = fullpath[2:]
1187 yield {'filename':fullpath, 'id': binary_id }
1189 for d in generate_path_dicts():
1190 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
1199 traceback.print_exc()
1201 # Only rollback if we set up the session ourself
1208 __all__.append('insert_content_paths')
1210 ################################################################################
class DSCFile(object):
    """ORM class for a file referenced by a .dsc; attributes come from the mapper."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DSCFile %s>' % self.dscfile_id
1219 __all__.append('DSCFile')
@session_wrapper
def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
    """
    Returns a list of DSCFiles which may be empty

    @type dscfile_id: int (optional)
    @param dscfile_id: the dscfile_id of the DSCFiles to find

    @type source_id: int (optional)
    @param source_id: the source id related to the DSCFiles to find

    @type poolfile_id: int (optional)
    @param poolfile_id: the poolfile id related to the DSCFiles to find

    @rtype: list
    @return: Possibly empty list of DSCFiles
    """
    q = session.query(DSCFile)

    # Each filter is applied only when the corresponding argument was given.
    if dscfile_id is not None:
        q = q.filter_by(dscfile_id=dscfile_id)

    if source_id is not None:
        q = q.filter_by(source_id=source_id)

    if poolfile_id is not None:
        q = q.filter_by(poolfile_id=poolfile_id)

    return q.all()
1252 __all__.append('get_dscfiles')
1254 ################################################################################
class PoolFile(ORMObject):
    """ORM class for a file stored in the pool."""

    def __init__(self, filename = None, location = None, filesize = -1, \
        md5sum = None):
        self.filename = filename
        self.location = location
        self.filesize = filesize
        self.md5sum = md5sum

    @property
    def fullpath(self):
        # Absolute path: location directory plus the pool-relative file name.
        return os.path.join(self.location.path, self.filename)

    def is_valid(self, filesize = -1, md5sum = None):
        # A file is valid when both size and md5sum match the stored values.
        return self.filesize == long(filesize) and self.md5sum == md5sum

    def properties(self):
        return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
            'sha256sum', 'location', 'source', 'binary', 'last_used']

    def not_null_constraints(self):
        return ['filename', 'md5sum', 'location']
1278 __all__.append('PoolFile')
@session_wrapper
def check_poolfile(filename, filesize, md5sum, location_id, session=None):
    """
    Returns a tuple:
    (ValidFileFound [boolean], PoolFile object or None)

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @type filesize: int
    @param filesize: the size of the file to check against the DB

    @type md5sum: string
    @param md5sum: the md5sum of the file to check against the DB

    @type location_id: int
    @param location_id: the id of the location to look in

    @rtype: tuple
    @return: Tuple of length 2.
        - If valid pool file found: (C{True}, C{PoolFile object})
        - If valid pool file not found:
            - (C{False}, C{None}) if no file found
            - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
    """
    poolfile = session.query(Location).get(location_id). \
        files.filter_by(filename=filename).first()
    valid = False
    if poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum):
        valid = True

    return (valid, poolfile)

__all__.append('check_poolfile')
# TODO: the implementation can trivially be inlined at the place where the
# function is called
@session_wrapper
def get_poolfile_by_id(file_id, session=None):
    """
    Returns a PoolFile objects or None for the given id

    @type file_id: int
    @param file_id: the id of the file to look for

    @rtype: PoolFile or None
    @return: either the PoolFile object or None
    """
    return session.query(PoolFile).get(file_id)

__all__.append('get_poolfile_by_id')
@session_wrapper
def get_poolfile_like_name(filename, session=None):
    """
    Returns an array of PoolFile objects which are like the given name

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @rtype: array
    @return: array of PoolFile objects
    """
    # TODO: There must be a way of properly using bind parameters with %FOO%
    q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))

    return q.all()

__all__.append('get_poolfile_like_name')
@session_wrapper
def add_poolfile(filename, datadict, location_id, session=None):
    """
    Add a new file to the pool

    @type filename: string
    @param filename: filename

    @type datadict: dict
    @param datadict: dict with needed data (size, md5sum, sha1sum, sha256sum)

    @type location_id: int
    @param location_id: database id of the location

    @rtype: PoolFile
    @return: the PoolFile object created
    """
    poolfile = PoolFile()
    poolfile.filename = filename
    poolfile.filesize = datadict["size"]
    poolfile.md5sum = datadict["md5sum"]
    poolfile.sha1sum = datadict["sha1sum"]
    poolfile.sha256sum = datadict["sha256sum"]
    poolfile.location_id = location_id

    session.add(poolfile)
    # Flush to get a file id (NB: This is not a commit)
    session.flush()

    return poolfile

__all__.append('add_poolfile')
1386 ################################################################################
class Fingerprint(ORMObject):
    """ORM-mapped row of the C{fingerprint} table (one GPG key fingerprint)."""

    def __init__(self, fingerprint = None):
        self.fingerprint = fingerprint

    def properties(self):
        # NOTE(review): the continuation line of this list was lost in the
        # paste; trailing entries may be missing — confirm against VCS.
        return ['fingerprint', 'fingerprint_id', 'keyring', 'uid']

    def not_null_constraints(self):
        return ['fingerprint']

__all__.append('Fingerprint')
@session_wrapper
def get_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr or None
    """
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    try:
        ret = q.one()
    except NoResultFound:
        ret = None

    return ret

__all__.append('get_fingerprint')
@session_wrapper
def get_or_set_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    If no matching fpr is found, a row is inserted.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr
    """
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    try:
        ret = q.one()
    except NoResultFound:
        fingerprint = Fingerprint()
        fingerprint.fingerprint = fpr
        session.add(fingerprint)
        session.commit_or_flush()
        ret = fingerprint

    return ret

__all__.append('get_or_set_fingerprint')
1463 ################################################################################
# Helper routine for Keyring class
def get_ldap_name(entry):
    """
    Extract a full name from an LDAP result entry by joining the cn, mn and
    sn attributes, skipping empty or placeholder ("-") values.

    @type entry: dict
    @param entry: LDAP attribute dict mapping attribute name -> list of values

    @rtype: string
    @return: the assembled name (may be empty)
    """
    name = []
    for k in ["cn", "mn", "sn"]:
        ret = entry.get(k)
        if ret and ret[0] != "" and ret[0] != "-":
            name.append(ret[0])
    return " ".join(name)
1474 ################################################################################
class Keyring(object):
    """Represents one GPG keyring known to the archive and the keys on it.

    NOTE(review): interior lines of this class were lost in the paste and
    have been reconstructed from the visible structure — confirm against VCS.
    """

    # Command template used to list keys; %s is the keyring path.
    gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
                     " --with-colons --fingerprint --fingerprint"

    # NOTE(review): class-level mutable attributes are shared between all
    # Keyring instances — presumably intentional upstream, but verify.
    keys = {}
    fpr_lookup = {}

    def __init__(self, *args, **kwargs):
        # Attributes are populated by the SQLAlchemy mapper, not here.
        pass

    def __repr__(self):
        return '<Keyring %s>' % self.keyring_name

    def de_escape_gpg_str(self, txt):
        """Decode gpg's \\xNN escape sequences into their characters."""
        esclist = re.split(r'(\\x..)', txt)
        # Every odd element of the split is a \xNN escape; decode it.
        for x in range(1,len(esclist),2):
            esclist[x] = "%c" % (int(esclist[x][2:],16))
        return "".join(esclist)

    def parse_address(self, uid):
        """parses uid and returns a tuple of real name and email address"""
        import email.Utils
        (name, address) = email.Utils.parseaddr(uid)
        # Strip any parenthesised comment from the name part.
        name = re.sub(r"\s*[(].*[)]", "", name)
        name = self.de_escape_gpg_str(name)
        if name == "":
            name = uid
        return (name, address)

    def load_keys(self, keyring):
        """Populate self.keys / self.fpr_lookup from gpg --with-colons output."""
        if not self.keyring_id:
            raise Exception('Must be initialized with database information')

        k = os.popen(self.gpg_invocation % keyring, "r")
        key = None
        signingkey = False

        for line in k.xreadlines():
            field = line.split(":")
            if field[0] == "pub":
                key = field[4]
                self.keys[key] = {}
                (name, addr) = self.parse_address(field[9])
                if "@" in addr:
                    self.keys[key]["email"] = addr
                    self.keys[key]["name"] = name
                self.keys[key]["fingerprints"] = []
                signingkey = True
            elif key and field[0] == "sub" and len(field) >= 12:
                signingkey = ("s" in field[11])
            elif key and field[0] == "uid":
                (name, addr) = self.parse_address(field[9])
                if "email" not in self.keys[key] and "@" in addr:
                    self.keys[key]["email"] = addr
                    self.keys[key]["name"] = name
            elif signingkey and field[0] == "fpr":
                self.keys[key]["fingerprints"].append(field[9])
                self.fpr_lookup[field[9]] = key

    def import_users_from_ldap(self, session):
        """Map loaded fingerprints to Debian uids via LDAP.

        @return: (byname, byuid) dictionaries keyed by uid name / uid id.
        """
        import ldap
        cnf = Config()

        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]

        l = ldap.open(LDAPServer)
        l.simple_bind_s("","")
        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
               ["uid", "keyfingerprint", "cn", "mn", "sn"])

        ldap_fin_uid_id = {}

        byuid = {}
        byname = {}

        for i in Attrs:
            entry = i[1]
            uid = entry["uid"][0]
            name = get_ldap_name(entry)
            fingerprints = entry["keyFingerPrint"]
            keyid = None
            for f in fingerprints:
                key = self.fpr_lookup.get(f, None)
                if key not in self.keys:
                    continue
                self.keys[key]["uid"] = uid

                # Only record the first usable fingerprint per LDAP entry.
                if keyid is not None:
                    continue
                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, name)
                byname[uid] = (keyid, name)

        return (byname, byuid)

    def generate_users_from_keyring(self, format, session):
        """Create uid entries from key email addresses using C{format}.

        Keys without a usable email address share one "invalid-uid" entry.
        @return: (byname, byuid) dictionaries keyed by uid name / uid id.
        """
        byuid = {}
        byname = {}
        any_invalid = False
        for x in self.keys.keys():
            if "email" not in self.keys[x]:
                any_invalid = True
                self.keys[x]["uid"] = format % "invalid-uid"
            else:
                uid = format % self.keys[x]["email"]
                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, self.keys[x]["name"])
                byname[uid] = (keyid, self.keys[x]["name"])
                self.keys[x]["uid"] = uid

        if any_invalid:
            uid = format % "invalid-uid"
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, "ungeneratable user id")
            byname[uid] = (keyid, "ungeneratable user id")

        return (byname, byuid)
1596 __all__.append('Keyring')
@session_wrapper
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @rtype: Keyring
    @return: the Keyring object for this keyring
    """
    q = session.query(Keyring).filter_by(keyring_name=keyring)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_keyring')
1620 ################################################################################
class KeyringACLMap(object):
    """ORM-mapped row of the keyring ACL map table."""

    def __init__(self, *args, **kwargs):
        # Attributes are populated by the SQLAlchemy mapper, not here.
        pass

    def __repr__(self):
        return '<KeyringACLMap %s>' % self.keyring_acl_map_id
1629 __all__.append('KeyringACLMap')
1631 ################################################################################
class DBChange(object):
    """ORM-mapped row of the C{changes} table (one uploaded .changes file)."""

    def __init__(self, *args, **kwargs):
        # Attributes are populated by the SQLAlchemy mapper, not here.
        pass

    def __repr__(self):
        return '<DBChange %s>' % self.changesname

    def clean_from_queue(self):
        """Detach this changes entry from its policy queue and related files."""
        session = DBConn().session().object_session(self)

        # Remove changes_pool_files entries
        # NOTE(review): the statements clearing the two relations were lost
        # in the paste; reconstructed as relation resets — confirm against VCS.
        self.poolfiles = []

        # Remove changes_pending_files references
        self.files = []

        # Clear out of queue
        self.in_queue = None
        self.approved_for_id = None
1653 __all__.append('DBChange')
@session_wrapper
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return: DBChange object for the given filename (C{None} if not present)
    """
    q = session.query(DBChange).filter_by(changesname=filename)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_dbchange')
1680 ################################################################################
class Location(ORMObject):
    """ORM-mapped row of the C{location} table (a pool base directory)."""

    def __init__(self, path = None, component = None):
        self.path = path
        self.component = component
        # the column 'type' should go away, see comment at mapper
        self.archive_type = 'pool'

    def properties(self):
        # NOTE(review): the continuation line of this list was lost in the
        # paste; trailing entries may be missing — confirm against VCS.
        return ['path', 'location_id', 'archive_type', 'component']

    def not_null_constraints(self):
        return ['path', 'archive_type']

__all__.append('Location')
@session_wrapper
def get_location(location, component=None, archive=None, session=None):
    """
    Returns Location object for the given combination of location, component
    and archive

    @type location: string
    @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}

    @type component: string
    @param component: the component name (if None, no restriction applied)

    @type archive: string
    @param archive: the archive name (if None, no restriction applied)

    @rtype: Location / None
    @return: Either a Location object or None if one can't be found
    """
    q = session.query(Location).filter_by(path=location)

    if archive is not None:
        q = q.join(Archive).filter_by(archive_name=archive)

    if component is not None:
        q = q.join(Component).filter_by(component_name=component)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_location')
1732 ################################################################################
class Maintainer(ORMObject):
    """ORM-mapped row of the C{maintainer} table."""

    def __init__(self, name = None):
        self.name = name

    def properties(self):
        return ['name', 'maintainer_id']

    def not_null_constraints(self):
        return ['name']

    def get_split_maintainer(self):
        # Empty 4-tuple when no name is set; otherwise delegate to
        # fix_maintainer() for the (rfc822, rfc2047, name, email) split.
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())

__all__.append('Maintainer')
@session_wrapper
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """
    q = session.query(Maintainer).filter_by(name=name)

    try:
        ret = q.one()
    except NoResultFound:
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()
        ret = maintainer

    return ret

__all__.append('get_or_set_maintainer')
@session_wrapper
def get_maintainer(maintainer_id, session=None):
    """
    Return the name of the maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer
    @return: the Maintainer with this C{maintainer_id}
    """
    return session.query(Maintainer).get(maintainer_id)

__all__.append('get_maintainer')
1803 ################################################################################
class NewComment(object):
    """ORM-mapped row of the C{new_comments} table (a NEW-queue comment)."""

    def __init__(self, *args, **kwargs):
        # Attributes are populated by the SQLAlchemy mapper, not here.
        pass

    def __repr__(self):
        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
1812 __all__.append('NewComment')
@session_wrapper
def has_new_comment(package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """
    q = session.query(NewComment)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    # The comparison already yields a bool; no bool() wrapper needed.
    return q.count() > 0

__all__.append('has_new_comment')
@session_wrapper
def get_new_comments(package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """
    q = session.query(NewComment)
    if package is not None: q = q.filter_by(package=package)
    if version is not None: q = q.filter_by(version=version)
    if comment_id is not None: q = q.filter_by(comment_id=comment_id)

    return q.all()

__all__.append('get_new_comments')
1873 ################################################################################
class Override(ORMObject):
    """ORM-mapped row of the C{override} table."""

    def __init__(self, package = None, suite = None, component = None, overridetype = None, \
        section = None, priority = None):
        self.package = package
        self.suite = suite
        self.component = component
        self.overridetype = overridetype
        self.section = section
        self.priority = priority

    def properties(self):
        return ['package', 'suite', 'component', 'overridetype', 'section', \
            'priority']

    def not_null_constraints(self):
        return ['package', 'suite', 'component', 'overridetype', 'section']

__all__.append('Override')
@session_wrapper
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to. If
                  None, don't limit. Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
                      limit to. If None, don't limit. Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
                         limit to. If None, don't limit. Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """
    q = session.query(Override)
    q = q.filter_by(package=package)

    # Scalar filters are promoted to single-element lists for in_().
    if suite is not None:
        if not isinstance(suite, list): suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list): component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list): overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

    return q.all()

__all__.append('get_override')
1942 ################################################################################
class OverrideType(ORMObject):
    """ORM-mapped row of the C{override_type} table (deb/udeb/dsc)."""

    def __init__(self, overridetype = None):
        self.overridetype = overridetype

    def properties(self):
        return ['overridetype', 'overridetype_id', 'overrides_count']

    def not_null_constraints(self):
        return ['overridetype']

__all__.append('OverrideType')
@session_wrapper
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: OverrideType
    @return: the database id for the given override type
    """
    q = session.query(OverrideType).filter_by(overridetype=override_type)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_override_type')
1981 ################################################################################
class PolicyQueue(object):
    """ORM-mapped row of the C{policy_queue} table."""

    def __init__(self, *args, **kwargs):
        # Attributes are populated by the SQLAlchemy mapper, not here.
        pass

    def __repr__(self):
        return '<PolicyQueue %s>' % self.queue_name
1990 __all__.append('PolicyQueue')
@session_wrapper
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(queue_name=queuename)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_policy_queue')
@session_wrapper
def get_policy_queue_from_path(pathname, session=None):
    """
    Returns PolicyQueue object for given C{path name}

    @type pathname: string
    @param pathname: The path

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(path=pathname)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_policy_queue_from_path')
2042 ################################################################################
class Priority(ORMObject):
    """ORM-mapped row of the C{priority} table."""

    def __init__(self, priority = None, level = None):
        self.priority = priority
        self.level = level

    def properties(self):
        return ['priority', 'priority_id', 'level', 'overrides_count']

    def not_null_constraints(self):
        return ['priority', 'level']

    def __eq__(self, val):
        # Allow direct comparison against a priority name string.
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented

__all__.append('Priority')
@session_wrapper
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority
    """
    q = session.query(Priority).filter_by(priority=priority)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_priority')
@session_wrapper
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """
    ret = {}
    q = session.query(Priority)
    for x in q.all():
        ret[x.priority] = x.priority_id

    return ret

__all__.append('get_priorities')
2116 ################################################################################
class Section(ORMObject):
    """ORM-mapped row of the C{section} table."""

    def __init__(self, section = None):
        self.section = section

    def properties(self):
        return ['section', 'section_id', 'overrides_count']

    def not_null_constraints(self):
        return ['section']

    def __eq__(self, val):
        # Allow direct comparison against a section name string.
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented

__all__.append('Section')
@session_wrapper
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name
    """
    q = session.query(Section).filter_by(section=section)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_section')
@session_wrapper
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """
    ret = {}
    q = session.query(Section)
    for x in q.all():
        ret[x.section] = x.section_id

    return ret

__all__.append('get_sections')
2189 ################################################################################
from debian.debfile import Deb822

# Temporary Deb822 subclass to fix bugs with : handling; see #597249
class Dak822(Deb822):
    def _internal_parser(self, sequence, fields=None):
        """Parse an RFC822-style paragraph into this mapping.

        NOTE(review): interior lines of this method were lost in the paste
        and reconstructed from the visible regexes/branches — confirm
        against VCS.
        """
        # The key is non-whitespace, non-colon characters before any colon.
        key_part = r"^(?P<key>[^: \t\n\r\f\v]+)\s*:\s*"
        single = re.compile(key_part + r"(?P<data>\S.*?)\s*$")
        multi = re.compile(key_part + r"$")
        multidata = re.compile(r"^\s(?P<data>.+?)\s*$")

        wanted_field = lambda f: fields is None or f in fields

        if isinstance(sequence, basestring):
            sequence = sequence.splitlines()

        curkey = None
        content = ""
        for line in self.gpg_stripped_paragraph(sequence):
            # Single-line "Key: value" field.
            m = single.match(line)
            if m:
                if curkey:
                    self[curkey] = content

                if not wanted_field(m.group('key')):
                    curkey = None
                    continue

                curkey = m.group('key')
                content = m.group('data')
                continue

            # "Key:" with the value on following continuation lines.
            m = multi.match(line)
            if m:
                if curkey:
                    self[curkey] = content

                if not wanted_field(m.group('key')):
                    curkey = None
                    continue

                curkey = m.group('key')
                content = ""
                continue

            # Continuation line of a multi-line field.
            m = multidata.match(line)
            if m:
                content += '\n' + line # XXX not m.group('data')?
                continue

        if curkey:
            self[curkey] = content
class DBSource(ORMObject):
    """ORM-mapped row of the C{source} table (one source package version)."""

    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date

    # NOTE(review): the header of this accessor was lost in the paste; it
    # returns source_id — confirm the original property name against VCS.
    @property
    def pkid(self):
        return self.source_id

    def properties(self):
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \
            'install_date', 'binaries_count']

    def not_null_constraints(self):
        # 'install_date' appeared twice in the original list; the redundant
        # duplicate has been dropped (checking a column twice is a no-op).
        return ['source', 'version', 'install_date', 'maintainer', \
            'changedby', 'poolfile']

    def read_control_fields(self):
        """
        Reads the control information from a dsc

        @rtype: tuple
        @return: fields is the dsc information in a dictionary form
        """
        fullpath = self.poolfile.fullpath
        # NOTE(review): the file handle opened here is never explicitly
        # closed; consider a with-statement upstream.
        fields = Dak822(open(self.poolfile.fullpath, 'r'))
        return fields

    metadata = association_proxy('key', 'value')

__all__.append('DBSource')
@session_wrapper
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
      1. exact match     => 1.0-3
      2. bin-only NMU    => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: list
    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: returns 1 if a source with expected version is found, otherwise 0

    NOTE(review): interior lines were lost in the paste and reconstructed
    from the visible structure — confirm against VCS.
    """
    cnf = Config()
    ret = True

    from daklib.regexes import re_bin_only_nmu
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))
        if suite != "any":
            # source must exist in suite X, or in some other suite that's
            # mapped to X, recursively... silent-maps are counted too,
            # unreleased-maps aren't.
            maps = cnf.ValueList("SuiteMappings")[:]
            maps.reverse()
            maps = [ m.split() for m in maps ]
            maps = [ (x[1], x[2]) for x in maps
                     if x[0] == "map" or x[0] == "silent-map" ]
            s = [suite]
            for (from_, to) in maps:
                if from_ in s and to not in s:
                    s.append(to)

            q = q.filter(DBSource.suites.any(Suite.suite_name.in_(s)))

        if q.count() > 0:
            continue

        # No source found so return not ok
        ret = False
        break

    return ret

__all__.append('source_exists')
@session_wrapper
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """
    return session.query(Suite).filter(Suite.sources.any(source=source)).all()

__all__.append('get_suites_source_in')
@session_wrapper
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @type source: str
    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect. If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBSource objects for the given name (may be empty)
    """
    q = session.query(DBSource).filter_by(source=source)

    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)

    return q.all()

__all__.append('get_sources_from_name')
# FIXME: This function fails badly if it finds more than 1 source package and
# its implementation is trivial enough to be inlined.
@session_wrapper
def get_source_in_suite(source, suite, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite: string
    @param suite: the suite name

    @rtype: string
    @return: the version for I{source} in I{suite}
    """
    q = get_suite(suite, session).get_sources(source)
    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_source_in_suite')
@session_wrapper
def import_metadata_into_db(obj, session=None):
    """
    This routine works on either DBBinary or DBSource objects and imports
    their metadata into the database

    @type obj: DBBinary or DBSource
    @param obj: the object whose control fields should be imported
    """
    fields = obj.read_control_fields()
    for k in fields.keys():
        try:
            # Try plain ASCII first.
            val = str(fields[k])
        except UnicodeEncodeError:
            # Fall back to UTF-8
            try:
                val = fields[k].encode('utf-8')
            except UnicodeEncodeError:
                # Finally try iso8859-1
                val = fields[k].encode('iso8859-1')
                # Otherwise we allow the exception to percolate up and we cause
                # a reject as someone is playing silly buggers

        obj.metadata[get_or_set_metadatakey(k, session)] = val

    session.commit_or_flush()

__all__.append('import_metadata_into_db')
2452 ################################################################################
def add_dsc_to_db(u, filename, session=None):
    """Insert the source package described by C{u.pkg.dsc} into the database.

    @return: (source, dsc_component, dsc_location_id, pfs) where pfs is the
    list of PoolFile objects touched.

    NOTE(review): interior lines were lost in the paste and reconstructed
    from the visible structure — confirm against VCS before relying on the
    exact statements marked below.
    """
    entry = u.pkg.files[filename]
    source = DBSource()
    pfs = []

    source.source = u.pkg.dsc["source"]
    source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
    source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
    source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    source.install_date = datetime.now().date()

    dsc_component = entry["component"]
    dsc_location_id = entry["location id"]

    source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")

    # Set up a new poolfile if necessary
    if not entry.has_key("files id") or not entry["files id"]:
        filename = entry["pool name"] + filename
        poolfile = add_poolfile(filename, entry, dsc_location_id, session)
        pfs.append(poolfile)
        entry["files id"] = poolfile.file_id

    source.poolfile_id = entry["files id"]
    session.add(source)

    suite_names = u.pkg.changes["distribution"].keys()
    source.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Add the source files to the DB (files and dsc_files)
    dscfile = DSCFile()
    dscfile.source_id = source.source_id
    dscfile.poolfile_id = entry["files id"]
    session.add(dscfile)

    for dsc_file, dentry in u.pkg.dsc_files.items():
        df = DSCFile()
        df.source_id = source.source_id

        # If the .orig tarball is already in the pool, it's
        # files id is stored in dsc_files by check_dsc().
        files_id = dentry.get("files id", None)

        # Find the entry in the files hash
        # TODO: Bail out here properly
        dfentry = None
        for f, e in u.pkg.files.items():
            if f == dsc_file:
                dfentry = e
                break

        if files_id is None:
            filename = dfentry["pool name"] + dsc_file

            (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
            # FIXME: needs to check for -1/-2 and or handle exception
            if found and obj is not None:
                files_id = obj.file_id
                pfs.append(obj)

            # If still not found, add it
            if files_id is None:
                # HACK: Force sha1sum etc into dentry
                dentry["sha1sum"] = dfentry["sha1sum"]
                dentry["sha256sum"] = dfentry["sha256sum"]
                poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
                pfs.append(poolfile)
                files_id = poolfile.file_id
        else:
            poolfile = get_poolfile_by_id(files_id, session)
            if poolfile is None:
                utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
            pfs.append(poolfile)

        df.poolfile_id = files_id
        session.add(df)

    # Add the src_uploaders to the DB
    uploader_ids = [source.maintainer_id]
    if u.pkg.dsc.has_key("uploaders"):
        for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
            up = up.strip()
            uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)

    added_ids = {}
    for up_id in uploader_ids:
        if added_ids.has_key(up_id):
            # Skip duplicated uploaders (Maintainer may also be an Uploader).
            utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))
            continue

        added_ids[up_id] = 1

        # NOTE(review): the uploader-row class name was lost in the paste;
        # reconstructed as SrcUploader — confirm against VCS.
        su = SrcUploader()
        su.maintainer_id = up_id
        su.source_id = source.source_id
        session.add(su)

    session.flush()

    return source, dsc_component, dsc_location_id, pfs

__all__.append('add_dsc_to_db')
def add_deb_to_db(u, filename, session=None):
    """
    Add a binary package from upload C{u} to the database.

    Contrary to what you might expect, this routine deals with both
    debs and udebs.  That info is in 'dbtype', whilst 'type' is
    'deb' for both of them

    @type u: Upload
    @param u: upload object; C{u.pkg.files[filename]} supplies the package
        metadata and C{u.pkg.changes} the fingerprint and distributions

    @type filename: string
    @param filename: key of this binary in C{u.pkg.files}; rebound below to
        the pool-relative path of the file

    @type session: SQLAlchemy session
    @param session: session used for all lookups and inserts

    @return: tuple (bin, poolfile) for the binary just populated
    """
    # NOTE(review): several lines appear to be elided from this excerpt
    # (e.g. the construction of `bin` and the else: of the "files id"
    # branch below); comments describe only the visible code.
    entry = u.pkg.files[filename]

    # Populate the binary's fields from the upload metadata, creating
    # maintainer/fingerprint rows on demand.
    bin.package = entry["package"]
    bin.version = entry["version"]
    bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
    bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    bin.arch_id = get_architecture(entry["architecture"], session).arch_id
    bin.binarytype = entry["dbtype"]

    # Rewrite filename to its pool-relative path and make sure the
    # file's location row is known.
    filename = entry["pool name"] + filename
    fullpath = os.path.join(cnf["Dir::Pool"], filename)
    if not entry.get("location id", None):
        entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id

    if entry.get("files id", None):
        # NOTE(review): bin.poolfile_id has not been assigned yet at this
        # point, so this lookup runs on an unset attribute -- it probably
        # should be get_poolfile_by_id(entry["files id"], session), cf. the
        # call with an explicit session elsewhere in this file.  Confirm.
        poolfile = get_poolfile_by_id(bin.poolfile_id)
        bin.poolfile_id = entry["files id"]
        # NOTE(review): the else: introducing the next two lines (file not
        # yet in the pool) appears elided in this excerpt.
        poolfile = add_poolfile(filename, entry, entry["location id"], session)
        bin.poolfile_id = entry["files id"] = poolfile.file_id

    # A binary must resolve to exactly one source package.
    bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
    if len(bin_sources) != 1:
        raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
                                  (bin.package, bin.version, entry["architecture"],
                                   filename, bin.binarytype, u.pkg.changes["fingerprint"])

    bin.source_id = bin_sources[0].source_id

    # Record Built-Using references; each must also resolve to exactly
    # one source package.
    if entry.has_key("built-using"):
        for srcname, version in entry["built-using"]:
            exsources = get_sources_from_name(srcname, version, session=session)
            if len(exsources) != 1:
                raise NoSourceFieldError, "Unable to find source package (%s = %s) in Built-Using for %s (%s), %s, file %s, type %s, signed by %s" % \
                                          (srcname, version, bin.package, bin.version, entry["architecture"],
                                           filename, bin.binarytype, u.pkg.changes["fingerprint"])

            bin.extra_sources.append(exsources[0])

    # Add and flush object so it has an ID
    # Associate the binary with every suite named in the .changes
    # Distribution field.
    suite_names = u.pkg.changes["distribution"].keys()
    bin.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Deal with contents - disabled for now
    #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
    # print "REJECT\nCould not determine contents of package %s" % bin.package
    # session.rollback()
    # raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)

    return bin, poolfile

__all__.append('add_deb_to_db')
2632 ################################################################################
class SourceACL(object):
    """ORM class for a row of table source_acl (see the mapper in DBConn)."""

    def __init__(self, *args, **kwargs):
        # NOTE(review): the __init__ body and the `def __repr__(self):`
        # header appear elided in this excerpt; the return below is
        # presumably the body of __repr__ -- confirm against the full file.
        return '<SourceACL %s>' % self.source_acl_id

__all__.append('SourceACL')
2643 ################################################################################
class SrcFormat(object):
    """ORM class for a row of table src_format (see the mapper in DBConn)."""

    def __init__(self, *args, **kwargs):
        # NOTE(review): the __init__ body and the `def __repr__(self):`
        # header appear elided in this excerpt; the return below is
        # presumably the body of __repr__ -- confirm against the full file.
        return '<SrcFormat %s>' % (self.format_name)

__all__.append('SrcFormat')
2654 ################################################################################
class SrcUploader(object):
    """ORM class for a row of table src_uploaders (see the mapper in DBConn)."""

    def __init__(self, *args, **kwargs):
        # NOTE(review): the __init__ body and the `def __repr__(self):`
        # header appear elided in this excerpt; the return below is
        # presumably the body of __repr__ -- confirm against the full file.
        return '<SrcUploader %s>' % self.uploader_id

__all__.append('SrcUploader')
2665 ################################################################################
# (display name, Suite attribute) pairs used when rendering a suite's
# settings (see the loop over SUITE_FIELDS in class Suite below).
# NOTE(review): one entry of this list appears elided in this excerpt.
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
# Why the heck don't we have any UNIQUE constraints in table suite?
# TODO: Add UNIQUE constraints for appropriate columns.
class Suite(ORMObject):
    """A distribution suite (row of table suite).

    Comparison against a plain string compares the suite name, so
    C{suite == 'unstable'} works as expected.
    """

    def __init__(self, suite_name = None, version = None):
        self.suite_name = suite_name
        self.version = version

    def properties(self):
        # Attributes exposed through ORMObject's generic machinery.
        # NOTE(review): the continuation line of this list appears elided
        # in this excerpt.
        return ['suite_name', 'version', 'sources_count', 'binaries_count', \

    def not_null_constraints(self):
        # Columns that must be set before the row is valid.
        return ['suite_name', 'version']

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.suite_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.suite_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    # NOTE(review): the method header (presumably `def details(self):`)
    # and some body lines appear elided here; the loop below renders the
    # SUITE_FIELDS values one per line.
        for disp, field in SUITE_FIELDS:
            val = getattr(self, field, None)
                ret.append("%s: %s" % (disp, val))
        return "\n".join(ret)

    def get_architectures(self, skipsrc=False, skipall=False):
        """
        Returns list of Architecture objects

        @type skipsrc: boolean
        @param skipsrc: Whether to skip returning the 'source' architecture entry

        @type skipall: boolean
        @param skipall: Whether to skip returning the 'all' architecture entry

        @return: list of Architecture objects for the given name (may be empty)
        """
        q = object_session(self).query(Architecture).with_parent(self)
        # NOTE(review): the `if skipsrc:` / `if skipall:` guards for the
        # two filters below appear elided in this excerpt.
        q = q.filter(Architecture.arch_string != 'source')
        q = q.filter(Architecture.arch_string != 'all')
        return q.order_by(Architecture.arch_string).all()

    def get_sources(self, source):
        """
        Returns a query object representing DBSource that is part of C{suite}.

          - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}

        @type source: string
        @param source: source package name

        @rtype: sqlalchemy.orm.query.Query
        @return: a query of DBSource
        """
        session = object_session(self)
        # NOTE(review): the continuation of this query (restricting it to
        # this suite) appears elided in this excerpt.
        return session.query(DBSource).filter_by(source = source). \

__all__.append('Suite')
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Suite object for the requested suite name (None if not present)
    """
    q = session.query(Suite).filter_by(suite_name=suite)

    # NOTE(review): the try:/q.one() lines and both return statements
    # appear elided in this excerpt.
    except NoResultFound:

__all__.append('get_suite')
2786 ################################################################################
# TODO: should be removed because the implementation is too trivial

def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name

    @type suite: str
    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: list of Architecture objects for the given name (may be empty)
    """
    # Thin wrapper: delegate to Suite.get_architectures.
    return get_suite(suite, session).get_architectures(skipsrc, skipall)

__all__.append('get_suite_architectures')
2817 ################################################################################
class SuiteSrcFormat(object):
    """ORM class for a row of table suite_src_formats, linking a suite to
    an allowed source format (see the mapper in DBConn)."""

    def __init__(self, *args, **kwargs):
        # NOTE(review): the __init__ body and the `def __repr__(self):`
        # header appear elided in this excerpt; the return below is
        # presumably the body of __repr__ -- confirm against the full file.
        return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)

__all__.append('SuiteSrcFormat')
def get_suite_src_formats(suite, session=None):
    """
    Returns list of allowed SrcFormat for C{suite}.

    @type suite: str
    @param suite: Suite name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: the list of allowed source formats for I{suite}
    """
    # Join SrcFormat -> SuiteSrcFormat -> Suite to pick only formats
    # enabled for the named suite, ordered by format name.
    q = session.query(SrcFormat)
    q = q.join(SuiteSrcFormat)
    q = q.join(Suite).filter_by(suite_name=suite)
    q = q.order_by('format_name')

    # NOTE(review): the final `return q.all()` appears elided in this
    # excerpt.

__all__.append('get_suite_src_formats')
2853 ################################################################################
class Uid(ORMObject):
    """A key uid (row of table uid).

    Comparison against a plain string compares the uid value itself.
    """

    def __init__(self, uid = None, name = None):
        # NOTE(review): the attribute assignments of __init__ appear
        # elided in this excerpt.

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.uid == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.uid != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        # Attributes exposed through ORMObject's generic machinery.
        return ['uid', 'name', 'fingerprint']

    def not_null_constraints(self):
        # NOTE(review): the return statement appears elided in this
        # excerpt.

__all__.append('Uid')
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.

    @return: the uid object for the given uidname
    """
    q = session.query(Uid).filter_by(uid=uidname)

    # NOTE(review): the try:/q.one() lines and the creation of the new
    # Uid row appear elided in this excerpt.
    except NoResultFound:
        # Persist the newly inserted row (commits only when the session
        # is private to this call, otherwise just flushes).
        session.commit_or_flush()

__all__.append('get_or_set_uid')
def get_uid_from_fingerprint(fpr, session=None):
    # Look up the Uid owning the fingerprint string C{fpr} via the
    # Fingerprint table.
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)

    # NOTE(review): the try:/q.one()/return lines appear elided in this
    # excerpt; presumably None is returned when no row matches -- confirm.
    except NoResultFound:

__all__.append('get_uid_from_fingerprint')
2926 ################################################################################
class UploadBlock(object):
    """ORM class for a row of table upload_blocks (see the mapper in DBConn)."""

    def __init__(self, *args, **kwargs):
        # NOTE(review): the __init__ body and the `def __repr__(self):`
        # header appear elided in this excerpt; the return below is
        # presumably the body of __repr__ -- confirm against the full file.
        return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)

__all__.append('UploadBlock')
2937 ################################################################################
class MetadataKey(ORMObject):
    """Key name for binary/source package metadata (table metadata_keys,
    see the mapper in DBConn)."""

    def __init__(self, key = None):
        # NOTE(review): the __init__ body appears elided in this excerpt.

    def properties(self):
        # NOTE(review): the return statement appears elided in this
        # excerpt.

    def not_null_constraints(self):
        # NOTE(review): the return statement appears elided in this
        # excerpt.

__all__.append('MetadataKey')
def get_or_set_metadatakey(keyname, session=None):
    """
    Returns MetadataKey object for given keyname.

    If no matching keyname is found, a row is inserted.

    @type keyname: string
    @param keyname: The keyname to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.

    @return: the metadatakey object for the given keyname
    """
    q = session.query(MetadataKey).filter_by(key=keyname)

    # NOTE(review): the try:/q.one() lines appear elided in this excerpt.
    except NoResultFound:
        ret = MetadataKey(keyname)
        # Persist the new key (commits only when the session is private
        # to this call, otherwise just flushes).
        session.commit_or_flush()

__all__.append('get_or_set_metadatakey')
2983 ################################################################################
class BinaryMetadata(ORMObject):
    """A (key, value) metadata entry for a binary package (table
    binaries_metadata, see the mapper in DBConn)."""

    def __init__(self, key = None, value = None, binary = None):
        # NOTE(review): the key/value assignments appear elided in this
        # excerpt.
        self.binary = binary

    def properties(self):
        # Attributes exposed through ORMObject's generic machinery.
        return ['binary', 'key', 'value']

    def not_null_constraints(self):
        # NOTE(review): the return statement appears elided in this
        # excerpt.

__all__.append('BinaryMetadata')
2999 ################################################################################
class SourceMetadata(ORMObject):
    """A (key, value) metadata entry for a source package (table
    source_metadata, see the mapper in DBConn)."""

    def __init__(self, key = None, value = None, source = None):
        # NOTE(review): the key/value assignments appear elided in this
        # excerpt.
        self.source = source

    def properties(self):
        # Attributes exposed through ORMObject's generic machinery.
        return ['source', 'key', 'value']

    def not_null_constraints(self):
        # NOTE(review): the return statement appears elided in this
        # excerpt.

__all__.append('SourceMetadata')
3015 ################################################################################
class DBConn(object):
    """
    database module init.
    """
    # NOTE(review): several lines of this class appear elided in this
    # excerpt (e.g. the shared-state class attribute, the call to
    # self.__createconn() in __init__, the `tables = (` / `views = (`
    # headers, some continuation lines and the `def session(self):`
    # header).  Comments below describe only the visible code.

    def __init__(self, *args, **kwargs):
        # All instances share one state dict, so the engine, metadata and
        # mappers are only set up once per process.
        self.__dict__ = self.__shared_state

        if not getattr(self, 'initialised', False):
            self.initialised = True
            # Passing a 'debug' kwarg turns on engine echo (see
            # engine_args in __createconn).
            self.debug = kwargs.has_key('debug')

    def __setuptables(self):
        # Names of tables reflected from the live database; each becomes
        # an attribute self.tbl_<name> below.
            'binaries_metadata',
            'build_queue_files',
            'changes_pending_binaries',
            'changes_pending_files',
            'changes_pending_source',
            'changes_pending_files_map',
            'changes_pending_source_files',
            'changes_pool_files',
            'extra_src_references',
        # TODO: the maintainer column in table override should be removed.
            'suite_architectures',
            'suite_build_queue_copy',
            'suite_src_formats',

        # Names of database views; each becomes an attribute
        # self.view_<name> below.  NOTE(review): the `views = (` header
        # appears elided in this excerpt.
            'almost_obsolete_all_associations',
            'almost_obsolete_src_associations',
            'any_associations_source',
            'bin_assoc_by_arch',
            'bin_associations_binaries',
            'binaries_suite_arch',
            'binfiles_suite_component_arch',
            'newest_all_associations',
            'newest_any_associations',
            'newest_src_association',
            'obsolete_all_associations',
            'obsolete_any_associations',
            'obsolete_any_by_all_associations',
            'obsolete_src_associations',
            'src_associations_bin',
            'src_associations_src',
            'suite_arch_by_name',

        # Reflect each table from the database schema at startup.
        for table_name in tables:
            table = Table(table_name, self.db_meta, \
                autoload=True, useexisting=True)
            setattr(self, 'tbl_%s' % table_name, table)

        for view_name in views:
            view = Table(view_name, self.db_meta, autoload=True)
            setattr(self, 'view_%s' % view_name, view)

    def __setupmappers(self):
        # Classic (non-declarative) SQLAlchemy mappings of the ORM classes
        # onto the reflected tables.  'extension = validator' attaches the
        # module-level validator mapper extension (defined earlier in this
        # file, not visible in this excerpt).
        mapper(Architecture, self.tbl_architecture,
            properties = dict(arch_id = self.tbl_architecture.c.id,
               suites = relation(Suite, secondary=self.tbl_suite_architectures,
                   order_by='suite_name',
                   backref=backref('architectures', order_by='arch_string'))),
            extension = validator)

        mapper(Archive, self.tbl_archive,
            properties = dict(archive_id = self.tbl_archive.c.id,
                              archive_name = self.tbl_archive.c.name))

        mapper(BuildQueue, self.tbl_build_queue,
            properties = dict(queue_id = self.tbl_build_queue.c.id))

        mapper(BuildQueueFile, self.tbl_build_queue_files,
            properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
                              poolfile = relation(PoolFile, backref='buildqueueinstances')))

        mapper(DBBinary, self.tbl_binaries,
            properties = dict(binary_id = self.tbl_binaries.c.id,
                              package = self.tbl_binaries.c.package,
                              version = self.tbl_binaries.c.version,
                              maintainer_id = self.tbl_binaries.c.maintainer,
                              maintainer = relation(Maintainer),
                              source_id = self.tbl_binaries.c.source,
                              source = relation(DBSource, backref='binaries'),
                              arch_id = self.tbl_binaries.c.architecture,
                              architecture = relation(Architecture),
                              poolfile_id = self.tbl_binaries.c.file,
                              poolfile = relation(PoolFile, backref=backref('binary', uselist = False)),
                              binarytype = self.tbl_binaries.c.type,
                              fingerprint_id = self.tbl_binaries.c.sig_fpr,
                              fingerprint = relation(Fingerprint),
                              install_date = self.tbl_binaries.c.install_date,
                              suites = relation(Suite, secondary=self.tbl_bin_associations,
                                  backref=backref('binaries', lazy='dynamic')),
                              extra_sources = relation(DBSource, secondary=self.tbl_extra_src_references,
                                  backref=backref('extra_binary_references', lazy='dynamic')),
                              key = relation(BinaryMetadata, cascade='all',
                                  collection_class=attribute_mapped_collection('key'))),
                extension = validator)

        mapper(BinaryACL, self.tbl_binary_acl,
            properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))

        mapper(BinaryACLMap, self.tbl_binary_acl_map,
            properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
                              fingerprint = relation(Fingerprint, backref="binary_acl_map"),
                              architecture = relation(Architecture)))

        mapper(Component, self.tbl_component,
            properties = dict(component_id = self.tbl_component.c.id,
                              component_name = self.tbl_component.c.name),
            extension = validator)

        mapper(DBConfig, self.tbl_config,
            properties = dict(config_id = self.tbl_config.c.id))

        mapper(DSCFile, self.tbl_dsc_files,
            properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
                              source_id = self.tbl_dsc_files.c.source,
                              source = relation(DBSource),
                              poolfile_id = self.tbl_dsc_files.c.file,
                              poolfile = relation(PoolFile)))

        mapper(PoolFile, self.tbl_files,
            properties = dict(file_id = self.tbl_files.c.id,
                              filesize = self.tbl_files.c.size,
                              location_id = self.tbl_files.c.location,
                              location = relation(Location,
                                  # using lazy='dynamic' in the back
                                  # reference because we have A LOT of
                                  # files in one location
                                  backref=backref('files', lazy='dynamic'))),
            extension = validator)

        mapper(Fingerprint, self.tbl_fingerprint,
            properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
                              uid_id = self.tbl_fingerprint.c.uid,
                              uid = relation(Uid),
                              keyring_id = self.tbl_fingerprint.c.keyring,
                              keyring = relation(Keyring),
                              source_acl = relation(SourceACL),
                              binary_acl = relation(BinaryACL)),
            extension = validator)

        mapper(Keyring, self.tbl_keyrings,
            properties = dict(keyring_name = self.tbl_keyrings.c.name,
                              keyring_id = self.tbl_keyrings.c.id))

        mapper(DBChange, self.tbl_changes,
            properties = dict(change_id = self.tbl_changes.c.id,
                              poolfiles = relation(PoolFile,
                                                   secondary=self.tbl_changes_pool_files,
                                                   backref="changeslinks"),
                              seen = self.tbl_changes.c.seen,
                              source = self.tbl_changes.c.source,
                              binaries = self.tbl_changes.c.binaries,
                              architecture = self.tbl_changes.c.architecture,
                              distribution = self.tbl_changes.c.distribution,
                              urgency = self.tbl_changes.c.urgency,
                              maintainer = self.tbl_changes.c.maintainer,
                              changedby = self.tbl_changes.c.changedby,
                              date = self.tbl_changes.c.date,
                              version = self.tbl_changes.c.version,
                              files = relation(ChangePendingFile,
                                               secondary=self.tbl_changes_pending_files_map,
                                               backref="changesfile"),
                              in_queue_id = self.tbl_changes.c.in_queue,
                              in_queue = relation(PolicyQueue,
                                                  primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
                              approved_for_id = self.tbl_changes.c.approved_for))

        mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
            properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))

        mapper(ChangePendingFile, self.tbl_changes_pending_files,
            properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
                              filename = self.tbl_changes_pending_files.c.filename,
                              size = self.tbl_changes_pending_files.c.size,
                              md5sum = self.tbl_changes_pending_files.c.md5sum,
                              sha1sum = self.tbl_changes_pending_files.c.sha1sum,
                              sha256sum = self.tbl_changes_pending_files.c.sha256sum))

        mapper(ChangePendingSource, self.tbl_changes_pending_source,
            properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
                              change = relation(DBChange),
                              maintainer = relation(Maintainer,
                                                    primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
                              changedby = relation(Maintainer,
                                                   primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
                              fingerprint = relation(Fingerprint),
                              source_files = relation(ChangePendingFile,
                                                      secondary=self.tbl_changes_pending_source_files,
                                                      backref="pending_sources")))

        mapper(KeyringACLMap, self.tbl_keyring_acl_map,
            properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
                              keyring = relation(Keyring, backref="keyring_acl_map"),
                              architecture = relation(Architecture)))

        mapper(Location, self.tbl_location,
            properties = dict(location_id = self.tbl_location.c.id,
                              component_id = self.tbl_location.c.component,
                              component = relation(Component, backref='location'),
                              archive_id = self.tbl_location.c.archive,
                              archive = relation(Archive),
                              # FIXME: the 'type' column is old cruft and
                              # should be removed in the future.
                              archive_type = self.tbl_location.c.type),
            extension = validator)

        mapper(Maintainer, self.tbl_maintainer,
            properties = dict(maintainer_id = self.tbl_maintainer.c.id,
                              maintains_sources = relation(DBSource, backref='maintainer',
                                  primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
                              changed_sources = relation(DBSource, backref='changedby',
                                  primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
                extension = validator)

        mapper(NewComment, self.tbl_new_comments,
            properties = dict(comment_id = self.tbl_new_comments.c.id))

        mapper(Override, self.tbl_override,
            properties = dict(suite_id = self.tbl_override.c.suite,
                              suite = relation(Suite, \
                                  backref=backref('overrides', lazy='dynamic')),
                              package = self.tbl_override.c.package,
                              component_id = self.tbl_override.c.component,
                              component = relation(Component, \
                                  backref=backref('overrides', lazy='dynamic')),
                              priority_id = self.tbl_override.c.priority,
                              priority = relation(Priority, \
                                  backref=backref('overrides', lazy='dynamic')),
                              section_id = self.tbl_override.c.section,
                              section = relation(Section, \
                                  backref=backref('overrides', lazy='dynamic')),
                              overridetype_id = self.tbl_override.c.type,
                              overridetype = relation(OverrideType, \
                                  backref=backref('overrides', lazy='dynamic'))))

        mapper(OverrideType, self.tbl_override_type,
            properties = dict(overridetype = self.tbl_override_type.c.type,
                              overridetype_id = self.tbl_override_type.c.id))

        mapper(PolicyQueue, self.tbl_policy_queue,
            properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))

        mapper(Priority, self.tbl_priority,
            properties = dict(priority_id = self.tbl_priority.c.id))

        mapper(Section, self.tbl_section,
            properties = dict(section_id = self.tbl_section.c.id,
                              section=self.tbl_section.c.section))

        mapper(DBSource, self.tbl_source,
            properties = dict(source_id = self.tbl_source.c.id,
                              version = self.tbl_source.c.version,
                              maintainer_id = self.tbl_source.c.maintainer,
                              poolfile_id = self.tbl_source.c.file,
                              poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
                              fingerprint_id = self.tbl_source.c.sig_fpr,
                              fingerprint = relation(Fingerprint),
                              changedby_id = self.tbl_source.c.changedby,
                              srcfiles = relation(DSCFile,
                                                  primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
                              suites = relation(Suite, secondary=self.tbl_src_associations,
                                  backref=backref('sources', lazy='dynamic')),
                              srcuploaders = relation(SrcUploader),
                              key = relation(SourceMetadata, cascade='all',
                                  collection_class=attribute_mapped_collection('key'))),
                extension = validator)

        mapper(SourceACL, self.tbl_source_acl,
            properties = dict(source_acl_id = self.tbl_source_acl.c.id))

        mapper(SrcFormat, self.tbl_src_format,
            properties = dict(src_format_id = self.tbl_src_format.c.id,
                              format_name = self.tbl_src_format.c.format_name))

        mapper(SrcUploader, self.tbl_src_uploaders,
            properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
                              source_id = self.tbl_src_uploaders.c.source,
                              source = relation(DBSource,
                                                primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
                              maintainer_id = self.tbl_src_uploaders.c.maintainer,
                              maintainer = relation(Maintainer,
                                                    primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))

        mapper(Suite, self.tbl_suite,
            properties = dict(suite_id = self.tbl_suite.c.id,
                              policy_queue = relation(PolicyQueue),
                              copy_queues = relation(BuildQueue,
                                  secondary=self.tbl_suite_build_queue_copy)),
                extension = validator)

        mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
            properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
                              suite = relation(Suite, backref='suitesrcformats'),
                              src_format_id = self.tbl_suite_src_formats.c.src_format,
                              src_format = relation(SrcFormat)))

        mapper(Uid, self.tbl_uid,
            properties = dict(uid_id = self.tbl_uid.c.id,
                              fingerprint = relation(Fingerprint)),
            extension = validator)

        mapper(UploadBlock, self.tbl_upload_blocks,
            properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
                              fingerprint = relation(Fingerprint, backref="uploadblocks"),
                              uid = relation(Uid, backref="uploadblocks")))

        mapper(BinContents, self.tbl_bin_contents,
            # NOTE(review): the `properties = dict(` line appears elided
            # in this excerpt.
                binary = relation(DBBinary,
                    backref=backref('contents', lazy='dynamic', cascade='all')),
                file = self.tbl_bin_contents.c.file))

        mapper(MetadataKey, self.tbl_metadata_keys,
            # NOTE(review): the `properties = dict(` line appears elided
            # in this excerpt.
                key_id = self.tbl_metadata_keys.c.key_id,
                key = self.tbl_metadata_keys.c.key))

        mapper(BinaryMetadata, self.tbl_binaries_metadata,
            # NOTE(review): the `properties = dict(` line appears elided
            # in this excerpt.
                binary_id = self.tbl_binaries_metadata.c.bin_id,
                binary = relation(DBBinary),
                key_id = self.tbl_binaries_metadata.c.key_id,
                key = relation(MetadataKey),
                value = self.tbl_binaries_metadata.c.value))

        mapper(SourceMetadata, self.tbl_source_metadata,
            # NOTE(review): the `properties = dict(` line appears elided
            # in this excerpt.
                source_id = self.tbl_source_metadata.c.src_id,
                source = relation(DBSource),
                key_id = self.tbl_source_metadata.c.key_id,
                key = relation(MetadataKey),
                value = self.tbl_source_metadata.c.value))

    ## Connection functions
    def __createconn(self):
        from config import Config
        # NOTE(review): the `cnf = Config()` line appears elided here.

        # Build the connection string: prefer a libpq service definition,
        # then TCP via DB::Host, otherwise a local-socket connection.
        if cnf.has_key("DB::Service"):
            connstr = "postgresql://service=%s" % cnf["DB::Service"]
        elif cnf.has_key("DB::Host"):
            connstr = "postgresql://%s" % cnf["DB::Host"]
            if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
                connstr += ":%s" % cnf["DB::Port"]
            connstr += "/%s" % cnf["DB::Name"]
            # NOTE(review): the `else:` header for the local-socket case
            # (the two lines below) appears elided in this excerpt.
            connstr = "postgresql:///%s" % cnf["DB::Name"]
            if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
                connstr += "?port=%s" % cnf["DB::Port"]

        # Optional engine tuning from configuration.
        engine_args = { 'echo': self.debug }
        if cnf.has_key('DB::PoolSize'):
            engine_args['pool_size'] = int(cnf['DB::PoolSize'])
        if cnf.has_key('DB::MaxOverflow'):
            engine_args['max_overflow'] = int(cnf['DB::MaxOverflow'])
        # SQLAlchemy 0.6 only: optionally disable native unicode handling.
        if sa_major_version == '0.6' and cnf.has_key('DB::Unicode') and \
            cnf['DB::Unicode'] == 'false':
            engine_args['use_native_unicode'] = False

        # Monkey patch a new dialect in in order to support service= syntax
        import sqlalchemy.dialects.postgresql
        from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
        class PGDialect_psycopg2_dak(PGDialect_psycopg2):
            def create_connect_args(self, url):
                if str(url).startswith('postgresql://service='):
                    # 21 == len('postgresql://service='); pass the raw
                    # service=<name> string straight to psycopg2/libpq.
                    servicename = str(url)[21:]
                    return (['service=%s' % servicename], {})

                return PGDialect_psycopg2.create_connect_args(self, url)

        sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak

        self.db_pg = create_engine(connstr, **engine_args)
        self.db_meta = MetaData()
        self.db_meta.bind = self.db_pg
        # NOTE(review): the remaining sessionmaker keyword arguments
        # (continuation lines) appear elided in this excerpt.
        self.db_smaker = sessionmaker(bind=self.db_pg,

        self.__setuptables()
        self.__setupmappers()
        # Remember the creating pid so a forked child can be detected.
        self.pid = os.getpid()

    # NOTE(review): the `def session(self):` header and the body of the
    # fork-detection branch appear elided below.
        # reinitialize DBConn in new processes
        if self.pid != os.getpid():
        return self.db_smaker()

__all__.append('DBConn')