5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
37 from os.path import normpath
48 import simplejson as json
50 from datetime import datetime, timedelta
51 from errno import ENOENT
52 from tempfile import mkstemp, mkdtemp
53 from subprocess import Popen, PIPE
54 from tarfile import TarFile
56 from inspect import getargspec
59 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
61 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
62 backref, MapperExtension, EXT_CONTINUE, object_mapper, clear_mappers
63 from sqlalchemy import types as sqltypes
64 from sqlalchemy.orm.collections import attribute_mapped_collection
65 from sqlalchemy.ext.associationproxy import association_proxy
67 # Don't remove this, we re-export the exceptions to scripts which import us
68 from sqlalchemy.exc import *
69 from sqlalchemy.orm.exc import NoResultFound
71 # Only import Config until Queue stuff is changed to store its config
73 from config import Config
74 from textutils import fix_maintainer
75 from dak_exceptions import DBUpdateError, NoSourceFieldError
77 # suppress some deprecation warnings in squeeze related to sqlalchemy
79 warnings.filterwarnings('ignore', \
80 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
84 ################################################################################
86 # Patch in support for the debversion field type so that it works during
# NOTE(review): this excerpt elides several original lines (e.g. the
# try/except that picks the UserDefinedType base and the method bodies of
# DebVersion); tokens below are reproduced exactly as found.
90 # that is for sqlalchemy 0.6
91 UserDefinedType = sqltypes.UserDefinedType
93 # this one for sqlalchemy 0.5
94 UserDefinedType = sqltypes.TypeEngine
# Custom column type mapping PostgreSQL's 'debversion' so Debian version
# columns round-trip through SQLAlchemy.
96 class DebVersion(UserDefinedType):
97     def get_col_spec(self):
100     def bind_processor(self, dialect):
103     # ' = None' is needed for sqlalchemy 0.5:
104     def result_processor(self, dialect, coltype = None):
# Register the type with the postgres dialect; only SQLA 0.5/0.6 are supported.
107 sa_major_version = sqlalchemy.__version__[0:3]
108 if sa_major_version in ["0.5", "0.6"]:
109     from sqlalchemy.databases import postgres
110     postgres.ischema_names['debversion'] = DebVersion
112     raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
114 ################################################################################
116 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
118 ################################################################################
# Decorator that supplies a throwaway DBConn session when the caller did
# not pass one, and closes it afterwards.
# NOTE(review): several original lines are elided in this excerpt (the
# docstring quotes, the try/finally around fn(), and the session.close()
# call for private sessions — presumably; verify against the full file).
120 def session_wrapper(fn):
122     Wrapper around common ".., session=None):" handling. If the wrapped
123     function is called without passing 'session', we create a local one
124     and destroy it when the function ends.
126     Also attaches a commit_or_flush method to the session; if we created a
127     local session, this is a synonym for session.commit(), otherwise it is a
128     synonym for session.flush().
131     def wrapped(*args, **kwargs):
132         private_transaction = False
134         # Find the session object
135         session = kwargs.get('session')
# getargspec(fn)[0] is the positional-parameter list; 'session' is assumed
# to be the last declared parameter of the wrapped function.
138         if len(args) <= len(getargspec(fn)[0]) - 1:
139             # No session specified as last argument or in kwargs
140             private_transaction = True
141             session = kwargs['session'] = DBConn().session()
143         # Session is last argument in args
147             session = args[-1] = DBConn().session()
148             private_transaction = True
# commit_or_flush: commit when we own the session, flush when the caller does.
150         if private_transaction:
151             session.commit_or_flush = session.commit
153             session.commit_or_flush = session.flush
156             return fn(*args, **kwargs)
158         if private_transaction:
159             # We created a session; close it.
162     wrapped.__doc__ = fn.__doc__
163     wrapped.func_name = fn.func_name
167 __all__.append('session_wrapper')
169 ################################################################################
# Common base class for all dak ORM classes: supplies json/repr/str helpers,
# NOT NULL validation and a thread-oriented clone() helper.
# NOTE(review): many original lines are elided in this excerpt (docstring
# quotes, several method headers such as json()/classname()/__repr__, and
# some branch bodies); tokens below are reproduced exactly as found.
171 class ORMObject(object):
173     ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
174     derived classes must implement the properties() method.
177     def properties(self):
179         This method should be implemented by all derived classes and returns a
180         list of the important properties. The properties 'created' and
181         'modified' will be added automatically. A suffix '_count' should be
182         added to properties that are lists or query objects. The most important
183         property name should be returned as the first element in the list
184         because it is used by repr().
190         Returns a JSON representation of the object based on the properties
191         returned from the properties() method.
194         # add created and modified
195         all_properties = self.properties() + ['created', 'modified']
196         for property in all_properties:
197             # check for list or query
198             if property[-6:] == '_count':
199                 real_property = property[:-6]
200                 if not hasattr(self, real_property):
202                 value = getattr(self, real_property)
# '_count' properties are rendered as a length: len() for collections,
# .count() for query objects; anything else is an error.
203                 if hasattr(value, '__len__'):
206                 elif hasattr(value, 'count'):
208                     value = value.count()
210                     raise KeyError('Do not understand property %s.' % property)
212             if not hasattr(self, property):
215             value = getattr(self, property)
219             elif isinstance(value, ORMObject):
220                 # use repr() for ORMObject types
223             # we want a string for all other types because json cannot
226             data[property] = value
227         return json.dumps(data)
231         Returns the name of the class.
233         return type(self).__name__
237         Returns a short string representation of the object using the first
238         element from the properties() method.
240         primary_property = self.properties()[0]
241         value = getattr(self, primary_property)
242         return '<%s %s>' % (self.classname(), str(value))
246         Returns a human readable form of the object using the properties()
249         return '<%s %s>' % (self.classname(), self.json())
251     def not_null_constraints(self):
253         Returns a list of properties that must be not NULL. Derived classes
254         should override this method if needed.
# %-template used by validate() below; takes (property, object).
258     validation_message = \
259         "Validation failed because property '%s' must not be empty in object\n%s"
263         This function validates the not NULL constraints as returned by
264         not_null_constraints(). It raises the DBUpdateError exception if
267         for property in self.not_null_constraints():
268             # TODO: It is a bit awkward that the mapper configuration allow
269             # directly setting the numeric _id columns. We should get rid of it
# A set '<property>_id' foreign-key column also satisfies the constraint.
271             if hasattr(self, property + '_id') and \
272                 getattr(self, property + '_id') is not None:
274             if not hasattr(self, property) or getattr(self, property) is None:
275                 raise DBUpdateError(self.validation_message % \
276                     (property, str(self)))
280     def get(cls, primary_key, session = None):
282         This is a support function that allows getting an object by its primary
285             Architecture.get(3[, session])
287         instead of the more verbose
289             session.query(Architecture).get(3)
291         return session.query(cls).get(primary_key)
293     def session(self, replace = False):
295         Returns the current session that is associated with the object. May
296         return None is object is in detached state.
299         return object_session(self)
301     def clone(self, session = None):
303         Clones the current object in a new session and returns the new clone. A
304         fresh session is created if the optional session parameter is not
305         provided. The function will fail if a session is provided and has
308         RATIONALE: SQLAlchemy's session is not thread safe. This method clones
309         an existing object to allow several threads to work with their own
310         instances of an ORMObject.
312         WARNING: Only persistent (committed) objects can be cloned. Changes
313         made to the original object that are not committed yet will get lost.
314         The session of the new object will always be rolled back to avoid
318         if self.session() is None:
319             raise RuntimeError( \
320                 'Method clone() failed for detached object:\n%s' % self)
321         self.session().flush()
322         mapper = object_mapper(self)
323         primary_key = mapper.primary_key_from_instance(self)
324         object_class = self.__class__
326             session = DBConn().session()
# Refuse to clone into a session that has pending (unflushed) changes.
327         elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
328             raise RuntimeError( \
329                 'Method clone() failed due to unflushed changes in session.')
330         new_object = session.query(object_class).get(primary_key)
332         if new_object is None:
333             raise RuntimeError( \
334                 'Method clone() failed for non-persistent object:\n%s' % self)
337 __all__.append('ORMObject')
339 ################################################################################
# MapperExtension hook that runs instance.validate() before INSERT/UPDATE.
# NOTE(review): the method bodies (presumably calling instance.validate()
# and returning EXT_CONTINUE) are elided in this excerpt.
341 class Validator(MapperExtension):
343     This class calls the validate() method for each instance for the
344     'before_update' and 'before_insert' events. A global object validator is
345     used for configuring the individual mappers.
348     def before_update(self, mapper, connection, instance):
352     def before_insert(self, mapper, connection, instance):
# Shared singleton used when configuring the individual mappers.
356 validator = Validator()
358 ################################################################################
class Architecture(ORMObject):
    """ORM class for the architecture table."""

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Allow comparing an Architecture directly against a plain name.
        if not isinstance(val, str):
            # Defer to the normal comparison machinery.
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        if not isinstance(val, str):
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # 'arch_string' comes first: ORMObject.repr() uses the first entry.
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']

__all__.append('Architecture')
# NOTE(review): the @session_wrapper decorator line and the try/return
# (presumably 'return q.one()' / 'return None') are elided in this excerpt.
386 def get_architecture(architecture, session=None):
388     Returns database id for given C{architecture}.
390     @type architecture: string
391     @param architecture: The name of the architecture
393     @type session: Session
394     @param session: Optional SQLA session object (a temporary one will be
395     generated if not supplied)
398     @return: Architecture object for the given arch (None if not present)
401     q = session.query(Architecture).filter_by(arch_string=architecture)
405     except NoResultFound:
408 __all__.append('get_architecture')
410 # TODO: should be removed because the implementation is too trivial
# NOTE(review): the @session_wrapper decorator line is elided in this
# excerpt; get_architecture() may return None, in which case the final
# attribute access would raise AttributeError.
412 def get_architecture_suites(architecture, session=None):
414     Returns list of Suite objects for given C{architecture} name
416     @type architecture: str
417     @param architecture: Architecture name to search for
419     @type session: Session
420     @param session: Optional SQL session object (a temporary one will be
421     generated if not supplied)
424     @return: list of Suite objects for the given name (may be empty)
427     return get_architecture(architecture, session).suites
429 __all__.append('get_architecture_suites')
431 ################################################################################
# ORM class for the archive table.
# NOTE(review): the __init__ body and the __repr__ header are elided in
# this excerpt.
433 class Archive(object):
434     def __init__(self, *args, **kwargs):
438         return '<Archive %s>' % self.archive_name
440 __all__.append('Archive')
# NOTE(review): the @session_wrapper decorator and the try/return lines
# are elided in this excerpt.
443 def get_archive(archive, session=None):
445     returns database id for given C{archive}.
447     @type archive: string
448     @param archive: the name of the arhive
450     @type session: Session
451     @param session: Optional SQLA session object (a temporary one will be
452     generated if not supplied)
455     @return: Archive object for the given name (None if not present)
# Archive names are matched case-insensitively by lower-casing the input.
458     archive = archive.lower()
460     q = session.query(Archive).filter_by(archive_name=archive)
464     except NoResultFound:
467 __all__.append('get_archive')
469 ################################################################################
# ORM class for the bin_contents table (file path <-> binary association).
# NOTE(review): the __init__ body (presumably assigning self.file and
# self.binary) is elided in this excerpt.
471 class BinContents(ORMObject):
472     def __init__(self, file = None, binary = None):
476     def properties(self):
477         return ['file', 'binary']
479 __all__.append('BinContents')
481 ################################################################################
# ORM class for the binaries table.
# NOTE(review): several original lines are elided in this excerpt (the
# final __init__ parameter and the self.source assignment, the tail of
# not_null_constraints(), and parts of scan_contents()).
483 class DBBinary(ORMObject):
484     def __init__(self, package = None, source = None, version = None, \
485         maintainer = None, architecture = None, poolfile = None, \
487         self.package = package
489         self.version = version
490         self.maintainer = maintainer
491         self.architecture = architecture
492         self.poolfile = poolfile
493         self.binarytype = binarytype
495     def properties(self):
496         return ['package', 'version', 'maintainer', 'source', 'architecture', \
497             'poolfile', 'binarytype', 'fingerprint', 'install_date', \
498             'suites_count', 'binary_id', 'contents_count', 'extra_sources']
500     def not_null_constraints(self):
501         return ['package', 'version', 'maintainer', 'source', 'poolfile', \
# Proxy exposing the binary's key/value metadata as a mapping.
504     metadata = association_proxy('key', 'value')
506     def get_component_name(self):
507         return self.poolfile.location.component.component_name
509     def scan_contents(self):
511         Yields the contents of the package. Only regular files are yielded and
512         the path names are normalized after converting them from either utf-8
513         or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the
514         package does not contain any regular file.
516         fullpath = self.poolfile.fullpath
# Stream the data.tar member list out of the .deb via dpkg-deb; 'r|' reads
# the tar as a non-seekable stream from the pipe.
517         dpkg = Popen(['dpkg-deb', '--fsys-tarfile', fullpath], stdout = PIPE)
518         tar = TarFile.open(fileobj = dpkg.stdout, mode = 'r|')
519         for member in tar.getmembers():
520             if not member.isdir():
521                 name = normpath(member.name)
522                 # enforce proper utf-8 encoding
525                 except UnicodeDecodeError:
526                     name = name.decode('iso8859-1').encode('utf-8')
def read_control(self):
    """
    Reads the control information from a binary.

    @rtype: tuple
    @return: (stanza, controldict) stanza is the text of the control
             section. controldict is the information in a dictionary
             form
    """
    # FIX(review): was 'import apt_inst, apt_pk' — a typo for apt_pkg,
    # which is the name actually used below (apt_pkg.TagSection); the old
    # line raised ImportError on every call.
    import apt_inst, apt_pkg
    fullpath = self.poolfile.fullpath
    deb_file = open(fullpath, 'r')
    stanza = apt_inst.debExtractControl(deb_file).rstrip()
    control = dict(apt_pkg.TagSection(stanza))
    deb_file.close()
    return stanza, control

__all__.append('DBBinary')
# NOTE(review): the @session_wrapper decorator and parts of the docstring
# are elided in this excerpt.
554 def get_suites_binary_in(package, session=None):
556     Returns list of Suite objects which given C{package} name is in
559     @param package: DBBinary package name to search for
562     @return: list of Suite objects for the given package
565     return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()
567 __all__.append('get_suites_binary_in')
# NOTE(review): the @session_wrapper decorator and the 'binary is None'
# guard before the final return are elided in this excerpt. The mutable
# default 'arch_list=[]' is only read here, never mutated.
570 def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
572     Returns the component name of the newest binary package in suite_list or
573     None if no package is found. The result can be optionally filtered by a list
574     of architecture names.
577     @param package: DBBinary package name to search for
579     @type suite_list: list of str
580     @param suite_list: list of suite_name items
582     @type arch_list: list of str
583     @param arch_list: optional list of arch_string items that defaults to []
585     @rtype: str or NoneType
586     @return: name of component or None
589     q = session.query(DBBinary).filter_by(package = package). \
590         join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
591     if len(arch_list) > 0:
592         q = q.join(DBBinary.architecture). \
593             filter(Architecture.arch_string.in_(arch_list))
# 'newest' is decided by descending debversion sort on the version column.
594     binary = q.order_by(desc(DBBinary.version)).first()
598         return binary.get_component_name()
600 __all__.append('get_component_by_package_suite')
602 ################################################################################
# ORM class for the binary_acl table.
# NOTE(review): the __init__ body and the __repr__ header are elided.
604 class BinaryACL(object):
605     def __init__(self, *args, **kwargs):
609         return '<BinaryACL %s>' % self.binary_acl_id
611 __all__.append('BinaryACL')
613 ################################################################################
# ORM class for the binary_acl_map table.
# NOTE(review): the __init__ body and the __repr__ header are elided.
615 class BinaryACLMap(object):
616     def __init__(self, *args, **kwargs):
620         return '<BinaryACLMap %s>' % self.binary_acl_map_id
622 __all__.append('BinaryACLMap')
624 ################################################################################
629 ArchiveDir "%(archivepath)s";
630 OverrideDir "%(overridedir)s";
631 CacheDir "%(cachedir)s";
636 Packages::Compress ". bzip2 gzip";
637 Sources::Compress ". bzip2 gzip";
642 bindirectory "incoming"
647 BinOverride "override.sid.all3";
648 BinCacheDB "packages-accepted.db";
650 FileList "%(filelist)s";
653 Packages::Extensions ".deb .udeb";
656 bindirectory "incoming/"
659 BinOverride "override.sid.all3";
660 SrcOverride "override.sid.all3.src";
661 FileList "%(filelist)s";
# ORM class for the build_queue table; also writes the apt repository
# metadata (Packages/Sources/Release) for a build queue directory.
# NOTE(review): many original lines are elided in this excerpt (loop
# headers, try/except scaffolding, cleanup of temp files, chdir restore);
# tokens below are reproduced exactly as found.
665 class BuildQueue(object):
666     def __init__(self, *args, **kwargs):
670         return '<BuildQueue %s>' % self.queue_name
672     def write_metadata(self, starttime, force=False):
673         # Do we write out metafiles?
674         if not (force or self.generate_metadata):
677         session = DBConn().session().object_session(self)
679         fl_fd = fl_name = ac_fd = ac_name = None
# All architectures except the pseudo-arch 'source'.
681         arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
682         startdir = os.getcwd()
685         # Grab files we want to include
# 'newer' = queue files still inside their stay_of_execution window.
686         newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
687         # Write file list with newer files
688         (fl_fd, fl_name) = mkstemp()
690             os.write(fl_fd, '%s\n' % n.fullpath)
695         # Write minimal apt.conf
696         # TODO: Remove hardcoding from template
697         (ac_fd, ac_name) = mkstemp()
698         os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
700                                             'cachedir': cnf["Dir::Cache"],
701                                             'overridedir': cnf["Dir::Override"],
705         # Run apt-ftparchive generate
706         os.chdir(os.path.dirname(ac_name))
707         os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
709         # Run apt-ftparchive release
710         # TODO: Eww - fix this
711         bname = os.path.basename(self.path)
715         # We have to remove the Release file otherwise it'll be included in the
718             os.unlink(os.path.join(bname, 'Release'))
722         os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
724         # Crude hack with open and append, but this whole section is and should be redone.
725         if self.notautomatic:
726             release=open("Release", "a")
727             release.write("NotAutomatic: yes")
# Sign the Release file with the configured key(s).
732             keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
733             if cnf.has_key("Dinstall::SigningPubKeyring"):
734                 keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
736             os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
738         # Move the files if we got this far
739         os.rename('Release', os.path.join(bname, 'Release'))
741             os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
743         # Clean up any left behind files
770     def clean_and_update(self, starttime, Logger, dryrun=False):
771         """WARNING: This routine commits for you"""
772         session = DBConn().session().object_session(self)
774         if self.generate_metadata and not dryrun:
775             self.write_metadata(starttime)
777         # Grab files older than our execution time
778         older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
784                 Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
786                 Logger.log(["I: Removing %s from the queue" % o.fullpath])
787                 os.unlink(o.fullpath)
790                 # If it wasn't there, don't worry
791                 if e.errno == ENOENT:
794                     # TODO: Replace with proper logging call
795                     Logger.log(["E: Could not remove %s" % o.fullpath])
# Remove stale apt index/link files that are no longer referenced.
802         for f in os.listdir(self.path):
803             if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
807                 r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
808             except NoResultFound:
809                 fp = os.path.join(self.path, f)
811                     Logger.log(["I: Would remove unused link %s" % fp])
813                     Logger.log(["I: Removing unused link %s" % fp])
817                         Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
819     def add_file_from_pool(self, poolfile):
820         """Copies a file into the pool. Assumes that the PoolFile object is
821         attached to the same SQLAlchemy session as the Queue object is.
823         The caller is responsible for committing after calling this function."""
824         poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
826         # Check if we have a file of this name or this ID already
827         for f in self.queuefiles:
828             if f.fileid is not None and f.fileid == poolfile.file_id or \
829                f.poolfile.filename == poolfile_basename:
830                 # In this case, update the BuildQueueFile entry so we
831                 # don't remove it too early
832                 f.lastused = datetime.now()
833                 DBConn().session().object_session(poolfile).add(f)
836         # Prepare BuildQueueFile object
837         qf = BuildQueueFile()
838         qf.build_queue_id = self.queue_id
839         qf.lastused = datetime.now()
840         qf.filename = poolfile_basename
842         targetpath = poolfile.fullpath
843         queuepath = os.path.join(self.path, poolfile_basename)
847             # We need to copy instead of symlink
849                 utils.copy(targetpath, queuepath)
850                 # NULL in the fileid field implies a copy
853                 os.symlink(targetpath, queuepath)
854                 qf.fileid = poolfile.file_id
858         # Get the same session as the PoolFile is using and add the qf to it
859         DBConn().session().object_session(poolfile).add(qf)
864 __all__.append('BuildQueue')
# NOTE(review): the @session_wrapper decorator and the try/return lines
# are elided in this excerpt.
867 def get_build_queue(queuename, session=None):
869     Returns BuildQueue object for given C{queue name}, creating it if it does not
872     @type queuename: string
873     @param queuename: The name of the queue
875     @type session: Session
876     @param session: Optional SQLA session object (a temporary one will be
877     generated if not supplied)
880     @return: BuildQueue object for the given queue
883     q = session.query(BuildQueue).filter_by(queue_name=queuename)
887     except NoResultFound:
890 __all__.append('get_build_queue')
892 ################################################################################
# ORM class for the build_queue_files table.
# NOTE(review): the __init__ body, the __repr__ header and the @property
# decorator for fullpath are elided in this excerpt.
894 class BuildQueueFile(object):
895     def __init__(self, *args, **kwargs):
899         return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
903         return os.path.join(self.buildqueue.path, self.filename)
906 __all__.append('BuildQueueFile')
908 ################################################################################
# ORM class for the changes_pending_binaries table.
# NOTE(review): the __init__ body and the __repr__ header are elided.
910 class ChangePendingBinary(object):
911     def __init__(self, *args, **kwargs):
915         return '<ChangePendingBinary %s>' % self.change_pending_binary_id
917 __all__.append('ChangePendingBinary')
919 ################################################################################
# ORM class for the changes_pending_files table.
# NOTE(review): the __init__ body and the __repr__ header are elided.
921 class ChangePendingFile(object):
922     def __init__(self, *args, **kwargs):
926         return '<ChangePendingFile %s>' % self.change_pending_file_id
928 __all__.append('ChangePendingFile')
930 ################################################################################
# ORM class for the changes_pending_source table.
# NOTE(review): the __init__ body and the __repr__ header are elided.
932 class ChangePendingSource(object):
933     def __init__(self, *args, **kwargs):
937         return '<ChangePendingSource %s>' % self.change_pending_source_id
939 __all__.append('ChangePendingSource')
941 ################################################################################
class Component(ORMObject):
    """ORM class for the component table (e.g. main, contrib, non-free)."""

    def __init__(self, component_name = None):
        self.component_name = component_name

    def __eq__(self, val):
        # Allow comparing a Component directly against a plain name.
        if not isinstance(val, str):
            # Defer to the normal comparison machinery.
            return NotImplemented
        return self.component_name == val

    def __ne__(self, val):
        if not isinstance(val, str):
            return NotImplemented
        return self.component_name != val

    def properties(self):
        # 'component_name' comes first: ORMObject.repr() uses the first entry.
        return ['component_name', 'component_id', 'description', \
            'location_count', 'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        return ['component_name']

__all__.append('Component')
# NOTE(review): the @session_wrapper decorator and the try/return lines
# are elided in this excerpt.
970 def get_component(component, session=None):
972     Returns database id for given C{component}.
974     @type component: string
975     @param component: The name of the override type
978     @return: the database id for the given component
# Component names are matched case-insensitively by lower-casing the input.
981     component = component.lower()
983     q = session.query(Component).filter_by(component_name=component)
987     except NoResultFound:
990 __all__.append('get_component')
992 ################################################################################
# ORM class for the config table.
# NOTE(review): the __init__ body and the __repr__ header are elided.
994 class DBConfig(object):
995     def __init__(self, *args, **kwargs):
999         return '<DBConfig %s>' % self.name
1001 __all__.append('DBConfig')
1003 ################################################################################
# NOTE(review): the @session_wrapper decorator, the try line, the
# session.add(cf) call and the final return are elided in this excerpt.
1006 def get_or_set_contents_file_id(filename, session=None):
1008     Returns database id for given filename.
1010     If no matching file is found, a row is inserted.
1012     @type filename: string
1013     @param filename: The filename
1014     @type session: SQLAlchemy
1015     @param session: Optional SQL session object (a temporary one will be
1016     generated if not supplied). If not passed, a commit will be performed at
1017     the end of the function, otherwise the caller is responsible for commiting.
1020     @return: the database id for the given component
1023     q = session.query(ContentFilename).filter_by(filename=filename)
1026         ret = q.one().cafilename_id
1027     except NoResultFound:
# Not present yet: insert a new row and flush/commit to obtain its id.
1028         cf = ContentFilename()
1029         cf.filename = filename
1031         session.commit_or_flush()
1032         ret = cf.cafilename_id
1036 __all__.append('get_or_set_contents_file_id')
# NOTE(review): the @session_wrapper decorator and a few docstring/SQL
# lines are elided in this excerpt.
1039 def get_contents(suite, overridetype, section=None, session=None):
1041     Returns contents for a suite / overridetype combination, limiting
1042     to a section if not None.
1045     @param suite: Suite object
1047     @type overridetype: OverrideType
1048     @param overridetype: OverrideType object
1050     @type section: Section
1051     @param section: Optional section object to limit results to
1053     @type session: SQLAlchemy
1054     @param session: Optional SQL session object (a temporary one will be
1055     generated if not supplied)
1057     @rtype: ResultsProxy
1058     @return: ResultsProxy object set up to return tuples of (filename, section,
1062     # find me all of the contents for a given suite
1063     contents_q = """SELECT (p.path||'/'||n.file) AS fn,
1067                     FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
1068                     JOIN content_file_names n ON (c.filename=n.id)
1069                     JOIN binaries b ON (b.id=c.binary_pkg)
1070                     JOIN override o ON (o.package=b.package)
1071                     JOIN section s ON (s.id=o.section)
1072                     WHERE o.suite = :suiteid AND o.type = :overridetypeid
1073                     AND b.type=:overridetypename"""
# Bind parameters keep the raw SQL safe from injection.
1075     vals = {'suiteid': suite.suite_id,
1076             'overridetypeid': overridetype.overridetype_id,
1077             'overridetypename': overridetype.overridetype}
1079     if section is not None:
1080         contents_q += " AND s.id = :sectionid"
1081         vals['sectionid'] = section.section_id
1083     contents_q += " ORDER BY fn"
1085     return session.execute(contents_q, vals)
1087 __all__.append('get_contents')
1089 ################################################################################
# ORM class for the content_file_paths table.
# NOTE(review): the __init__ body and the __repr__ header are elided.
1091 class ContentFilepath(object):
1092     def __init__(self, *args, **kwargs):
1096         return '<ContentFilepath %s>' % self.filepath
1098 __all__.append('ContentFilepath')
# NOTE(review): the @session_wrapper decorator, the try line, the
# session.add(cf) call and the final return are elided in this excerpt.
1101 def get_or_set_contents_path_id(filepath, session=None):
1103     Returns database id for given path.
1105     If no matching file is found, a row is inserted.
1107     @type filepath: string
1108     @param filepath: The filepath
1110     @type session: SQLAlchemy
1111     @param session: Optional SQL session object (a temporary one will be
1112     generated if not supplied). If not passed, a commit will be performed at
1113     the end of the function, otherwise the caller is responsible for commiting.
1116     @return: the database id for the given path
1119     q = session.query(ContentFilepath).filter_by(filepath=filepath)
1122         ret = q.one().cafilepath_id
1123     except NoResultFound:
# Not present yet: insert a new row and flush/commit to obtain its id.
1124         cf = ContentFilepath()
1125         cf.filepath = filepath
1127         session.commit_or_flush()
1128         ret = cf.cafilepath_id
1132 __all__.append('get_or_set_contents_path_id')
1134 ################################################################################
# ORM class for the content_associations table.
# NOTE(review): the __init__ body and the __repr__ header are elided.
1136 class ContentAssociation(object):
1137     def __init__(self, *args, **kwargs):
1141         return '<ContentAssociation %s>' % self.ca_id
1143 __all__.append('ContentAssociation')
# NOTE(review): several lines are elided in this excerpt (the docstring
# quotes, the try scaffolding, the commit and the rollback/return paths).
1145 def insert_content_paths(binary_id, fullpaths, session=None):
1147     Make sure given path is associated with given binary id
1149     @type binary_id: int
1150     @param binary_id: the id of the binary
1151     @type fullpaths: list
1152     @param fullpaths: the list of paths of the file being associated with the binary
1153     @type session: SQLAlchemy session
1154     @param session: Optional SQLAlchemy session. If this is passed, the caller
1155     is responsible for ensuring a transaction has begun and committing the
1156     results or rolling back based on the result code. If not passed, a commit
1157     will be performed at the end of the function, otherwise the caller is
1158     responsible for commiting.
1160     @return: True upon success
1163     privatetrans = False
1165         session = DBConn().session()
# Strip a leading './' so stored paths are archive-relative.
1170     def generate_path_dicts():
1171         for fullpath in fullpaths:
1172             if fullpath.startswith( './' ):
1173                 fullpath = fullpath[2:]
1175             yield {'filename':fullpath, 'id': binary_id }
1177     for d in generate_path_dicts():
1178         session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
1187         traceback.print_exc()
1189         # Only rollback if we set up the session ourself
1196 __all__.append('insert_content_paths')
1198 ################################################################################
# ORM class for the dsc_files table.
# NOTE(review): the __init__ body and the __repr__ header are elided.
1200 class DSCFile(object):
1201     def __init__(self, *args, **kwargs):
1205         return '<DSCFile %s>' % self.dscfile_id
1207 __all__.append('DSCFile')
# NOTE(review): the @session_wrapper decorator and the final
# 'return q.all()' line are elided in this excerpt.
1210 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
1212     Returns a list of DSCFiles which may be empty
1214     @type dscfile_id: int (optional)
1215     @param dscfile_id: the dscfile_id of the DSCFiles to find
1217     @type source_id: int (optional)
1218     @param source_id: the source id related to the DSCFiles to find
1220     @type poolfile_id: int (optional)
1221     @param poolfile_id: the poolfile id related to the DSCFiles to find
1224     @return: Possibly empty list of DSCFiles
# Each optional id narrows the query; with no ids, all DSCFiles match.
1227     q = session.query(DSCFile)
1229     if dscfile_id is not None:
1230         q = q.filter_by(dscfile_id=dscfile_id)
1232     if source_id is not None:
1233         q = q.filter_by(source_id=source_id)
1235     if poolfile_id is not None:
1236         q = q.filter_by(poolfile_id=poolfile_id)
1240 __all__.append('get_dscfiles')
1242 ################################################################################
# ORM class for the files table (files in the archive pool).
# NOTE(review): the tail of the __init__ signature/body, the @property
# decorator for fullpath and its header are elided in this excerpt.
1244 class PoolFile(ORMObject):
1245     def __init__(self, filename = None, location = None, filesize = -1, \
1247         self.filename = filename
1248         self.location = location
1249         self.filesize = filesize
1250         self.md5sum = md5sum
1254         return os.path.join(self.location.path, self.filename)
# A stored file is valid when both size and md5sum match the expectation.
1256     def is_valid(self, filesize = -1, md5sum = None):
1257         return self.filesize == long(filesize) and self.md5sum == md5sum
1259     def properties(self):
1260         return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
1261             'sha256sum', 'location', 'source', 'binary', 'last_used']
1263     def not_null_constraints(self):
1264         return ['filename', 'md5sum', 'location']
1266 __all__.append('PoolFile')
# Look up a pool file by name within a Location and validate its size/md5sum.
1269 def check_poolfile(filename, filesize, md5sum, location_id, session=None):
1272     (ValidFileFound [boolean], PoolFile object or None)
1274     @type filename: string
1275     @param filename: the filename of the file to check against the DB
1278     @param filesize: the size of the file to check against the DB
1280     @type md5sum: string
1281     @param md5sum: the md5sum of the file to check against the DB
1283     @type location_id: int
1284     @param location_id: the id of the location to look in
1287     @return: Tuple of length 2.
1288              - If valid pool file found: (C{True}, C{PoolFile object})
1289              - If valid pool file not found:
1290                  - (C{False}, C{None}) if no file found
1291                  - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
# Navigate Location -> its files relation, restricted to the exact filename.
1294     poolfile = session.query(Location).get(location_id). \
1295         files.filter_by(filename=filename).first()
1297     if poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum):
# NOTE(review): the lines initialising/setting `valid` are missing from this listing.
1300     return (valid, poolfile)
1302 __all__.append('check_poolfile')
1304 # TODO: the implementation can trivially be inlined at the place where the
1305 # function is called
# Thin wrapper around Query.get() for the PoolFile primary key.
1307 def get_poolfile_by_id(file_id, session=None):
1309     Returns a PoolFile objects or None for the given id
1312     @param file_id: the id of the file to look for
1314     @rtype: PoolFile or None
1315     @return: either the PoolFile object or None
1318     return session.query(PoolFile).get(file_id)
1320 __all__.append('get_poolfile_by_id')
# Suffix search: matches any pool file whose path ends in "/<filename>".
1323 def get_poolfile_like_name(filename, session=None):
1325     Returns an array of PoolFile objects which are like the given name
1327     @type filename: string
1328     @param filename: the filename of the file to check against the DB
1331     @return: array of PoolFile objects
1334     # TODO: There must be a way of properly using bind parameters with %FOO%
# NOTE(review): filename is interpolated into the LIKE pattern unescaped; % or _ in it act as wildcards.
1335     q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1339 __all__.append('get_poolfile_like_name')
# Create a PoolFile row from a checksum dict and attach it to a location.
1342 def add_poolfile(filename, datadict, location_id, session=None):
1344     Add a new file to the pool
1346     @type filename: string
1347     @param filename: filename
1349     @type datadict: dict
1350     @param datadict: dict with needed data
1352     @type location_id: int
1353     @param location_id: database id of the location
1356     @return: the PoolFile object created
# datadict must carry "size", "md5sum", "sha1sum" and "sha256sum" keys.
1358     poolfile = PoolFile()
1359     poolfile.filename = filename
1360     poolfile.filesize = datadict["size"]
1361     poolfile.md5sum = datadict["md5sum"]
1362     poolfile.sha1sum = datadict["sha1sum"]
1363     poolfile.sha256sum = datadict["sha256sum"]
1364     poolfile.location_id = location_id
1366     session.add(poolfile)
1367     # Flush to get a file id (NB: This is not a commit)
# NOTE(review): the session.flush()/return lines are missing from this decimated listing.
1372 __all__.append('add_poolfile')
1374 ################################################################################
1376 class Fingerprint(ORMObject):
# ORM class for a GPG key fingerprint row.
1377     def __init__(self, fingerprint = None):
1378         self.fingerprint = fingerprint
1380     def properties(self):
1381         return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
1384     def not_null_constraints(self):
1385         return ['fingerprint']
1387 __all__.append('Fingerprint')
# Read-only lookup of a Fingerprint by its fpr string (no insert on miss).
1390 def get_fingerprint(fpr, session=None):
1392     Returns Fingerprint object for given fpr.
1395     @param fpr: The fpr to find / add
1397     @type session: SQLAlchemy
1398     @param session: Optional SQL session object (a temporary one will be
1399     generated if not supplied).
1402     @return: the Fingerprint object for the given fpr or None
1405     q = session.query(Fingerprint).filter_by(fingerprint=fpr)
# NOTE(review): the try/q.one() line is missing here; on no match the result is None.
1409     except NoResultFound:
1414 __all__.append('get_fingerprint')
# Get-or-create: like get_fingerprint, but inserts a new row when no match exists.
1417 def get_or_set_fingerprint(fpr, session=None):
1419     Returns Fingerprint object for given fpr.
1421     If no matching fpr is found, a row is inserted.
1424     @param fpr: The fpr to find / add
1426     @type session: SQLAlchemy
1427     @param session: Optional SQL session object (a temporary one will be
1428     generated if not supplied). If not passed, a commit will be performed at
1429     the end of the function, otherwise the caller is responsible for commiting.
1430     A flush will be performed either way.
1433     @return: the Fingerprint object for the given fpr
1436     q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1440     except NoResultFound:
1441         fingerprint = Fingerprint()
1442         fingerprint.fingerprint = fpr
1443         session.add(fingerprint)
# commit_or_flush: commits for a function-local session, flushes for a caller-supplied one.
1444         session.commit_or_flush()
1449 __all__.append('get_or_set_fingerprint')
1451 ################################################################################
1453 # Helper routine for Keyring class
# Build a display name from an LDAP entry's cn/mn/sn attributes, skipping empty and "-" parts.
1454 def get_ldap_name(entry):
1456     for k in ["cn", "mn", "sn"]:
# NOTE(review): the line assigning `ret` from entry[k] is missing from this listing.
1458         if ret and ret[0] != "" and ret[0] != "-":
1460     return " ".join(name)
1462 ################################################################################
1464 class Keyring(object):
# Wraps a GPG keyring: parses `gpg --with-colons` output and syncs key owners
# with the uid table (from the keyring itself or from LDAP).
1465     gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
1466                      " --with-colons --fingerprint --fingerprint"
1471     def __init__(self, *args, **kwargs):
# NOTE(review): decimated listing — the `def __repr__` header for this return is missing.
1475         return '<Keyring %s>' % self.keyring_name
# Undo gpg's \xNN escaping in uid strings.
1477     def de_escape_gpg_str(self, txt):
1478         esclist = re.split(r'(\\x..)', txt)
# re.split with a capturing group interleaves matches at odd indices; decode each one.
1479         for x in range(1,len(esclist),2):
1480             esclist[x] = "%c" % (int(esclist[x][2:],16))
1481         return "".join(esclist)
1483     def parse_address(self, uid):
1484         """parses uid and returns a tuple of real name and email address"""
1486         (name, address) = email.Utils.parseaddr(uid)
# Strip any parenthesised comment from the real-name part before de-escaping.
1487         name = re.sub(r"\s*[(].*[)]", "", name)
1488         name = self.de_escape_gpg_str(name)
1491         return (name, address)
# Populate self.keys / self.fpr_lookup from the colon-delimited gpg listing.
1493     def load_keys(self, keyring):
1494         if not self.keyring_id:
1495             raise Exception('Must be initialized with database information')
1497         k = os.popen(self.gpg_invocation % keyring, "r")
1501         for line in k.xreadlines():
1502             field = line.split(":")
# "pub" record starts a new key; field[9] is its primary uid.
1503             if field[0] == "pub":
1506                 (name, addr) = self.parse_address(field[9])
1508                     self.keys[key]["email"] = addr
1509                 self.keys[key]["name"] = name
1510                 self.keys[key]["fingerprints"] = []
# Subkeys flagged with capability "s" may sign; their fpr records are collected below.
1512             elif key and field[0] == "sub" and len(field) >= 12:
1513                 signingkey = ("s" in field[11])
1514             elif key and field[0] == "uid":
1515                 (name, addr) = self.parse_address(field[9])
# Only take the first usable email address per key.
1516                 if "email" not in self.keys[key] and "@" in addr:
1517                     self.keys[key]["email"] = addr
1518                     self.keys[key]["name"] = name
1519             elif signingkey and field[0] == "fpr":
1520                 self.keys[key]["fingerprints"].append(field[9])
1521                 self.fpr_lookup[field[9]] = key
# Map fingerprints to Debian uids via LDAP; returns (byname, byuid) dicts.
1523     def import_users_from_ldap(self, session):
1527         LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
1528         LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
# Anonymous bind; search filtered on the configured valid GID.
1530         l = ldap.open(LDAPServer)
1531         l.simple_bind_s("","")
1532         Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
1533                "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
1534                ["uid", "keyfingerprint", "cn", "mn", "sn"])
1536         ldap_fin_uid_id = {}
1543             uid = entry["uid"][0]
1544             name = get_ldap_name(entry)
1545             fingerprints = entry["keyFingerPrint"]
1547             for f in fingerprints:
1548                 key = self.fpr_lookup.get(f, None)
1549                 if key not in self.keys:
1551                 self.keys[key]["uid"] = uid
1555                 keyid = get_or_set_uid(uid, session).uid_id
1556                 byuid[keyid] = (uid, name)
1557                 byname[uid] = (keyid, name)
1559         return (byname, byuid)
# Derive uids directly from key email addresses, using `format` (e.g. "%s") as template.
1561     def generate_users_from_keyring(self, format, session):
1565         for x in self.keys.keys():
# Keys without an email get a placeholder uid and are recorded once below.
1566             if "email" not in self.keys[x]:
1568                 self.keys[x]["uid"] = format % "invalid-uid"
1570                 uid = format % self.keys[x]["email"]
1571                 keyid = get_or_set_uid(uid, session).uid_id
1572                 byuid[keyid] = (uid, self.keys[x]["name"])
1573                 byname[uid] = (keyid, self.keys[x]["name"])
1574                 self.keys[x]["uid"] = uid
1577             uid = format % "invalid-uid"
1578             keyid = get_or_set_uid(uid, session).uid_id
1579             byuid[keyid] = (uid, "ungeneratable user id")
1580             byname[uid] = (keyid, "ungeneratable user id")
1582         return (byname, byuid)
1584 __all__.append('Keyring')
# Lookup of a Keyring row by name; returns None when no row exists.
1587 def get_keyring(keyring, session=None):
1589     If C{keyring} does not have an entry in the C{keyrings} table yet, return None
1590     If C{keyring} already has an entry, simply return the existing Keyring
1592     @type keyring: string
1593     @param keyring: the keyring name
1596     @return: the Keyring object for this keyring
1599     q = session.query(Keyring).filter_by(keyring_name=keyring)
1603     except NoResultFound:
1606 __all__.append('get_keyring')
1608 ################################################################################
1610 class KeyringACLMap(object):
# ORM class mapping keyrings to ACLs (keyring_acl_map table).
1611     def __init__(self, *args, **kwargs):
# NOTE(review): the `def __repr__` header for the next line is missing from this listing.
1615         return '<KeyringACLMap %s>' % self.keyring_acl_map_id
1617 __all__.append('KeyringACLMap')
1619 ################################################################################
1621 class DBChange(object):
# ORM class for an uploaded .changes file tracked in the database.
1622     def __init__(self, *args, **kwargs):
1626         return '<DBChange %s>' % self.changesname
# Detach this changes file from its policy queue and drop its file references.
1628     def clean_from_queue(self):
1629         session = DBConn().session().object_session(self)
1631         # Remove changes_pool_files entries
1634         # Remove changes_pending_files references
1637         # Clear out of queue
1638         self.in_queue = None
1639         self.approved_for_id = None
1641 __all__.append('DBChange')
# Lookup of a DBChange by its .changes filename.
1644 def get_dbchange(filename, session=None):
1646     returns DBChange object for given C{filename}.
1648     @type filename: string
1649     @param filename: the name of the file
1651     @type session: Session
1652     @param session: Optional SQLA session object (a temporary one will be
1653     generated if not supplied)
1656     @return: DBChange object for the given filename (C{None} if not present)
1659     q = session.query(DBChange).filter_by(changesname=filename)
1663     except NoResultFound:
1666 __all__.append('get_dbchange')
1668 ################################################################################
1670 class Location(ORMObject):
# ORM class for an on-disk archive location (location table).
1671     def __init__(self, path = None, component = None):
1673         self.component = component
1674         # the column 'type' should go away, see comment at mapper
1675         self.archive_type = 'pool'
1677     def properties(self):
1678         return ['path', 'location_id', 'archive_type', 'component', \
1681     def not_null_constraints(self):
1682         return ['path', 'archive_type']
1684 __all__.append('Location')
# Find a Location by path, optionally restricted by archive and component names.
1687 def get_location(location, component=None, archive=None, session=None):
1689     Returns Location object for the given combination of location, component
1692     @type location: string
1693     @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}
1695     @type component: string
1696     @param component: the component name (if None, no restriction applied)
1698     @type archive: string
1699     @param archive: the archive name (if None, no restriction applied)
1701     @rtype: Location / None
1702     @return: Either a Location object or None if one can't be found
1705     q = session.query(Location).filter_by(path=location)
1707     if archive is not None:
1708         q = q.join(Archive).filter_by(archive_name=archive)
1710     if component is not None:
1711         q = q.join(Component).filter_by(component_name=component)
1715     except NoResultFound:
1718 __all__.append('get_location')
1720 ################################################################################
1722 class Maintainer(ORMObject):
# ORM class for a maintainer name/email string (maintainer table).
1723     def __init__(self, name = None):
1726     def properties(self):
1727         return ['name', 'maintainer_id']
1729     def not_null_constraints(self):
# Split "Name <email>" into its parts via fix_maintainer; empty tuple when unset.
1732     def get_split_maintainer(self):
1733         if not hasattr(self, 'name') or self.name is None:
1734             return ('', '', '', '')
1736         return fix_maintainer(self.name.strip())
1738 __all__.append('Maintainer')
# Get-or-create pattern for Maintainer rows, mirroring get_or_set_fingerprint.
1741 def get_or_set_maintainer(name, session=None):
1743     Returns Maintainer object for given maintainer name.
1745     If no matching maintainer name is found, a row is inserted.
1748     @param name: The maintainer name to add
1750     @type session: SQLAlchemy
1751     @param session: Optional SQL session object (a temporary one will be
1752     generated if not supplied). If not passed, a commit will be performed at
1753     the end of the function, otherwise the caller is responsible for commiting.
1754     A flush will be performed either way.
1757     @return: the Maintainer object for the given maintainer
1760     q = session.query(Maintainer).filter_by(name=name)
1763     except NoResultFound:
1764         maintainer = Maintainer()
1765         maintainer.name = name
1766         session.add(maintainer)
1767         session.commit_or_flush()
1772 __all__.append('get_or_set_maintainer')
# Primary-key lookup of a Maintainer; None when the id is unknown.
1775 def get_maintainer(maintainer_id, session=None):
1777     Return the name of the maintainer behind C{maintainer_id} or None if that
1778     maintainer_id is invalid.
1780     @type maintainer_id: int
1781     @param maintainer_id: the id of the maintainer
1784     @return: the Maintainer with this C{maintainer_id}
1787     return session.query(Maintainer).get(maintainer_id)
1789 __all__.append('get_maintainer')
1791 ################################################################################
1793 class NewComment(object):
# ORM class for a reviewer comment on a package sitting in the NEW queue.
1794     def __init__(self, *args, **kwargs):
# NOTE(review): the `def __repr__` header for the next line is missing from this listing.
1798         return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
1800 __all__.append('NewComment')
def has_new_comment(package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """

    q = session.query(NewComment)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    # `count() > 0` already yields a boolean; the previous bool() wrapper was
    # redundant.  An existence check via count() is kept to preserve the
    # original query shape.
    return q.count() > 0

__all__.append('has_new_comment')
# Filter NewComment rows by any combination of package, version and comment id.
1830 def get_new_comments(package=None, version=None, comment_id=None, session=None):
1832     Returns (possibly empty) list of NewComment objects for the given
1835     @type package: string (optional)
1836     @param package: name of the package
1838     @type version: string (optional)
1839     @param version: package version
1841     @type comment_id: int (optional)
1842     @param comment_id: An id of a comment
1844     @type session: Session
1845     @param session: Optional SQLA session object (a temporary one will be
1846     generated if not supplied)
1849     @return: A (possibly empty) list of NewComment objects will be returned
1852     q = session.query(NewComment)
1853     if package is not None: q = q.filter_by(package=package)
1854     if version is not None: q = q.filter_by(version=version)
1855     if comment_id is not None: q = q.filter_by(comment_id=comment_id)
1859 __all__.append('get_new_comments')
1861 ################################################################################
1863 class Override(ORMObject):
# ORM class for an override entry (suite/component/type-specific section+priority).
1864     def __init__(self, package = None, suite = None, component = None, overridetype = None, \
1865         section = None, priority = None):
1866         self.package = package
1868         self.component = component
1869         self.overridetype = overridetype
1870         self.section = section
1871         self.priority = priority
1873     def properties(self):
1874         return ['package', 'suite', 'component', 'overridetype', 'section', \
1877     def not_null_constraints(self):
1878         return ['package', 'suite', 'component', 'overridetype', 'section']
1880 __all__.append('Override')
# Fetch Override rows for a package, optionally limited to suites/components/types.
1883 def get_override(package, suite=None, component=None, overridetype=None, session=None):
1885     Returns Override object for the given parameters
1887     @type package: string
1888     @param package: The name of the package
1890     @type suite: string, list or None
1891     @param suite: The name of the suite (or suites if a list) to limit to.  If
1892                   None, don't limit.  Defaults to None.
1894     @type component: string, list or None
1895     @param component: The name of the component (or components if a list) to
1896                       limit to.  If None, don't limit.  Defaults to None.
1898     @type overridetype: string, list or None
1899     @param overridetype: The name of the overridetype (or overridetypes if a list) to
1900                          limit to.  If None, don't limit.  Defaults to None.
1902     @type session: Session
1903     @param session: Optional SQLA session object (a temporary one will be
1904     generated if not supplied)
1907     @return: A (possibly empty) list of Override objects will be returned
1910     q = session.query(Override)
1911     q = q.filter_by(package=package)
# Scalars are normalised to single-element lists so in_() works uniformly.
1913     if suite is not None:
1914         if not isinstance(suite, list): suite = [suite]
1915         q = q.join(Suite).filter(Suite.suite_name.in_(suite))
1917     if component is not None:
1918         if not isinstance(component, list): component = [component]
1919         q = q.join(Component).filter(Component.component_name.in_(component))
1921     if overridetype is not None:
1922         if not isinstance(overridetype, list): overridetype = [overridetype]
1923         q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))
1927 __all__.append('get_override')
1930 ################################################################################
1932 class OverrideType(ORMObject):
# ORM class for an override type (e.g. deb, udeb, dsc).
1933     def __init__(self, overridetype = None):
1934         self.overridetype = overridetype
1936     def properties(self):
1937         return ['overridetype', 'overridetype_id', 'overrides_count']
1939     def not_null_constraints(self):
1940         return ['overridetype']
1942 __all__.append('OverrideType')
# Lookup of an OverrideType row by its name.
1945 def get_override_type(override_type, session=None):
1947     Returns OverrideType object for given C{override type}.
1949     @type override_type: string
1950     @param override_type: The name of the override type
1952     @type session: Session
1953     @param session: Optional SQLA session object (a temporary one will be
1954     generated if not supplied)
1957     @return: the database id for the given override type
1960     q = session.query(OverrideType).filter_by(overridetype=override_type)
1964     except NoResultFound:
1967 __all__.append('get_override_type')
1969 ################################################################################
1971 class PolicyQueue(object):
# ORM class for a policy queue (e.g. NEW, byhand) holding uploads awaiting action.
1972     def __init__(self, *args, **kwargs):
# NOTE(review): the `def __repr__` header for the next line is missing from this listing.
1976         return '<PolicyQueue %s>' % self.queue_name
1978 __all__.append('PolicyQueue')
# Lookup of a PolicyQueue row by queue name.
1981 def get_policy_queue(queuename, session=None):
1983     Returns PolicyQueue object for given C{queue name}
1985     @type queuename: string
1986     @param queuename: The name of the queue
1988     @type session: Session
1989     @param session: Optional SQLA session object (a temporary one will be
1990     generated if not supplied)
1993     @return: PolicyQueue object for the given queue
1996     q = session.query(PolicyQueue).filter_by(queue_name=queuename)
2000     except NoResultFound:
2003 __all__.append('get_policy_queue')
# Lookup of a PolicyQueue row by its filesystem path.
2006 def get_policy_queue_from_path(pathname, session=None):
2008     Returns PolicyQueue object for given C{path name}
2010     @type pathname: string
2011     @param pathname: The path
2013     @type session: Session
2014     @param session: Optional SQLA session object (a temporary one will be
2015     generated if not supplied)
2018     @return: PolicyQueue object for the given queue
2021     q = session.query(PolicyQueue).filter_by(path=pathname)
2025     except NoResultFound:
2028 __all__.append('get_policy_queue_from_path')
2030 ################################################################################
2032 class Priority(ORMObject):
# ORM class for a package priority (required/important/standard/optional/extra).
2033     def __init__(self, priority = None, level = None):
2034         self.priority = priority
2037     def properties(self):
2038         return ['priority', 'priority_id', 'level', 'overrides_count']
2040     def not_null_constraints(self):
2041         return ['priority', 'level']
# Allow direct comparison against a plain priority-name string.
2043     def __eq__(self, val):
2044         if isinstance(val, str):
2045             return (self.priority == val)
2046         # This signals to use the normal comparison operator
2047         return NotImplemented
2049     def __ne__(self, val):
2050         if isinstance(val, str):
2051             return (self.priority != val)
2052         # This signals to use the normal comparison operator
2053         return NotImplemented
2055 __all__.append('Priority')
# Lookup of a Priority row by its name.
2058 def get_priority(priority, session=None):
2060     Returns Priority object for given C{priority name}.
2062     @type priority: string
2063     @param priority: The name of the priority
2065     @type session: Session
2066     @param session: Optional SQLA session object (a temporary one will be
2067     generated if not supplied)
2070     @return: Priority object for the given priority
2073     q = session.query(Priority).filter_by(priority=priority)
2077     except NoResultFound:
2080 __all__.append('get_priority')
# Build a {priority name: priority_id} dict over all Priority rows.
2083 def get_priorities(session=None):
2085     Returns dictionary of priority names -> id mappings
2087     @type session: Session
2088     @param session: Optional SQL session object (a temporary one will be
2089     generated if not supplied)
2092     @return: dictionary of priority names -> id mappings
2096     q = session.query(Priority)
2098         ret[x.priority] = x.priority_id
2102 __all__.append('get_priorities')
2104 ################################################################################
2106 class Section(ORMObject):
# ORM class for an archive section (admin, devel, libs, ...).
2107     def __init__(self, section = None):
2108         self.section = section
2110     def properties(self):
2111         return ['section', 'section_id', 'overrides_count']
2113     def not_null_constraints(self):
# Allow direct comparison against a plain section-name string.
2116     def __eq__(self, val):
2117         if isinstance(val, str):
2118             return (self.section == val)
2119         # This signals to use the normal comparison operator
2120         return NotImplemented
2122     def __ne__(self, val):
2123         if isinstance(val, str):
2124             return (self.section != val)
2125         # This signals to use the normal comparison operator
2126         return NotImplemented
2128 __all__.append('Section')
# Lookup of a Section row by its name.
2131 def get_section(section, session=None):
2133     Returns Section object for given C{section name}.
2135     @type section: string
2136     @param section: The name of the section
2138     @type session: Session
2139     @param session: Optional SQLA session object (a temporary one will be
2140     generated if not supplied)
2143     @return: Section object for the given section name
2146     q = session.query(Section).filter_by(section=section)
2150     except NoResultFound:
2153 __all__.append('get_section')
# Build a {section name: section_id} dict over all Section rows.
2156 def get_sections(session=None):
2158     Returns dictionary of section names -> id mappings
2160     @type session: Session
2161     @param session: Optional SQL session object (a temporary one will be
2162     generated if not supplied)
2165     @return: dictionary of section names -> id mappings
2169     q = session.query(Section)
2171         ret[x.section] = x.section_id
2175 __all__.append('get_sections')
2177 ################################################################################
2179 class DBSource(ORMObject):
# ORM class for a source package row (source table).
2180     def __init__(self, source = None, version = None, maintainer = None, \
2181         changedby = None, poolfile = None, install_date = None):
2182         self.source = source
2183         self.version = version
2184         self.maintainer = maintainer
2185         self.changedby = changedby
2186         self.poolfile = poolfile
2187         self.install_date = install_date
2189     def properties(self):
2190         return ['source', 'source_id', 'maintainer', 'changedby', \
2191             'fingerprint', 'poolfile', 'version', 'suites_count', \
2192             'install_date', 'binaries_count']
2194     def not_null_constraints(self):
# NOTE(review): 'install_date' appears twice in this list — harmless but redundant.
2195         return ['source', 'version', 'install_date', 'maintainer', \
2196             'changedby', 'poolfile', 'install_date']
# Parse the .dsc in the pool with Deb822 to recover its control stanza.
2198     def read_control(self):
2200         Reads the control information from a dsc
2203         @return: (stanza, controldict)  stanza is the text of the control
2204                  section.  controldict is the information in a dictionary
2207         from debian.debfile import Deb822
2208         fullpath = self.poolfile.fullpath
2209         fields = Deb822(open(self.poolfile.fullpath, 'r'))
# Proxy exposing the source's key/value metadata as a dict-like attribute.
2212     metadata = association_proxy('key', 'value')
2214 __all__.append('DBSource')
# Check that a matching source package (exact or bin-NMU-stripped version) exists
# in the given suites or any suite mapped onto them.
2217 def source_exists(source, source_version, suites = ["any"], session=None):
2219     Ensure that source exists somewhere in the archive for the binary
2220     upload being processed.
2221     1. exact match     => 1.0-3
2222     2. bin-only NMU    => 1.0-3+b1 , 1.0-3.1+b1
2224     @type source: string
2225     @param source: source name
2227     @type source_version: string
2228     @param source_version: expected source version
2231     @param suites: list of suites to check in, default I{any}
2233     @type session: Session
2234     @param session: Optional SQLA session object (a temporary one will be
2235     generated if not supplied)
2238     @return: returns 1 if a source with expected version is found, otherwise 0
# Strip a "+bN"-style bin-NMU suffix so 1.0-3+b1 matches source 1.0-3.
2245     from daklib.regexes import re_bin_only_nmu
2246     orig_source_version = re_bin_only_nmu.sub('', source_version)
2248     for suite in suites:
2249         q = session.query(DBSource).filter_by(source=source). \
2250             filter(DBSource.version.in_([source_version, orig_source_version]))
2252             # source must exist in suite X, or in some other suite that's
2253             # mapped to X, recursively... silent-maps are counted too,
2254             # unreleased-maps aren't.
2255             maps = cnf.ValueList("SuiteMappings")[:]
2257             maps = [ m.split() for m in maps ]
2258             maps = [ (x[1], x[2]) for x in maps
2259                      if x[0] == "map" or x[0] == "silent-map" ]
# Expand the suite set transitively along the map edges.
2261             for (from_, to) in maps:
2262                 if from_ in s and to not in s:
2265             q = q.filter(DBSource.suites.any(Suite.suite_name.in_(s)))
2270         # No source found so return not ok
2275 __all__.append('source_exists')
# All suites that contain any version of the named source package.
2278 def get_suites_source_in(source, session=None):
2280     Returns list of Suite objects which given C{source} name is in
2283     @param source: DBSource package name to search for
2286     @return: list of Suite objects for the given source
2289     return session.query(Suite).filter(Suite.sources.any(source=source)).all()
2291 __all__.append('get_suites_source_in')
# Filter DBSource rows by name, with optional version and dm_upload_allowed limits.
2294 def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
2296     Returns list of DBSource objects for given C{source} name and other parameters
2299     @param source: DBSource package name to search for
2301     @type version: str or None
2302     @param version: DBSource version name to search for or None if not applicable
2304     @type dm_upload_allowed: bool
2305     @param dm_upload_allowed: If None, no effect.  If True or False, only
2306     return packages with that dm_upload_allowed setting
2308     @type session: Session
2309     @param session: Optional SQL session object (a temporary one will be
2310     generated if not supplied)
2313     @return: list of DBSource objects for the given name (may be empty)
2316     q = session.query(DBSource).filter_by(source=source)
2318     if version is not None:
2319         q = q.filter_by(version=version)
2321     if dm_upload_allowed is not None:
2322         q = q.filter_by(dm_upload_allowed=dm_upload_allowed)
2326 __all__.append('get_sources_from_name')
2328 # FIXME: This function fails badly if it finds more than 1 source package and
2329 # its implementation is trivial enough to be inlined.
# Resolve the single DBSource for (source, suite) via the suite's sources relation.
2331 def get_source_in_suite(source, suite, session=None):
2333     Returns a DBSource object for a combination of C{source} and C{suite}.
2335       - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2336       - B{suite} - a suite name, eg. I{unstable}
2338     @type source: string
2339     @param source: source package name
2342     @param suite: the suite name
2345     @return: the version for I{source} in I{suite}
2349     q = get_suite(suite, session).get_sources(source)
2352     except NoResultFound:
2355 __all__.append('get_source_in_suite')
2357 ################################################################################
# Insert a source upload (.dsc plus its files) into the database: source row,
# pool files, dsc_files rows and src_uploaders.  Returns data for later stages.
2360 def add_dsc_to_db(u, filename, session=None):
2361     entry = u.pkg.files[filename]
2365     source.source = u.pkg.dsc["source"]
2366     source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
2367     source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
2368     source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
2369     source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
2370     source.install_date = datetime.now().date()
2372     dsc_component = entry["component"]
2373     dsc_location_id = entry["location id"]
2375     source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")
2377     # Set up a new poolfile if necessary
2378     if not entry.has_key("files id") or not entry["files id"]:
2379         filename = entry["pool name"] + filename
2380         poolfile = add_poolfile(filename, entry, dsc_location_id, session)
2382         pfs.append(poolfile)
2383         entry["files id"] = poolfile.file_id
2385     source.poolfile_id = entry["files id"]
# Attach the source to every suite listed in the changes' distribution field.
2388     suite_names = u.pkg.changes["distribution"].keys()
2389     source.suites = session.query(Suite). \
2390         filter(Suite.suite_name.in_(suite_names)).all()
2392     # Add the source files to the DB (files and dsc_files)
2394     dscfile.source_id = source.source_id
2395     dscfile.poolfile_id = entry["files id"]
2396     session.add(dscfile)
2398     for dsc_file, dentry in u.pkg.dsc_files.items():
2400         df.source_id = source.source_id
2402         # If the .orig tarball is already in the pool, it's
2403         # files id is stored in dsc_files by check_dsc().
2404         files_id = dentry.get("files id", None)
2406         # Find the entry in the files hash
2407         # TODO: Bail out here properly
2409         for f, e in u.pkg.files.items():
2414         if files_id is None:
2415             filename = dfentry["pool name"] + dsc_file
2417             (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
2418             # FIXME: needs to check for -1/-2 and or handle exception
2419             if found and obj is not None:
2420                 files_id = obj.file_id
2423             # If still not found, add it
2424             if files_id is None:
2425                 # HACK: Force sha1sum etc into dentry
2426                 dentry["sha1sum"] = dfentry["sha1sum"]
2427                 dentry["sha256sum"] = dfentry["sha256sum"]
2428                 poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
2429                 pfs.append(poolfile)
2430                 files_id = poolfile.file_id
2432             poolfile = get_poolfile_by_id(files_id, session)
2433             if poolfile is None:
2434                 utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
2435             pfs.append(poolfile)
2437         df.poolfile_id = files_id
2440     # Add the src_uploaders to the DB
2441     uploader_ids = [source.maintainer_id]
2442     if u.pkg.dsc.has_key("uploaders"):
# Uploaders are comma-separated; ">, " is rewritten to a tab to split safely
# even when names themselves contain commas.
2443         for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
2445             uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)
2448     for up_id in uploader_ids:
2449         if added_ids.has_key(up_id):
2451             utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))
2457         su.maintainer_id = up_id
2458         su.source_id = source.source_id
2463     return source, dsc_component, dsc_location_id, pfs
2465 __all__.append('add_dsc_to_db')
# Insert a binary upload (.deb or .udeb) into the database: binaries row, pool
# file, source linkage (including Built-Using) and suite membership.
2468 def add_deb_to_db(u, filename, session=None):
2470     Contrary to what you might expect, this routine deals with both
2471     debs and udebs. That info is in 'dbtype', whilst 'type' is
2472     'deb' for both of them
2475     entry = u.pkg.files[filename]
2478     bin.package = entry["package"]
2479     bin.version = entry["version"]
2480     bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
2481     bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
2482     bin.arch_id = get_architecture(entry["architecture"], session).arch_id
2483     bin.binarytype = entry["dbtype"]
# Resolve/create the pool file backing this binary.
2486     filename = entry["pool name"] + filename
2487     fullpath = os.path.join(cnf["Dir::Pool"], filename)
2488     if not entry.get("location id", None):
2489         entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id
2491     if entry.get("files id", None):
2492         poolfile = get_poolfile_by_id(bin.poolfile_id)
2493         bin.poolfile_id = entry["files id"]
2495         poolfile = add_poolfile(filename, entry, entry["location id"], session)
2496         bin.poolfile_id = entry["files id"] = poolfile.file_id
# A binary must resolve to exactly one source package, else the upload is rejected.
2499     bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
2500     if len(bin_sources) != 1:
2501         raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
2502                                   (bin.package, bin.version, entry["architecture"],
2503                                    filename, bin.binarytype, u.pkg.changes["fingerprint"])
2505     bin.source_id = bin_sources[0].source_id
# Built-Using references must each resolve to exactly one existing source too.
2507     if entry.has_key("built-using"):
2508         for srcname, version in entry["built-using"]:
2509             exsources = get_sources_from_name(srcname, version, session=session)
2510             if len(exsources) != 1:
2511                 raise NoSourceFieldError, "Unable to find source package (%s = %s) in Built-Using for %s (%s), %s, file %s, type %s, signed by %s" % \
2512                                           (srcname, version, bin.package, bin.version, entry["architecture"],
2513                                            filename, bin.binarytype, u.pkg.changes["fingerprint"])
2515             bin.extra_sources.append(exsources[0])
2517     # Add and flush object so it has an ID
2520     suite_names = u.pkg.changes["distribution"].keys()
2521     bin.suites = session.query(Suite). \
2522         filter(Suite.suite_name.in_(suite_names)).all()
2526     # Deal with contents - disabled for now
2527     #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
2529     #    print "REJECT\nCould not determine contents of package %s" % bin.package
2530     #    session.rollback()
2531     #    raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
2535 __all__.append('add_deb_to_db')
2537 ################################################################################
2539 class SourceACL(object):
# Thin ORM class mapped onto the source_acl table (see the SourceACL
# mapper in DBConn.__setupmappers below).
2540 def __init__(self, *args, **kwargs):
# NOTE(review): the 'def __repr__' line is missing from this sparse view;
# the return below is its body.
2544 return '<SourceACL %s>' % self.source_acl_id
2546 __all__.append('SourceACL')
2548 ################################################################################
2550 class SrcFormat(object):
# ORM class mapped onto the src_format table; exposes src_format_id and
# format_name via the mapper in DBConn.__setupmappers.
2551 def __init__(self, *args, **kwargs):
# NOTE(review): the 'def __repr__' line is missing from this sparse view.
2555 return '<SrcFormat %s>' % (self.format_name)
2557 __all__.append('SrcFormat')
2559 ################################################################################
2561 class SrcUploader(object):
# ORM class mapped onto src_uploaders: links a DBSource row to a
# Maintainer row (see the SrcUploader mapper in DBConn.__setupmappers).
2562 def __init__(self, *args, **kwargs):
# NOTE(review): the 'def __repr__' line is missing from this sparse view.
2566 return '<SrcUploader %s>' % self.uploader_id
2568 __all__.append('SrcUploader')
2570 ################################################################################
# (display-name, attribute-name) pairs iterated by the Suite method below
# (the one looping "for disp, field in SUITE_FIELDS") to render a suite's
# settings as "Name: value" lines.
2572 SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
2573 ('SuiteID', 'suite_id'),
2574 ('Version', 'version'),
2575 ('Origin', 'origin'),
2577 ('Description', 'description'),
2578 ('Untouchable', 'untouchable'),
2579 ('Announce', 'announce'),
2580 ('Codename', 'codename'),
2581 ('OverrideCodename', 'overridecodename'),
2582 ('ValidTime', 'validtime'),
2583 ('Priority', 'priority'),
2584 ('NotAutomatic', 'notautomatic'),
2585 ('CopyChanges', 'copychanges'),
2586 ('OverrideSuite', 'overridesuite')]
2588 # Why the heck don't we have any UNIQUE constraints in table suite?
2589 # TODO: Add UNIQUE constraints for appropriate columns.
2590 class Suite(ORMObject):
# ORM class for the suite table.  Equality against a plain string compares
# the suite name, so code can write `suite == 'unstable'`.
2591 def __init__(self, suite_name = None, version = None):
2592 self.suite_name = suite_name
2593 self.version = version
# Attributes included in the generic ORMObject repr/serialisation.
2595 def properties(self):
2596 return ['suite_name', 'version', 'sources_count', 'binaries_count', \
2599 def not_null_constraints(self):
2600 return ['suite_name', 'version']
2602 def __eq__(self, val):
2603 if isinstance(val, str):
2604 return (self.suite_name == val)
2605 # This signals to use the normal comparison operator
2606 return NotImplemented
2608 def __ne__(self, val):
2609 if isinstance(val, str):
2610 return (self.suite_name != val)
2611 # This signals to use the normal comparison operator
2612 return NotImplemented
# NOTE(review): the enclosing 'def' line (a details-style method building
# 'ret') is missing from this sparse view; the loop below renders each
# SUITE_FIELDS entry as "Name: value".
2616 for disp, field in SUITE_FIELDS:
2617 val = getattr(self, field, None)
2619 ret.append("%s: %s" % (disp, val))
2621 return "\n".join(ret)
2623 def get_architectures(self, skipsrc=False, skipall=False):
2625 Returns list of Architecture objects
2627 @type skipsrc: boolean
2628 @param skipsrc: Whether to skip returning the 'source' architecture entry
2631 @type skipall: boolean
2632 @param skipall: Whether to skip returning the 'all' architecture entry
2636 @return: list of Architecture objects for the given name (may be empty)
# NOTE(review): the 'if skipsrc:' / 'if skipall:' guard lines around the
# two filters below are not visible in this view.
2639 q = object_session(self).query(Architecture).with_parent(self)
2641 q = q.filter(Architecture.arch_string != 'source')
2643 q = q.filter(Architecture.arch_string != 'all')
2644 return q.order_by(Architecture.arch_string).all()
2646 def get_sources(self, source):
2648 Returns a query object representing DBSource that is part of C{suite}.
2650 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2652 @type source: string
2653 @param source: source package name
2655 @rtype: sqlalchemy.orm.query.Query
2656 @return: a query of DBSource
2660 session = object_session(self)
# The continuation of this chained query (restricting to this suite) is
# missing from this sparse view.
2661 return session.query(DBSource).filter_by(source = source). \
2664 __all__.append('Suite')
2667 def get_suite(suite, session=None):
2669 Returns Suite object for given C{suite name}.
2672 @param suite: The name of the suite
2674 @type session: Session
2675 @param session: Optional SQLA session object (a temporary one will be
2676 generated if not supplied)
2679 @return: Suite object for the requested suite name (None if not present)
# Single-row lookup by suite_name; NoResultFound maps to the documented
# None return.  NOTE(review): the try/one() lines are missing from this view.
2682 q = session.query(Suite).filter_by(suite_name=suite)
2686 except NoResultFound:
2689 __all__.append('get_suite')
2691 ################################################################################
2693 # TODO: should be removed because the implementation is too trivial
2695 def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
2697 Returns list of Architecture objects for given C{suite} name
2700 @param suite: Suite name to search for
2702 @type skipsrc: boolean
2703 @param skipsrc: Whether to skip returning the 'source' architecture entry
2706 @type skipall: boolean
2707 @param skipall: Whether to skip returning the 'all' architecture entry
2710 @type session: Session
2711 @param session: Optional SQL session object (a temporary one will be
2712 generated if not supplied)
2715 @return: list of Architecture objects for the given name (may be empty)
# Trivial delegation to Suite.get_architectures (hence the removal TODO).
2718 return get_suite(suite, session).get_architectures(skipsrc, skipall)
2720 __all__.append('get_suite_architectures')
2722 ################################################################################
2724 class SuiteSrcFormat(object):
# ORM association class between a Suite and a SrcFormat (the
# suite_src_formats table); queried by get_suite_src_formats below.
2725 def __init__(self, *args, **kwargs):
# NOTE(review): the 'def __repr__' line is missing from this sparse view.
2729 return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
2731 __all__.append('SuiteSrcFormat')
2734 def get_suite_src_formats(suite, session=None):
2736 Returns list of allowed SrcFormat for C{suite}.
2739 @param suite: Suite name to search for
2741 @type session: Session
2742 @param session: Optional SQL session object (a temporary one will be
2743 generated if not supplied)
2746 @return: the list of allowed source formats for I{suite}
# Join through the suite_src_formats association table, filter by suite
# name and return the formats ordered by format_name.
2749 q = session.query(SrcFormat)
2750 q = q.join(SuiteSrcFormat)
2751 q = q.join(Suite).filter_by(suite_name=suite)
2752 q = q.order_by('format_name')
2756 __all__.append('get_suite_src_formats')
2758 ################################################################################
2760 class Uid(ORMObject):
# ORM class for the uid table (GPG key uids).  Like Suite, comparing
# against a plain string compares the uid value.
2761 def __init__(self, uid = None, name = None):
2765 def __eq__(self, val):
2766 if isinstance(val, str):
2767 return (self.uid == val)
2768 # This signals to use the normal comparison operator
2769 return NotImplemented
2771 def __ne__(self, val):
2772 if isinstance(val, str):
2773 return (self.uid != val)
2774 # This signals to use the normal comparison operator
2775 return NotImplemented
# Attributes included in the generic ORMObject repr/serialisation.
2777 def properties(self):
2778 return ['uid', 'name', 'fingerprint']
2780 def not_null_constraints(self):
2783 __all__.append('Uid')
2786 def get_or_set_uid(uidname, session=None):
2788 Returns uid object for given uidname.
2790 If no matching uidname is found, a row is inserted.
2792 @type uidname: string
2793 @param uidname: The uid to add
2795 @type session: SQLAlchemy
2796 @param session: Optional SQL session object (a temporary one will be
2797 generated if not supplied). If not passed, a commit will be performed at
2798 the end of the function, otherwise the caller is responsible for commiting.
2801 @return: the uid object for the given uidname
# Look up first; on NoResultFound insert a new row and commit_or_flush
# (commit only when this function owns the session, per the docstring).
# NOTE(review): the try/one() and insertion lines are missing from this view.
2804 q = session.query(Uid).filter_by(uid=uidname)
2808 except NoResultFound:
2812 session.commit_or_flush()
2817 __all__.append('get_or_set_uid')
# Return the Uid owning the given fingerprint string, via the
# Uid<->Fingerprint relation; NoResultFound is handled (the handler body
# is missing from this sparse view, presumably returning None).
2820 def get_uid_from_fingerprint(fpr, session=None):
2821 q = session.query(Uid)
2822 q = q.join(Fingerprint).filter_by(fingerprint=fpr)
2826 except NoResultFound:
2829 __all__.append('get_uid_from_fingerprint')
2831 ################################################################################
2833 class UploadBlock(object):
# ORM class for the upload_blocks table; related to Fingerprint and Uid
# via the UploadBlock mapper in DBConn.__setupmappers.
2834 def __init__(self, *args, **kwargs):
# NOTE(review): the 'def __repr__' line is missing from this sparse view.
2838 return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
2840 __all__.append('UploadBlock')
2842 ################################################################################
2844 class MetadataKey(ORMObject):
# ORM class for the metadata_keys table: the key half of the key/value
# metadata attached to binaries and sources below.
2845 def __init__(self, key = None):
# NOTE(review): method bodies are missing from this sparse view.
2848 def properties(self):
2851 def not_null_constraints(self):
2854 __all__.append('MetadataKey')
2856 ################################################################################
2858 class BinaryMetadata(ORMObject):
# ORM class for binaries_metadata: one (key, value) pair attached to a
# DBBinary (see the BinaryMetadata mapper in DBConn.__setupmappers).
2859 def __init__(self, key = None, value = None, binary = None):
2862 self.binary = binary
2864 def properties(self):
2865 return ['binary', 'key', 'value']
2867 def not_null_constraints(self):
2870 __all__.append('BinaryMetadata')
2872 ################################################################################
2874 class SourceMetadata(ORMObject):
# ORM class for source_metadata: one (key, value) pair attached to a
# DBSource (see the SourceMetadata mapper in DBConn.__setupmappers).
2875 def __init__(self, key = None, value = None, source = None):
2878 self.source = source
2880 def properties(self):
2881 return ['source', 'key', 'value']
2883 def not_null_constraints(self):
2886 __all__.append('SourceMetadata')
2888 ################################################################################
2890 class DBConn(object):
# Shared-state ("Borg") wrapper around the archive database: every
# instance aliases its __dict__ to the class-level shared state, so the
# engine, reflected tables and mappers are set up only once per process.
# NOTE(review): the __shared_state declaration, docstring quotes and the
# __createconn() call from __init__ are not visible in this sparse view.
2892 database module init.
2896 def __init__(self, *args, **kwargs):
2897 self.__dict__ = self.__shared_state
# Initialise once; passing debug=... turns on SQL echo on the engine.
2899 if not getattr(self, 'initialised', False):
2900 self.initialised = True
2901 self.debug = kwargs.has_key('debug')
# Reflect the archive's tables and views from the live database and
# expose them as self.tbl_<name> / self.view_<name> attributes.
# NOTE(review): the 'tables = (' / 'views = (' openers and many entries
# are missing from this sparse view.
2904 def __setuptables(self):
2911 'binaries_metadata',
2915 'build_queue_files',
2920 'changes_pending_binaries',
2921 'changes_pending_files',
2922 'changes_pending_source',
2923 'changes_pending_files_map',
2924 'changes_pending_source_files',
2925 'changes_pool_files',
2927 'extra_src_references',
2936 # TODO: the maintainer column in table override should be removed.
2949 'suite_architectures',
2950 'suite_build_queue_copy',
2951 'suite_src_formats',
2957 'almost_obsolete_all_associations',
2958 'almost_obsolete_src_associations',
2959 'any_associations_source',
2960 'bin_assoc_by_arch',
2961 'bin_associations_binaries',
2962 'binaries_suite_arch',
2963 'binfiles_suite_component_arch',
2966 'newest_all_associations',
2967 'newest_any_associations',
2969 'newest_src_association',
2970 'obsolete_all_associations',
2971 'obsolete_any_associations',
2972 'obsolete_any_by_all_associations',
2973 'obsolete_src_associations',
2975 'src_associations_bin',
2976 'src_associations_src',
2977 'suite_arch_by_name',
# Columns come from reflection (autoload=True); useexisting tolerates a
# repeated setup against the shared MetaData.
2980 for table_name in tables:
2981 table = Table(table_name, self.db_meta, \
2982 autoload=True, useexisting=True)
2983 setattr(self, 'tbl_%s' % table_name, table)
2985 for view_name in views:
2986 view = Table(view_name, self.db_meta, autoload=True)
2987 setattr(self, 'view_%s' % view_name, view)
# Classical SQLAlchemy mappings: bind each ORM class in this module to
# its reflected table, renaming columns (id -> foo_id etc.) and declaring
# the relations the rest of dak relies on.
2989 def __setupmappers(self):
2990 mapper(Architecture, self.tbl_architecture,
2991 properties = dict(arch_id = self.tbl_architecture.c.id,
2992 suites = relation(Suite, secondary=self.tbl_suite_architectures,
2993 order_by='suite_name',
2994 backref=backref('architectures', order_by='arch_string'))),
2995 extension = validator)
2997 mapper(Archive, self.tbl_archive,
2998 properties = dict(archive_id = self.tbl_archive.c.id,
2999 archive_name = self.tbl_archive.c.name))
3001 mapper(BuildQueue, self.tbl_build_queue,
3002 properties = dict(queue_id = self.tbl_build_queue.c.id))
3004 mapper(BuildQueueFile, self.tbl_build_queue_files,
3005 properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
3006 poolfile = relation(PoolFile, backref='buildqueueinstances')))
# DBBinary: the central binaries mapping, including the metadata
# key->BinaryMetadata dict collection used by add_deb_to_db's callers.
3008 mapper(DBBinary, self.tbl_binaries,
3009 properties = dict(binary_id = self.tbl_binaries.c.id,
3010 package = self.tbl_binaries.c.package,
3011 version = self.tbl_binaries.c.version,
3012 maintainer_id = self.tbl_binaries.c.maintainer,
3013 maintainer = relation(Maintainer),
3014 source_id = self.tbl_binaries.c.source,
3015 source = relation(DBSource, backref='binaries'),
3016 arch_id = self.tbl_binaries.c.architecture,
3017 architecture = relation(Architecture),
3018 poolfile_id = self.tbl_binaries.c.file,
3019 poolfile = relation(PoolFile, backref=backref('binary', uselist = False)),
3020 binarytype = self.tbl_binaries.c.type,
3021 fingerprint_id = self.tbl_binaries.c.sig_fpr,
3022 fingerprint = relation(Fingerprint),
3023 install_date = self.tbl_binaries.c.install_date,
3024 suites = relation(Suite, secondary=self.tbl_bin_associations,
3025 backref=backref('binaries', lazy='dynamic')),
3026 extra_sources = relation(DBSource, secondary=self.tbl_extra_src_references,
3027 backref=backref('extra_binary_references', lazy='dynamic')),
3028 key = relation(BinaryMetadata, cascade='all',
3029 collection_class=attribute_mapped_collection('key'))),
3030 extension = validator)
3032 mapper(BinaryACL, self.tbl_binary_acl,
3033 properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))
3035 mapper(BinaryACLMap, self.tbl_binary_acl_map,
3036 properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
3037 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
3038 architecture = relation(Architecture)))
3040 mapper(Component, self.tbl_component,
3041 properties = dict(component_id = self.tbl_component.c.id,
3042 component_name = self.tbl_component.c.name),
3043 extension = validator)
3045 mapper(DBConfig, self.tbl_config,
3046 properties = dict(config_id = self.tbl_config.c.id))
3048 mapper(DSCFile, self.tbl_dsc_files,
3049 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
3050 source_id = self.tbl_dsc_files.c.source,
3051 source = relation(DBSource),
3052 poolfile_id = self.tbl_dsc_files.c.file,
3053 poolfile = relation(PoolFile)))
3055 mapper(PoolFile, self.tbl_files,
3056 properties = dict(file_id = self.tbl_files.c.id,
3057 filesize = self.tbl_files.c.size,
3058 location_id = self.tbl_files.c.location,
3059 location = relation(Location,
3060 # using lazy='dynamic' in the back
3061 # reference because we have A LOT of
3062 # files in one location
3063 backref=backref('files', lazy='dynamic'))),
3064 extension = validator)
3066 mapper(Fingerprint, self.tbl_fingerprint,
3067 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
3068 uid_id = self.tbl_fingerprint.c.uid,
3069 uid = relation(Uid),
3070 keyring_id = self.tbl_fingerprint.c.keyring,
3071 keyring = relation(Keyring),
3072 source_acl = relation(SourceACL),
3073 binary_acl = relation(BinaryACL)),
3074 extension = validator)
3076 mapper(Keyring, self.tbl_keyrings,
3077 properties = dict(keyring_name = self.tbl_keyrings.c.name,
3078 keyring_id = self.tbl_keyrings.c.id))
# DBChange and the changes_pending_* mappings track uploads moving
# through the policy queues.
3080 mapper(DBChange, self.tbl_changes,
3081 properties = dict(change_id = self.tbl_changes.c.id,
3082 poolfiles = relation(PoolFile,
3083 secondary=self.tbl_changes_pool_files,
3084 backref="changeslinks"),
3085 seen = self.tbl_changes.c.seen,
3086 source = self.tbl_changes.c.source,
3087 binaries = self.tbl_changes.c.binaries,
3088 architecture = self.tbl_changes.c.architecture,
3089 distribution = self.tbl_changes.c.distribution,
3090 urgency = self.tbl_changes.c.urgency,
3091 maintainer = self.tbl_changes.c.maintainer,
3092 changedby = self.tbl_changes.c.changedby,
3093 date = self.tbl_changes.c.date,
3094 version = self.tbl_changes.c.version,
3095 files = relation(ChangePendingFile,
3096 secondary=self.tbl_changes_pending_files_map,
3097 backref="changesfile"),
3098 in_queue_id = self.tbl_changes.c.in_queue,
3099 in_queue = relation(PolicyQueue,
3100 primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
3101 approved_for_id = self.tbl_changes.c.approved_for))
3103 mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
3104 properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))
3106 mapper(ChangePendingFile, self.tbl_changes_pending_files,
3107 properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
3108 filename = self.tbl_changes_pending_files.c.filename,
3109 size = self.tbl_changes_pending_files.c.size,
3110 md5sum = self.tbl_changes_pending_files.c.md5sum,
3111 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
3112 sha256sum = self.tbl_changes_pending_files.c.sha256sum))
3114 mapper(ChangePendingSource, self.tbl_changes_pending_source,
3115 properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
3116 change = relation(DBChange),
3117 maintainer = relation(Maintainer,
3118 primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
3119 changedby = relation(Maintainer,
3120 primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
3121 fingerprint = relation(Fingerprint),
3122 source_files = relation(ChangePendingFile,
3123 secondary=self.tbl_changes_pending_source_files,
3124 backref="pending_sources")))
3127 mapper(KeyringACLMap, self.tbl_keyring_acl_map,
3128 properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
3129 keyring = relation(Keyring, backref="keyring_acl_map"),
3130 architecture = relation(Architecture)))
3132 mapper(Location, self.tbl_location,
3133 properties = dict(location_id = self.tbl_location.c.id,
3134 component_id = self.tbl_location.c.component,
3135 component = relation(Component, backref='location'),
3136 archive_id = self.tbl_location.c.archive,
3137 archive = relation(Archive),
3138 # FIXME: the 'type' column is old cruft and
3139 # should be removed in the future.
3140 archive_type = self.tbl_location.c.type),
3141 extension = validator)
# Maintainer distinguishes the sources it maintains from those it only
# appears as Changed-By on, via the two explicit primaryjoins.
3143 mapper(Maintainer, self.tbl_maintainer,
3144 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
3145 maintains_sources = relation(DBSource, backref='maintainer',
3146 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
3147 changed_sources = relation(DBSource, backref='changedby',
3148 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
3149 extension = validator)
3151 mapper(NewComment, self.tbl_new_comments,
3152 properties = dict(comment_id = self.tbl_new_comments.c.id))
3154 mapper(Override, self.tbl_override,
3155 properties = dict(suite_id = self.tbl_override.c.suite,
3156 suite = relation(Suite, \
3157 backref=backref('overrides', lazy='dynamic')),
3158 package = self.tbl_override.c.package,
3159 component_id = self.tbl_override.c.component,
3160 component = relation(Component, \
3161 backref=backref('overrides', lazy='dynamic')),
3162 priority_id = self.tbl_override.c.priority,
3163 priority = relation(Priority, \
3164 backref=backref('overrides', lazy='dynamic')),
3165 section_id = self.tbl_override.c.section,
3166 section = relation(Section, \
3167 backref=backref('overrides', lazy='dynamic')),
3168 overridetype_id = self.tbl_override.c.type,
3169 overridetype = relation(OverrideType, \
3170 backref=backref('overrides', lazy='dynamic'))))
3172 mapper(OverrideType, self.tbl_override_type,
3173 properties = dict(overridetype = self.tbl_override_type.c.type,
3174 overridetype_id = self.tbl_override_type.c.id))
3176 mapper(PolicyQueue, self.tbl_policy_queue,
3177 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))
3179 mapper(Priority, self.tbl_priority,
3180 properties = dict(priority_id = self.tbl_priority.c.id))
3182 mapper(Section, self.tbl_section,
3183 properties = dict(section_id = self.tbl_section.c.id,
3184 section=self.tbl_section.c.section))
# DBSource mirrors DBBinary: suites via src_associations and a metadata
# key->SourceMetadata dict collection.
3186 mapper(DBSource, self.tbl_source,
3187 properties = dict(source_id = self.tbl_source.c.id,
3188 version = self.tbl_source.c.version,
3189 maintainer_id = self.tbl_source.c.maintainer,
3190 poolfile_id = self.tbl_source.c.file,
3191 poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
3192 fingerprint_id = self.tbl_source.c.sig_fpr,
3193 fingerprint = relation(Fingerprint),
3194 changedby_id = self.tbl_source.c.changedby,
3195 srcfiles = relation(DSCFile,
3196 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
3197 suites = relation(Suite, secondary=self.tbl_src_associations,
3198 backref=backref('sources', lazy='dynamic')),
3199 srcuploaders = relation(SrcUploader),
3200 key = relation(SourceMetadata, cascade='all',
3201 collection_class=attribute_mapped_collection('key'))),
3202 extension = validator)
3204 mapper(SourceACL, self.tbl_source_acl,
3205 properties = dict(source_acl_id = self.tbl_source_acl.c.id))
3207 mapper(SrcFormat, self.tbl_src_format,
3208 properties = dict(src_format_id = self.tbl_src_format.c.id,
3209 format_name = self.tbl_src_format.c.format_name))
3211 mapper(SrcUploader, self.tbl_src_uploaders,
3212 properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
3213 source_id = self.tbl_src_uploaders.c.source,
3214 source = relation(DBSource,
3215 primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
3216 maintainer_id = self.tbl_src_uploaders.c.maintainer,
3217 maintainer = relation(Maintainer,
3218 primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))
3220 mapper(Suite, self.tbl_suite,
3221 properties = dict(suite_id = self.tbl_suite.c.id,
3222 policy_queue = relation(PolicyQueue),
3223 copy_queues = relation(BuildQueue,
3224 secondary=self.tbl_suite_build_queue_copy)),
3225 extension = validator)
3227 mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
3228 properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
3229 suite = relation(Suite, backref='suitesrcformats'),
3230 src_format_id = self.tbl_suite_src_formats.c.src_format,
3231 src_format = relation(SrcFormat)))
3233 mapper(Uid, self.tbl_uid,
3234 properties = dict(uid_id = self.tbl_uid.c.id,
3235 fingerprint = relation(Fingerprint)),
3236 extension = validator)
3238 mapper(UploadBlock, self.tbl_upload_blocks,
3239 properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
3240 fingerprint = relation(Fingerprint, backref="uploadblocks"),
3241 uid = relation(Uid, backref="uploadblocks")))
3243 mapper(BinContents, self.tbl_bin_contents,
3245 binary = relation(DBBinary,
3246 backref=backref('contents', lazy='dynamic', cascade='all')),
3247 file = self.tbl_bin_contents.c.file))
3249 mapper(MetadataKey, self.tbl_metadata_keys,
3251 key_id = self.tbl_metadata_keys.c.key_id,
3252 key = self.tbl_metadata_keys.c.key))
3254 mapper(BinaryMetadata, self.tbl_binaries_metadata,
3256 binary_id = self.tbl_binaries_metadata.c.bin_id,
3257 binary = relation(DBBinary),
3258 key_id = self.tbl_binaries_metadata.c.key_id,
3259 key = relation(MetadataKey),
3260 value = self.tbl_binaries_metadata.c.value))
3262 mapper(SourceMetadata, self.tbl_source_metadata,
3264 source_id = self.tbl_source_metadata.c.src_id,
3265 source = relation(DBSource),
3266 key_id = self.tbl_source_metadata.c.key_id,
3267 key = relation(MetadataKey),
3268 value = self.tbl_source_metadata.c.value))
3270 ## Connection functions
# Build the PostgreSQL connection string from dak's configuration, create
# the engine/MetaData/sessionmaker, then reflect tables and set up mappers.
3271 def __createconn(self):
3272 from config import Config
# NOTE(review): the 'cnf = Config()' assignment is not visible in this view.
# Three connection styles: libpq service=, explicit host[:port], or a
# local (unix-socket) connection with an optional ?port=.
3274 if cnf.has_key("DB::Service"):
3275 connstr = "postgresql://service=%s" % cnf["DB::Service"]
3276 elif cnf.has_key("DB::Host"):
3278 connstr = "postgresql://%s" % cnf["DB::Host"]
3279 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
3280 connstr += ":%s" % cnf["DB::Port"]
3281 connstr += "/%s" % cnf["DB::Name"]
3284 connstr = "postgresql:///%s" % cnf["DB::Name"]
3285 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
3286 connstr += "?port=%s" % cnf["DB::Port"]
3288 engine_args = { 'echo': self.debug }
3289 if cnf.has_key('DB::PoolSize'):
3290 engine_args['pool_size'] = int(cnf['DB::PoolSize'])
3291 if cnf.has_key('DB::MaxOverflow'):
3292 engine_args['max_overflow'] = int(cnf['DB::MaxOverflow'])
# use_native_unicode only exists as an SQLAlchemy 0.6 psycopg2 option.
3293 if sa_major_version == '0.6' and cnf.has_key('DB::Unicode') and \
3294 cnf['DB::Unicode'] == 'false':
3295 engine_args['use_native_unicode'] = False
# SQLAlchemy's URL parser does not understand libpq 'service=' DSNs, so
# substitute a dialect subclass that passes them through verbatim.
3297 # Monkey patch a new dialect in in order to support service= syntax
3298 import sqlalchemy.dialects.postgresql
3299 from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
3300 class PGDialect_psycopg2_dak(PGDialect_psycopg2):
3301 def create_connect_args(self, url):
3302 if str(url).startswith('postgresql://service='):
# Strip the 'postgresql://' prefix (21 chars) to recover 'service=<name>'.
3304 servicename = str(url)[21:]
3305 return (['service=%s' % servicename], {})
3307 return PGDialect_psycopg2.create_connect_args(self, url)
3309 sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak
3311 self.db_pg = create_engine(connstr, **engine_args)
3312 self.db_meta = MetaData()
3313 self.db_meta.bind = self.db_pg
3314 self.db_smaker = sessionmaker(bind=self.db_pg,
3318 self.__setuptables()
3319 self.__setupmappers()
# Remember the creating process so session() can detect forks.
3320 self.pid = os.getpid()
# NOTE(review): the 'def session(self):' line is not visible here; the
# guard below re-creates the connection in forked children so they do not
# share the parent's database socket, then hands out a new session.
3323 # reinitialize DBConn in new processes
3324 if self.pid != os.getpid():
3327 return self.db_smaker()
3329 __all__.append('DBConn')