5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
37 from os.path import normpath
48 import simplejson as json
50 from datetime import datetime, timedelta
51 from errno import ENOENT
52 from tempfile import mkstemp, mkdtemp
53 from subprocess import Popen, PIPE
54 from tarfile import TarFile
56 from inspect import getargspec
59 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
61 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
62 backref, MapperExtension, EXT_CONTINUE, object_mapper, clear_mappers
63 from sqlalchemy import types as sqltypes
64 from sqlalchemy.orm.collections import attribute_mapped_collection
65 from sqlalchemy.ext.associationproxy import association_proxy
67 # Don't remove this, we re-export the exceptions to scripts which import us
68 from sqlalchemy.exc import *
69 from sqlalchemy.orm.exc import NoResultFound
71 # Only import Config until Queue stuff is changed to store its config
73 from config import Config
74 from textutils import fix_maintainer
75 from dak_exceptions import DBUpdateError, NoSourceFieldError
77 # suppress some deprecation warnings in squeeze related to sqlalchemy
79 warnings.filterwarnings('ignore', \
80 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
84 ################################################################################
86 # Patch in support for the debversion field type so that it works during
90 # that is for sqlalchemy 0.6
91 UserDefinedType = sqltypes.UserDefinedType
93 # this one for sqlalchemy 0.5
94 UserDefinedType = sqltypes.TypeEngine
96 class DebVersion(UserDefinedType):
97 def get_col_spec(self):
100 def bind_processor(self, dialect):
103 # ' = None' is needed for sqlalchemy 0.5:
104 def result_processor(self, dialect, coltype = None):
107 sa_major_version = sqlalchemy.__version__[0:3]
108 if sa_major_version in ["0.5", "0.6"]:
109 from sqlalchemy.databases import postgres
110 postgres.ischema_names['debversion'] = DebVersion
112 raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
114 ################################################################################
116 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
118 ################################################################################
120 def session_wrapper(fn):
122 Wrapper around common ".., session=None):" handling. If the wrapped
123 function is called without passing 'session', we create a local one
124 and destroy it when the function ends.
126 Also attaches a commit_or_flush method to the session; if we created a
127 local session, this is a synonym for session.commit(), otherwise it is a
128 synonym for session.flush().
131 def wrapped(*args, **kwargs):
132 private_transaction = False
134 # Find the session object
135 session = kwargs.get('session')
138 if len(args) <= len(getargspec(fn)[0]) - 1:
139 # No session specified as last argument or in kwargs
140 private_transaction = True
141 session = kwargs['session'] = DBConn().session()
143 # Session is last argument in args
147 session = args[-1] = DBConn().session()
148 private_transaction = True
150 if private_transaction:
151 session.commit_or_flush = session.commit
153 session.commit_or_flush = session.flush
156 return fn(*args, **kwargs)
158 if private_transaction:
159 # We created a session; close it.
162 wrapped.__doc__ = fn.__doc__
163 wrapped.func_name = fn.func_name
167 __all__.append('session_wrapper')
169 ################################################################################
171 class ORMObject(object):
173 ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
174 derived classes must implement the properties() method.
177 def properties(self):
179 This method should be implemented by all derived classes and returns a
180 list of the important properties. The properties 'created' and
181 'modified' will be added automatically. A suffix '_count' should be
182 added to properties that are lists or query objects. The most important
183 property name should be returned as the first element in the list
184 because it is used by repr().
190 Returns a JSON representation of the object based on the properties
191 returned from the properties() method.
194 # add created and modified
195 all_properties = self.properties() + ['created', 'modified']
196 for property in all_properties:
197 # check for list or query
198 if property[-6:] == '_count':
199 real_property = property[:-6]
200 if not hasattr(self, real_property):
202 value = getattr(self, real_property)
203 if hasattr(value, '__len__'):
206 elif hasattr(value, 'count'):
208 value = value.count()
210 raise KeyError('Do not understand property %s.' % property)
212 if not hasattr(self, property):
215 value = getattr(self, property)
219 elif isinstance(value, ORMObject):
220 # use repr() for ORMObject types
223 # we want a string for all other types because json cannot
226 data[property] = value
227 return json.dumps(data)
231 Returns the name of the class.
233 return type(self).__name__
237 Returns a short string representation of the object using the first
238 element from the properties() method.
240 primary_property = self.properties()[0]
241 value = getattr(self, primary_property)
242 return '<%s %s>' % (self.classname(), str(value))
246 Returns a human readable form of the object using the properties()
249 return '<%s %s>' % (self.classname(), self.json())
251 def not_null_constraints(self):
253 Returns a list of properties that must be not NULL. Derived classes
254 should override this method if needed.
258 validation_message = \
259 "Validation failed because property '%s' must not be empty in object\n%s"
263 This function validates the not NULL constraints as returned by
264 not_null_constraints(). It raises the DBUpdateError exception if
267 for property in self.not_null_constraints():
268 # TODO: It is a bit awkward that the mapper configuration allow
269 # directly setting the numeric _id columns. We should get rid of it
271 if hasattr(self, property + '_id') and \
272 getattr(self, property + '_id') is not None:
274 if not hasattr(self, property) or getattr(self, property) is None:
275 raise DBUpdateError(self.validation_message % \
276 (property, str(self)))
280 def get(cls, primary_key, session = None):
282 This is a support function that allows getting an object by its primary
285 Architecture.get(3[, session])
287 instead of the more verbose
289 session.query(Architecture).get(3)
291 return session.query(cls).get(primary_key)
293 def session(self, replace = False):
295 Returns the current session that is associated with the object. May
296 return None is object is in detached state.
299 return object_session(self)
301 def clone(self, session = None):
303 Clones the current object in a new session and returns the new clone. A
304 fresh session is created if the optional session parameter is not
305 provided. The function will fail if a session is provided and has
308 RATIONALE: SQLAlchemy's session is not thread safe. This method clones
309 an existing object to allow several threads to work with their own
310 instances of an ORMObject.
312 WARNING: Only persistent (committed) objects can be cloned. Changes
313 made to the original object that are not committed yet will get lost.
314 The session of the new object will always be rolled back to avoid
318 if self.session() is None:
319 raise RuntimeError( \
320 'Method clone() failed for detached object:\n%s' % self)
321 self.session().flush()
322 mapper = object_mapper(self)
323 primary_key = mapper.primary_key_from_instance(self)
324 object_class = self.__class__
326 session = DBConn().session()
327 elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
328 raise RuntimeError( \
329 'Method clone() failed due to unflushed changes in session.')
330 new_object = session.query(object_class).get(primary_key)
332 if new_object is None:
333 raise RuntimeError( \
334 'Method clone() failed for non-persistent object:\n%s' % self)
337 __all__.append('ORMObject')
339 ################################################################################
341 class Validator(MapperExtension):
343 This class calls the validate() method for each instance for the
344 'before_update' and 'before_insert' events. A global object validator is
345 used for configuring the individual mappers.
348 def before_update(self, mapper, connection, instance):
352 def before_insert(self, mapper, connection, instance):
356 validator = Validator()
358 ################################################################################
class Architecture(ORMObject):
    """ORM class for a single Debian architecture (e.g. 'amd64').

    Instances compare equal/unequal against plain strings via their
    arch_string; comparison with any other type is delegated back to
    Python's normal comparison machinery.
    """

    def __init__(self, arch_string = None, description = None):
        # Architecture name plus an optional human-readable description.
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        if not isinstance(val, str):
            # Signal Python to fall back to the normal comparison operator.
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        if not isinstance(val, str):
            # Signal Python to fall back to the normal comparison operator.
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # Most important property first; ORMObject uses it for repr().
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        # arch_string must never be NULL when validated.
        return ['arch_string']
383 __all__.append('Architecture')
386 def get_architecture(architecture, session=None):
388 Returns database id for given C{architecture}.
390 @type architecture: string
391 @param architecture: The name of the architecture
393 @type session: Session
394 @param session: Optional SQLA session object (a temporary one will be
395 generated if not supplied)
398 @return: Architecture object for the given arch (None if not present)
401 q = session.query(Architecture).filter_by(arch_string=architecture)
405 except NoResultFound:
408 __all__.append('get_architecture')
410 # TODO: should be removed because the implementation is too trivial
412 def get_architecture_suites(architecture, session=None):
414 Returns list of Suite objects for given C{architecture} name
416 @type architecture: str
417 @param architecture: Architecture name to search for
419 @type session: Session
420 @param session: Optional SQL session object (a temporary one will be
421 generated if not supplied)
424 @return: list of Suite objects for the given name (may be empty)
427 return get_architecture(architecture, session).suites
429 __all__.append('get_architecture_suites')
431 ################################################################################
433 class Archive(object):
434 def __init__(self, *args, **kwargs):
438 return '<Archive %s>' % self.archive_name
440 __all__.append('Archive')
443 def get_archive(archive, session=None):
445 returns database id for given C{archive}.
447 @type archive: string
448 @param archive: the name of the arhive
450 @type session: Session
451 @param session: Optional SQLA session object (a temporary one will be
452 generated if not supplied)
455 @return: Archive object for the given name (None if not present)
458 archive = archive.lower()
460 q = session.query(Archive).filter_by(archive_name=archive)
464 except NoResultFound:
467 __all__.append('get_archive')
469 ################################################################################
471 class BinContents(ORMObject):
472 def __init__(self, file = None, binary = None):
476 def properties(self):
477 return ['file', 'binary']
479 __all__.append('BinContents')
481 ################################################################################
483 class DBBinary(ORMObject):
484 def __init__(self, package = None, source = None, version = None, \
485 maintainer = None, architecture = None, poolfile = None, \
487 self.package = package
489 self.version = version
490 self.maintainer = maintainer
491 self.architecture = architecture
492 self.poolfile = poolfile
493 self.binarytype = binarytype
495 def properties(self):
496 return ['package', 'version', 'maintainer', 'source', 'architecture', \
497 'poolfile', 'binarytype', 'fingerprint', 'install_date', \
498 'suites_count', 'binary_id', 'contents_count', 'extra_sources']
500 def not_null_constraints(self):
501 return ['package', 'version', 'maintainer', 'source', 'poolfile', \
504 metadata = association_proxy('key', 'value')
506 def get_component_name(self):
507 return self.poolfile.location.component.component_name
509 def scan_contents(self):
511 Yields the contents of the package. Only regular files are yielded and
512 the path names are normalized after converting them from either utf-8
513 or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the
514 package does not contain any regular file.
516 fullpath = self.poolfile.fullpath
517 dpkg = Popen(['dpkg-deb', '--fsys-tarfile', fullpath], stdout = PIPE)
518 tar = TarFile.open(fileobj = dpkg.stdout, mode = 'r|')
519 for member in tar.getmembers():
520 if not member.isdir():
521 name = normpath(member.name)
522 # enforce proper utf-8 encoding
525 except UnicodeDecodeError:
526 name = name.decode('iso8859-1').encode('utf-8')
532 __all__.append('DBBinary')
535 def get_suites_binary_in(package, session=None):
537 Returns list of Suite objects which given C{package} name is in
540 @param package: DBBinary package name to search for
543 @return: list of Suite objects for the given package
546 return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()
548 __all__.append('get_suites_binary_in')
551 def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
553 Returns the component name of the newest binary package in suite_list or
554 None if no package is found. The result can be optionally filtered by a list
555 of architecture names.
558 @param package: DBBinary package name to search for
560 @type suite_list: list of str
561 @param suite_list: list of suite_name items
563 @type arch_list: list of str
564 @param arch_list: optional list of arch_string items that defaults to []
566 @rtype: str or NoneType
567 @return: name of component or None
570 q = session.query(DBBinary).filter_by(package = package). \
571 join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
572 if len(arch_list) > 0:
573 q = q.join(DBBinary.architecture). \
574 filter(Architecture.arch_string.in_(arch_list))
575 binary = q.order_by(desc(DBBinary.version)).first()
579 return binary.get_component_name()
581 __all__.append('get_component_by_package_suite')
583 ################################################################################
585 class BinaryACL(object):
586 def __init__(self, *args, **kwargs):
590 return '<BinaryACL %s>' % self.binary_acl_id
592 __all__.append('BinaryACL')
594 ################################################################################
596 class BinaryACLMap(object):
597 def __init__(self, *args, **kwargs):
601 return '<BinaryACLMap %s>' % self.binary_acl_map_id
603 __all__.append('BinaryACLMap')
605 ################################################################################
610 ArchiveDir "%(archivepath)s";
611 OverrideDir "%(overridedir)s";
612 CacheDir "%(cachedir)s";
617 Packages::Compress ". bzip2 gzip";
618 Sources::Compress ". bzip2 gzip";
623 bindirectory "incoming"
628 BinOverride "override.sid.all3";
629 BinCacheDB "packages-accepted.db";
631 FileList "%(filelist)s";
634 Packages::Extensions ".deb .udeb";
637 bindirectory "incoming/"
640 BinOverride "override.sid.all3";
641 SrcOverride "override.sid.all3.src";
642 FileList "%(filelist)s";
646 class BuildQueue(object):
647 def __init__(self, *args, **kwargs):
651 return '<BuildQueue %s>' % self.queue_name
653 def write_metadata(self, starttime, force=False):
654 # Do we write out metafiles?
655 if not (force or self.generate_metadata):
658 session = DBConn().session().object_session(self)
660 fl_fd = fl_name = ac_fd = ac_name = None
662 arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
663 startdir = os.getcwd()
666 # Grab files we want to include
667 newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
668 # Write file list with newer files
669 (fl_fd, fl_name) = mkstemp()
671 os.write(fl_fd, '%s\n' % n.fullpath)
676 # Write minimal apt.conf
677 # TODO: Remove hardcoding from template
678 (ac_fd, ac_name) = mkstemp()
679 os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
681 'cachedir': cnf["Dir::Cache"],
682 'overridedir': cnf["Dir::Override"],
686 # Run apt-ftparchive generate
687 os.chdir(os.path.dirname(ac_name))
688 os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
690 # Run apt-ftparchive release
691 # TODO: Eww - fix this
692 bname = os.path.basename(self.path)
696 # We have to remove the Release file otherwise it'll be included in the
699 os.unlink(os.path.join(bname, 'Release'))
703 os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
705 # Crude hack with open and append, but this whole section is and should be redone.
706 if self.notautomatic:
707 release=open("Release", "a")
708 release.write("NotAutomatic: yes")
713 keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
714 if cnf.has_key("Dinstall::SigningPubKeyring"):
715 keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
717 os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
719 # Move the files if we got this far
720 os.rename('Release', os.path.join(bname, 'Release'))
722 os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
724 # Clean up any left behind files
751 def clean_and_update(self, starttime, Logger, dryrun=False):
752 """WARNING: This routine commits for you"""
753 session = DBConn().session().object_session(self)
755 if self.generate_metadata and not dryrun:
756 self.write_metadata(starttime)
758 # Grab files older than our execution time
759 older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
765 Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
767 Logger.log(["I: Removing %s from the queue" % o.fullpath])
768 os.unlink(o.fullpath)
771 # If it wasn't there, don't worry
772 if e.errno == ENOENT:
775 # TODO: Replace with proper logging call
776 Logger.log(["E: Could not remove %s" % o.fullpath])
783 for f in os.listdir(self.path):
784 if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
788 r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
789 except NoResultFound:
790 fp = os.path.join(self.path, f)
792 Logger.log(["I: Would remove unused link %s" % fp])
794 Logger.log(["I: Removing unused link %s" % fp])
798 Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
800 def add_file_from_pool(self, poolfile):
801 """Copies a file into the pool. Assumes that the PoolFile object is
802 attached to the same SQLAlchemy session as the Queue object is.
804 The caller is responsible for committing after calling this function."""
805 poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
807 # Check if we have a file of this name or this ID already
808 for f in self.queuefiles:
809 if f.fileid is not None and f.fileid == poolfile.file_id or \
810 f.poolfile.filename == poolfile_basename:
811 # In this case, update the BuildQueueFile entry so we
812 # don't remove it too early
813 f.lastused = datetime.now()
814 DBConn().session().object_session(poolfile).add(f)
817 # Prepare BuildQueueFile object
818 qf = BuildQueueFile()
819 qf.build_queue_id = self.queue_id
820 qf.lastused = datetime.now()
821 qf.filename = poolfile_basename
823 targetpath = poolfile.fullpath
824 queuepath = os.path.join(self.path, poolfile_basename)
828 # We need to copy instead of symlink
830 utils.copy(targetpath, queuepath)
831 # NULL in the fileid field implies a copy
834 os.symlink(targetpath, queuepath)
835 qf.fileid = poolfile.file_id
839 # Get the same session as the PoolFile is using and add the qf to it
840 DBConn().session().object_session(poolfile).add(qf)
845 __all__.append('BuildQueue')
848 def get_build_queue(queuename, session=None):
850 Returns BuildQueue object for given C{queue name}, creating it if it does not
853 @type queuename: string
854 @param queuename: The name of the queue
856 @type session: Session
857 @param session: Optional SQLA session object (a temporary one will be
858 generated if not supplied)
861 @return: BuildQueue object for the given queue
864 q = session.query(BuildQueue).filter_by(queue_name=queuename)
868 except NoResultFound:
871 __all__.append('get_build_queue')
873 ################################################################################
875 class BuildQueueFile(object):
876 def __init__(self, *args, **kwargs):
880 return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
884 return os.path.join(self.buildqueue.path, self.filename)
887 __all__.append('BuildQueueFile')
889 ################################################################################
891 class ChangePendingBinary(object):
892 def __init__(self, *args, **kwargs):
896 return '<ChangePendingBinary %s>' % self.change_pending_binary_id
898 __all__.append('ChangePendingBinary')
900 ################################################################################
902 class ChangePendingFile(object):
903 def __init__(self, *args, **kwargs):
907 return '<ChangePendingFile %s>' % self.change_pending_file_id
909 __all__.append('ChangePendingFile')
911 ################################################################################
913 class ChangePendingSource(object):
914 def __init__(self, *args, **kwargs):
918 return '<ChangePendingSource %s>' % self.change_pending_source_id
920 __all__.append('ChangePendingSource')
922 ################################################################################
class Component(ORMObject):
    """ORM class for an archive component (e.g. 'main', 'contrib').

    Instances compare equal/unequal against plain strings via their
    component_name; comparison with any other type is delegated back
    to Python's normal comparison machinery.
    """

    def __init__(self, component_name = None):
        # Name of the component within the archive.
        self.component_name = component_name

    def __eq__(self, val):
        if not isinstance(val, str):
            # Signal Python to fall back to the normal comparison operator.
            return NotImplemented
        return self.component_name == val

    def __ne__(self, val):
        if not isinstance(val, str):
            # Signal Python to fall back to the normal comparison operator.
            return NotImplemented
        return self.component_name != val

    def properties(self):
        # Most important property first; ORMObject uses it for repr().
        return ['component_name', 'component_id', 'description', \
            'location_count', 'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        # component_name must never be NULL when validated.
        return ['component_name']
948 __all__.append('Component')
951 def get_component(component, session=None):
953 Returns database id for given C{component}.
955 @type component: string
956 @param component: The name of the override type
959 @return: the database id for the given component
962 component = component.lower()
964 q = session.query(Component).filter_by(component_name=component)
968 except NoResultFound:
971 __all__.append('get_component')
973 ################################################################################
975 class DBConfig(object):
976 def __init__(self, *args, **kwargs):
980 return '<DBConfig %s>' % self.name
982 __all__.append('DBConfig')
984 ################################################################################
987 def get_or_set_contents_file_id(filename, session=None):
989 Returns database id for given filename.
991 If no matching file is found, a row is inserted.
993 @type filename: string
994 @param filename: The filename
995 @type session: SQLAlchemy
996 @param session: Optional SQL session object (a temporary one will be
997 generated if not supplied). If not passed, a commit will be performed at
998 the end of the function, otherwise the caller is responsible for commiting.
1001 @return: the database id for the given component
1004 q = session.query(ContentFilename).filter_by(filename=filename)
1007 ret = q.one().cafilename_id
1008 except NoResultFound:
1009 cf = ContentFilename()
1010 cf.filename = filename
1012 session.commit_or_flush()
1013 ret = cf.cafilename_id
1017 __all__.append('get_or_set_contents_file_id')
1020 def get_contents(suite, overridetype, section=None, session=None):
1022 Returns contents for a suite / overridetype combination, limiting
1023 to a section if not None.
1026 @param suite: Suite object
1028 @type overridetype: OverrideType
1029 @param overridetype: OverrideType object
1031 @type section: Section
1032 @param section: Optional section object to limit results to
1034 @type session: SQLAlchemy
1035 @param session: Optional SQL session object (a temporary one will be
1036 generated if not supplied)
1038 @rtype: ResultsProxy
1039 @return: ResultsProxy object set up to return tuples of (filename, section,
1043 # find me all of the contents for a given suite
1044 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
1048 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
1049 JOIN content_file_names n ON (c.filename=n.id)
1050 JOIN binaries b ON (b.id=c.binary_pkg)
1051 JOIN override o ON (o.package=b.package)
1052 JOIN section s ON (s.id=o.section)
1053 WHERE o.suite = :suiteid AND o.type = :overridetypeid
1054 AND b.type=:overridetypename"""
1056 vals = {'suiteid': suite.suite_id,
1057 'overridetypeid': overridetype.overridetype_id,
1058 'overridetypename': overridetype.overridetype}
1060 if section is not None:
1061 contents_q += " AND s.id = :sectionid"
1062 vals['sectionid'] = section.section_id
1064 contents_q += " ORDER BY fn"
1066 return session.execute(contents_q, vals)
1068 __all__.append('get_contents')
1070 ################################################################################
1072 class ContentFilepath(object):
1073 def __init__(self, *args, **kwargs):
1077 return '<ContentFilepath %s>' % self.filepath
1079 __all__.append('ContentFilepath')
1082 def get_or_set_contents_path_id(filepath, session=None):
1084 Returns database id for given path.
1086 If no matching file is found, a row is inserted.
1088 @type filepath: string
1089 @param filepath: The filepath
1091 @type session: SQLAlchemy
1092 @param session: Optional SQL session object (a temporary one will be
1093 generated if not supplied). If not passed, a commit will be performed at
1094 the end of the function, otherwise the caller is responsible for commiting.
1097 @return: the database id for the given path
1100 q = session.query(ContentFilepath).filter_by(filepath=filepath)
1103 ret = q.one().cafilepath_id
1104 except NoResultFound:
1105 cf = ContentFilepath()
1106 cf.filepath = filepath
1108 session.commit_or_flush()
1109 ret = cf.cafilepath_id
1113 __all__.append('get_or_set_contents_path_id')
1115 ################################################################################
1117 class ContentAssociation(object):
1118 def __init__(self, *args, **kwargs):
1122 return '<ContentAssociation %s>' % self.ca_id
1124 __all__.append('ContentAssociation')
1126 def insert_content_paths(binary_id, fullpaths, session=None):
1128 Make sure given path is associated with given binary id
1130 @type binary_id: int
1131 @param binary_id: the id of the binary
1132 @type fullpaths: list
1133 @param fullpaths: the list of paths of the file being associated with the binary
1134 @type session: SQLAlchemy session
1135 @param session: Optional SQLAlchemy session. If this is passed, the caller
1136 is responsible for ensuring a transaction has begun and committing the
1137 results or rolling back based on the result code. If not passed, a commit
1138 will be performed at the end of the function, otherwise the caller is
1139 responsible for commiting.
1141 @return: True upon success
1144 privatetrans = False
1146 session = DBConn().session()
1151 def generate_path_dicts():
1152 for fullpath in fullpaths:
1153 if fullpath.startswith( './' ):
1154 fullpath = fullpath[2:]
1156 yield {'filename':fullpath, 'id': binary_id }
1158 for d in generate_path_dicts():
1159 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
1168 traceback.print_exc()
1170 # Only rollback if we set up the session ourself
1177 __all__.append('insert_content_paths')
1179 ################################################################################
1181 class DSCFile(object):
1182 def __init__(self, *args, **kwargs):
1186 return '<DSCFile %s>' % self.dscfile_id
1188 __all__.append('DSCFile')
1191 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
1193 Returns a list of DSCFiles which may be empty
1195 @type dscfile_id: int (optional)
1196 @param dscfile_id: the dscfile_id of the DSCFiles to find
1198 @type source_id: int (optional)
1199 @param source_id: the source id related to the DSCFiles to find
1201 @type poolfile_id: int (optional)
1202 @param poolfile_id: the poolfile id related to the DSCFiles to find
1205 @return: Possibly empty list of DSCFiles
1208 q = session.query(DSCFile)
1210 if dscfile_id is not None:
1211 q = q.filter_by(dscfile_id=dscfile_id)
1213 if source_id is not None:
1214 q = q.filter_by(source_id=source_id)
1216 if poolfile_id is not None:
1217 q = q.filter_by(poolfile_id=poolfile_id)
1221 __all__.append('get_dscfiles')
1223 ################################################################################
1225 class PoolFile(ORMObject):
1226 def __init__(self, filename = None, location = None, filesize = -1, \
1228 self.filename = filename
1229 self.location = location
1230 self.filesize = filesize
1231 self.md5sum = md5sum
1235 return os.path.join(self.location.path, self.filename)
1237 def is_valid(self, filesize = -1, md5sum = None):
1238 return self.filesize == long(filesize) and self.md5sum == md5sum
1240 def properties(self):
1241 return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
1242 'sha256sum', 'location', 'source', 'binary', 'last_used']
1244 def not_null_constraints(self):
1245 return ['filename', 'md5sum', 'location']
1247 __all__.append('PoolFile')
# Look up a pool file by (location_id, filename) and validate its size and
# md5sum against the caller's expectations.
1250 def check_poolfile(filename, filesize, md5sum, location_id, session=None):
1253 (ValidFileFound [boolean], PoolFile object or None)
1255 @type filename: string
1256 @param filename: the filename of the file to check against the DB
1259 @param filesize: the size of the file to check against the DB
1261 @type md5sum: string
1262 @param md5sum: the md5sum of the file to check against the DB
1264 @type location_id: int
1265 @param location_id: the id of the location to look in
1268 @return: Tuple of length 2.
1269 - If valid pool file found: (C{True}, C{PoolFile object})
1270 - If valid pool file not found:
1271 - (C{False}, C{None}) if no file found
1272 - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
# first() yields None when no row matches; is_valid() then separates the
# (True, obj) case from the mismatch case.
1275 poolfile = session.query(Location).get(location_id). \
1276 files.filter_by(filename=filename).first()
1278 if poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum):
# 'valid' is presumably initialised/assigned on lines elided from this
# excerpt — TODO confirm against full source.
1281 return (valid, poolfile)
1283 __all__.append('check_poolfile')
1285 # TODO: the implementation can trivially be inlined at the place where the
1286 # function is called
# Primary-key lookup; Query.get returns None for an unknown id.
1288 def get_poolfile_by_id(file_id, session=None):
1290 Returns a PoolFile objects or None for the given id
1293 @param file_id: the id of the file to look for
1295 @rtype: PoolFile or None
1296 @return: either the PoolFile object or None
1299 return session.query(PoolFile).get(file_id)
1301 __all__.append('get_poolfile_by_id')
# Find pool files whose path ends with the given basename.
1304 def get_poolfile_like_name(filename, session=None):
1306 Returns an array of PoolFile objects which are like the given name
1308 @type filename: string
1309 @param filename: the filename of the file to check against the DB
1312 @return: array of PoolFile objects
1315 # TODO: There must be a way of properly using bind parameters with %FOO%
# The LIKE pattern '%/<name>' matches the basename under any directory prefix.
1316 q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1320 __all__.append('get_poolfile_like_name')
# Insert a new row into the files table from a checksum dict as produced by
# the upload-processing code.
1323 def add_poolfile(filename, datadict, location_id, session=None):
1325 Add a new file to the pool
1327 @type filename: string
1328 @param filename: filename
1330 @type datadict: dict
1331 @param datadict: dict with needed data
1333 @type location_id: int
1334 @param location_id: database id of the location
1337 @return: the PoolFile object created
# datadict must carry "size", "md5sum", "sha1sum" and "sha256sum" keys;
# a KeyError here means the caller's checksum dict is incomplete.
1339 poolfile = PoolFile()
1340 poolfile.filename = filename
1341 poolfile.filesize = datadict["size"]
1342 poolfile.md5sum = datadict["md5sum"]
1343 poolfile.sha1sum = datadict["sha1sum"]
1344 poolfile.sha256sum = datadict["sha256sum"]
1345 poolfile.location_id = location_id
1347 session.add(poolfile)
1348 # Flush to get a file id (NB: This is not a commit)
1353 __all__.append('add_poolfile')
1355 ################################################################################
# ORM row object for the fingerprint table (GPG key fingerprints).
1357 class Fingerprint(ORMObject):
1358 def __init__(self, fingerprint = None):
1359 self.fingerprint = fingerprint
# Attributes exposed by ORMObject's generic repr/serialisation machinery.
1361 def properties(self):
1362 return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
1365 def not_null_constraints(self):
1366 return ['fingerprint']
1368 __all__.append('Fingerprint')
# Read-only lookup of a Fingerprint row by its fpr string; returns None on a miss.
1371 def get_fingerprint(fpr, session=None):
1373 Returns Fingerprint object for given fpr.
1376 @param fpr: The fpr to find / add
1378 @type session: SQLAlchemy
1379 @param session: Optional SQL session object (a temporary one will be
1380 generated if not supplied).
1383 @return: the Fingerprint object for the given fpr or None
1386 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
# The try/q.one() lines are elided here; NoResultFound maps a miss to None.
1390 except NoResultFound:
1395 __all__.append('get_fingerprint')
# Get-or-create for Fingerprint rows: inserts on a miss, then flushes (or
# commits, for a function-owned session) so the new id is usable at once.
1398 def get_or_set_fingerprint(fpr, session=None):
1400 Returns Fingerprint object for given fpr.
1402 If no matching fpr is found, a row is inserted.
1405 @param fpr: The fpr to find / add
1407 @type session: SQLAlchemy
1408 @param session: Optional SQL session object (a temporary one will be
1409 generated if not supplied). If not passed, a commit will be performed at
1410 the end of the function, otherwise the caller is responsible for committing.
1411 A flush will be performed either way.
1414 @return: the Fingerprint object for the given fpr
1417 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
# Miss: create the row and publish it via commit_or_flush().
1421 except NoResultFound:
1422 fingerprint = Fingerprint()
1423 fingerprint.fingerprint = fpr
1424 session.add(fingerprint)
1425 session.commit_or_flush()
1430 __all__.append('get_or_set_fingerprint')
1432 ################################################################################
1434 # Helper routine for Keyring class
# Assemble a display name from an LDAP entry's cn/mn/sn attributes, skipping
# empty values and the "-" placeholder.  The accumulation into 'name' happens
# on lines elided from this excerpt.
1435 def get_ldap_name(entry):
1437 for k in ["cn", "mn", "sn"]:
1439 if ret and ret[0] != "" and ret[0] != "-":
1441 return " ".join(name)
1443 ################################################################################
# ORM row object for the keyrings table, plus helpers that parse the output
# of "gpg --with-colons" and reconcile key holders with LDAP / uid rows.
1445 class Keyring(object):
# Command template; %s is substituted with the on-disk keyring path.
1446 gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
1447 " --with-colons --fingerprint --fingerprint"
1452 def __init__(self, *args, **kwargs):
1456 return '<Keyring %s>' % self.keyring_name
# Undo gpg's \xNN escaping: every odd element of the split is a "\xNN"
# token, replaced by the character it encodes.
1458 def de_escape_gpg_str(self, txt):
1459 esclist = re.split(r'(\\x..)', txt)
1460 for x in range(1,len(esclist),2):
1461 esclist[x] = "%c" % (int(esclist[x][2:],16))
1462 return "".join(esclist)
1464 def parse_address(self, uid):
1465 """parses uid and returns a tuple of real name and email address"""
1467 (name, address) = email.Utils.parseaddr(uid)
# Strip any parenthesised comment from the real-name part, then de-escape.
1468 name = re.sub(r"\s*[(].*[)]", "", name)
1469 name = self.de_escape_gpg_str(name)
1472 return (name, address)
# Populate self.keys/self.fpr_lookup from gpg's colon-separated listing.
# NOTE(review): several lines (e.g. where 'key' is assigned from the pub
# record) are elided from this excerpt.
1474 def load_keys(self, keyring):
1475 if not self.keyring_id:
1476 raise Exception('Must be initialized with database information')
1478 k = os.popen(self.gpg_invocation % keyring, "r")
1482 for line in k.xreadlines():
1483 field = line.split(":")
# "pub" record: primary key — field 9 carries the uid string.
1484 if field[0] == "pub":
1487 (name, addr) = self.parse_address(field[9])
1489 self.keys[key]["email"] = addr
1490 self.keys[key]["name"] = name
1491 self.keys[key]["fingerprints"] = []
# "sub" record: a capability string containing "s" marks a signing subkey.
1493 elif key and field[0] == "sub" and len(field) >= 12:
1494 signingkey = ("s" in field[11])
# Extra "uid" records may supply an email when the pub record lacked one.
1495 elif key and field[0] == "uid":
1496 (name, addr) = self.parse_address(field[9])
1497 if "email" not in self.keys[key] and "@" in addr:
1498 self.keys[key]["email"] = addr
1499 self.keys[key]["name"] = name
# "fpr" record following a signing (sub)key: record the fingerprint both
# per-key and in the reverse lookup table.
1500 elif signingkey and field[0] == "fpr":
1501 self.keys[key]["fingerprints"].append(field[9])
1502 self.fpr_lookup[field[9]] = key
# Match loaded keys against Debian's LDAP directory and return
# (byname, byuid) mappings between uid rows and account names.
1504 def import_users_from_ldap(self, session):
1508 LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
1509 LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
# Anonymous bind; the search is restricted to entries with a key
# fingerprint and the configured valid GID.
1511 l = ldap.open(LDAPServer)
1512 l.simple_bind_s("","")
1513 Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
1514 "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
1515 ["uid", "keyfingerprint", "cn", "mn", "sn"])
1517 ldap_fin_uid_id = {}
1524 uid = entry["uid"][0]
1525 name = get_ldap_name(entry)
1526 fingerprints = entry["keyFingerPrint"]
1528 for f in fingerprints:
1529 key = self.fpr_lookup.get(f, None)
1530 if key not in self.keys:
1532 self.keys[key]["uid"] = uid
# Ensure a uid row exists and index it both ways.
1536 keyid = get_or_set_uid(uid, session).uid_id
1537 byuid[keyid] = (uid, name)
1538 byname[uid] = (keyid, name)
1540 return (byname, byuid)
# Derive uid rows purely from key email addresses; 'format' is a %-template
# (e.g. "%s") applied to each address.  Keys without an email get the
# "invalid-uid" placeholder.
1542 def generate_users_from_keyring(self, format, session):
1546 for x in self.keys.keys():
1547 if "email" not in self.keys[x]:
1549 self.keys[x]["uid"] = format % "invalid-uid"
1551 uid = format % self.keys[x]["email"]
1552 keyid = get_or_set_uid(uid, session).uid_id
1553 byuid[keyid] = (uid, self.keys[x]["name"])
1554 byname[uid] = (keyid, self.keys[x]["name"])
1555 self.keys[x]["uid"] = uid
# Fallback branch (condition elided): register the placeholder uid.
1558 uid = format % "invalid-uid"
1559 keyid = get_or_set_uid(uid, session).uid_id
1560 byuid[keyid] = (uid, "ungeneratable user id")
1561 byname[uid] = (keyid, "ungeneratable user id")
1563 return (byname, byuid)
1565 __all__.append('Keyring')
# Read-only lookup of a Keyring row by name; returns None on a miss.
1568 def get_keyring(keyring, session=None):
1570 If C{keyring} does not have an entry in the C{keyrings} table yet, return None
1571 If C{keyring} already has an entry, simply return the existing Keyring
1573 @type keyring: string
1574 @param keyring: the keyring name
1577 @return: the Keyring object for this keyring
1580 q = session.query(Keyring).filter_by(keyring_name=keyring)
# The try/q.one() lines are elided here; NoResultFound maps a miss to None.
1584 except NoResultFound:
1587 __all__.append('get_keyring')
# ORM row object for the keyring_acl_map table (keyring -> ACL mapping).
1591 class KeyringACLMap(object):
1592 def __init__(self, *args, **kwargs):
# __repr__ (def line elided) — identifies the row by its map id.
1596 return '<KeyringACLMap %s>' % self.keyring_acl_map_id
1598 __all__.append('KeyringACLMap')
1600 ################################################################################
# ORM row object for the changes table: one uploaded .changes file.
1602 class DBChange(object):
1603 def __init__(self, *args, **kwargs):
1607 return '<DBChange %s>' % self.changesname
# Detach this upload from a policy queue: drop its pool/pending file links
# (deletion statements elided in this excerpt) and clear the queue markers.
1609 def clean_from_queue(self):
1610 session = DBConn().session().object_session(self)
1612 # Remove changes_pool_files entries
1615 # Remove changes_pending_files references
1618 # Clear out of queue
1619 self.in_queue = None
1620 self.approved_for_id = None
1622 __all__.append('DBChange')
# Read-only lookup of a DBChange row by .changes filename.
1625 def get_dbchange(filename, session=None):
1627 returns DBChange object for given C{filename}.
1629 @type filename: string
1630 @param filename: the name of the file
1632 @type session: Session
1633 @param session: Optional SQLA session object (a temporary one will be
1634 generated if not supplied)
1637 @return: DBChange object for the given filename (C{None} if not present)
1640 q = session.query(DBChange).filter_by(changesname=filename)
# The try/q.one() lines are elided here; NoResultFound maps a miss to None.
1644 except NoResultFound:
1647 __all__.append('get_dbchange')
1649 ################################################################################
# ORM row object for the location table: a pool directory within an archive.
1651 class Location(ORMObject):
1652 def __init__(self, path = None, component = None):
1654 self.component = component
1655 # the column 'type' should go away, see comment at mapper
1656 self.archive_type = 'pool'
1658 def properties(self):
1659 return ['path', 'location_id', 'archive_type', 'component', \
1662 def not_null_constraints(self):
1663 return ['path', 'archive_type']
1665 __all__.append('Location')
# Look up a Location by path, optionally restricted to a component and/or
# archive; returns None on a miss.
1668 def get_location(location, component=None, archive=None, session=None):
1670 Returns Location object for the given combination of location, component
1673 @type location: string
1674 @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}
1676 @type component: string
1677 @param component: the component name (if None, no restriction applied)
1679 @type archive: string
1680 @param archive: the archive name (if None, no restriction applied)
1682 @rtype: Location / None
1683 @return: Either a Location object or None if one can't be found
1686 q = session.query(Location).filter_by(path=location)
# Joins are added only for the restrictions the caller requested.
1688 if archive is not None:
1689 q = q.join(Archive).filter_by(archive_name=archive)
1691 if component is not None:
1692 q = q.join(Component).filter_by(component_name=component)
# The try/q.one() lines are elided here; NoResultFound maps a miss to None.
1696 except NoResultFound:
1699 __all__.append('get_location')
1701 ################################################################################
# ORM row object for the maintainer table ("Name <email>" strings).
1703 class Maintainer(ORMObject):
1704 def __init__(self, name = None):
1707 def properties(self):
1708 return ['name', 'maintainer_id']
1710 def not_null_constraints(self):
# Split the stored "Name <email>" string via fix_maintainer(); empty
# 4-tuple when no name is set.
1713 def get_split_maintainer(self):
1714 if not hasattr(self, 'name') or self.name is None:
1715 return ('', '', '', '')
1717 return fix_maintainer(self.name.strip())
1719 __all__.append('Maintainer')
# Get-or-create for Maintainer rows; mirrors get_or_set_fingerprint().
1722 def get_or_set_maintainer(name, session=None):
1724 Returns Maintainer object for given maintainer name.
1726 If no matching maintainer name is found, a row is inserted.
1729 @param name: The maintainer name to add
1731 @type session: SQLAlchemy
1732 @param session: Optional SQL session object (a temporary one will be
1733 generated if not supplied). If not passed, a commit will be performed at
1734 the end of the function, otherwise the caller is responsible for committing.
1735 A flush will be performed either way.
1738 @return: the Maintainer object for the given maintainer
1741 q = session.query(Maintainer).filter_by(name=name)
# Miss: create the row and publish it via commit_or_flush().
1744 except NoResultFound:
1745 maintainer = Maintainer()
1746 maintainer.name = name
1747 session.add(maintainer)
1748 session.commit_or_flush()
1753 __all__.append('get_or_set_maintainer')
# Primary-key lookup; Query.get returns None for an unknown id.
1756 def get_maintainer(maintainer_id, session=None):
1758 Return the name of the maintainer behind C{maintainer_id} or None if that
1759 maintainer_id is invalid.
1761 @type maintainer_id: int
1762 @param maintainer_id: the id of the maintainer
1765 @return: the Maintainer with this C{maintainer_id}
1768 return session.query(Maintainer).get(maintainer_id)
1770 __all__.append('get_maintainer')
1772 ################################################################################
# ORM row object for the new_comments table: an ftpmaster comment attached
# to a (package, version) pair in the NEW queue.
1774 class NewComment(object):
1775 def __init__(self, *args, **kwargs):
# __repr__ (def line elided).
1779 return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
1781 __all__.append('NewComment')
# Existence check: is there any NEW-queue comment for (package, version)?
1784 def has_new_comment(package, version, session=None):
1786 Returns true if the given combination of C{package}, C{version} has a comment.
1788 @type package: string
1789 @param package: name of the package
1791 @type version: string
1792 @param version: package version
1794 @type session: Session
1795 @param session: Optional SQLA session object (a temporary one will be
1796 generated if not supplied)
1802 q = session.query(NewComment)
1803 q = q.filter_by(package=package)
1804 q = q.filter_by(version=version)
# COUNT(*) > 0 folded to a bool; a boolean result is part of the contract.
1806 return bool(q.count() > 0)
1808 __all__.append('has_new_comment')
# Fetch NewComment rows, optionally narrowed by package, version and/or id.
# The final "return q.all()" appears to be elided from this excerpt.
1811 def get_new_comments(package=None, version=None, comment_id=None, session=None):
1813 Returns (possibly empty) list of NewComment objects for the given
1816 @type package: string (optional)
1817 @param package: name of the package
1819 @type version: string (optional)
1820 @param version: package version
1822 @type comment_id: int (optional)
1823 @param comment_id: An id of a comment
1825 @type session: Session
1826 @param session: Optional SQLA session object (a temporary one will be
1827 generated if not supplied)
1830 @return: A (possibly empty) list of NewComment objects will be returned
1833 q = session.query(NewComment)
# Each filter applies only when the caller supplied the argument.
1834 if package is not None: q = q.filter_by(package=package)
1835 if version is not None: q = q.filter_by(version=version)
1836 if comment_id is not None: q = q.filter_by(comment_id=comment_id)
1840 __all__.append('get_new_comments')
1842 ################################################################################
# ORM row object for the override table: per-suite/component placement
# (section, priority) of a package.
1845 class Override(ORMObject):
1846 def __init__(self, package = None, suite = None, component = None, overridetype = None, \
1846 section = None, priority = None):
1847 self.package = package
1849 self.component = component
1850 self.overridetype = overridetype
1851 self.section = section
1852 self.priority = priority
1854 def properties(self):
1855 return ['package', 'suite', 'component', 'overridetype', 'section', \
1858 def not_null_constraints(self):
1859 return ['package', 'suite', 'component', 'overridetype', 'section']
1861 __all__.append('Override')
# Fetch Override rows for a package, optionally limited to suites,
# components and/or override types (each accepts a scalar or a list).
1864 def get_override(package, suite=None, component=None, overridetype=None, session=None):
1866 Returns Override object for the given parameters
1868 @type package: string
1869 @param package: The name of the package
1871 @type suite: string, list or None
1872 @param suite: The name of the suite (or suites if a list) to limit to. If
1873 None, don't limit. Defaults to None.
1875 @type component: string, list or None
1876 @param component: The name of the component (or components if a list) to
1877 limit to. If None, don't limit. Defaults to None.
1879 @type overridetype: string, list or None
1880 @param overridetype: The name of the overridetype (or overridetypes if a list) to
1881 limit to. If None, don't limit. Defaults to None.
1883 @type session: Session
1884 @param session: Optional SQLA session object (a temporary one will be
1885 generated if not supplied)
1888 @return: A (possibly empty) list of Override objects will be returned
1891 q = session.query(Override)
1892 q = q.filter_by(package=package)
# Each restriction normalises a scalar to a one-element list, then joins
# the related table and filters with IN.
1894 if suite is not None:
1895 if not isinstance(suite, list): suite = [suite]
1896 q = q.join(Suite).filter(Suite.suite_name.in_(suite))
1898 if component is not None:
1899 if not isinstance(component, list): component = [component]
1900 q = q.join(Component).filter(Component.component_name.in_(component))
1902 if overridetype is not None:
1903 if not isinstance(overridetype, list): overridetype = [overridetype]
1904 q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))
1908 __all__.append('get_override')
1911 ################################################################################
# ORM row object for the override_type table (deb/udeb/dsc).
1913 class OverrideType(ORMObject):
1914 def __init__(self, overridetype = None):
1915 self.overridetype = overridetype
1917 def properties(self):
1918 return ['overridetype', 'overridetype_id', 'overrides_count']
1920 def not_null_constraints(self):
1921 return ['overridetype']
1923 __all__.append('OverrideType')
# Read-only lookup of an OverrideType row by name; returns None on a miss.
1926 def get_override_type(override_type, session=None):
1928 Returns OverrideType object for given C{override type}.
1930 @type override_type: string
1931 @param override_type: The name of the override type
1933 @type session: Session
1934 @param session: Optional SQLA session object (a temporary one will be
1935 generated if not supplied)
1938 @return: the database id for the given override type
1941 q = session.query(OverrideType).filter_by(overridetype=override_type)
# The try/q.one() lines are elided here; NoResultFound maps a miss to None.
1945 except NoResultFound:
1948 __all__.append('get_override_type')
1950 ################################################################################
# ORM row object for the policy_queue table (e.g. the NEW/byhand queues).
1952 class PolicyQueue(object):
1953 def __init__(self, *args, **kwargs):
# __repr__ (def line elided).
1957 return '<PolicyQueue %s>' % self.queue_name
1959 __all__.append('PolicyQueue')
# Read-only lookup of a PolicyQueue row by queue name; returns None on a miss.
1962 def get_policy_queue(queuename, session=None):
1964 Returns PolicyQueue object for given C{queue name}
1966 @type queuename: string
1967 @param queuename: The name of the queue
1969 @type session: Session
1970 @param session: Optional SQLA session object (a temporary one will be
1971 generated if not supplied)
1974 @return: PolicyQueue object for the given queue
1977 q = session.query(PolicyQueue).filter_by(queue_name=queuename)
# The try/q.one() lines are elided here; NoResultFound maps a miss to None.
1981 except NoResultFound:
1984 __all__.append('get_policy_queue')
# Read-only lookup of a PolicyQueue row by its on-disk path.
1987 def get_policy_queue_from_path(pathname, session=None):
1989 Returns PolicyQueue object for given C{path name}
1991 @type pathname: string
1992 @param pathname: The path
1994 @type session: Session
1995 @param session: Optional SQLA session object (a temporary one will be
1996 generated if not supplied)
1999 @return: PolicyQueue object for the given queue
2002 q = session.query(PolicyQueue).filter_by(path=pathname)
# The try/q.one() lines are elided here; NoResultFound maps a miss to None.
2006 except NoResultFound:
2009 __all__.append('get_policy_queue_from_path')
2011 ################################################################################
# ORM row object for the priority table; comparable against plain strings.
2013 class Priority(ORMObject):
2014 def __init__(self, priority = None, level = None):
2015 self.priority = priority
2018 def properties(self):
2019 return ['priority', 'priority_id', 'level', 'overrides_count']
2021 def not_null_constraints(self):
2022 return ['priority', 'level']
# Allow "prio == 'optional'" style comparisons against the name string.
2024 def __eq__(self, val):
2025 if isinstance(val, str):
2026 return (self.priority == val)
2027 # This signals to use the normal comparison operator
2028 return NotImplemented
2030 def __ne__(self, val):
2031 if isinstance(val, str):
2032 return (self.priority != val)
2033 # This signals to use the normal comparison operator
2034 return NotImplemented
2036 __all__.append('Priority')
# Read-only lookup of a Priority row by name; returns None on a miss.
2039 def get_priority(priority, session=None):
2041 Returns Priority object for given C{priority name}.
2043 @type priority: string
2044 @param priority: The name of the priority
2046 @type session: Session
2047 @param session: Optional SQLA session object (a temporary one will be
2048 generated if not supplied)
2051 @return: Priority object for the given priority
2054 q = session.query(Priority).filter_by(priority=priority)
# The try/q.one() lines are elided here; NoResultFound maps a miss to None.
2058 except NoResultFound:
2061 __all__.append('get_priority')
# Build a {priority name: priority_id} dict over the whole priority table.
# The 'ret = {}' / loop header / 'return ret' lines are elided here.
2064 def get_priorities(session=None):
2066 Returns dictionary of priority names -> id mappings
2068 @type session: Session
2069 @param session: Optional SQL session object (a temporary one will be
2070 generated if not supplied)
2073 @return: dictionary of priority names -> id mappings
2077 q = session.query(Priority)
2079 ret[x.priority] = x.priority_id
2083 __all__.append('get_priorities')
2085 ################################################################################
# ORM row object for the section table; comparable against plain strings.
2087 class Section(ORMObject):
2088 def __init__(self, section = None):
2089 self.section = section
2091 def properties(self):
2092 return ['section', 'section_id', 'overrides_count']
2094 def not_null_constraints(self):
# Allow "sec == 'devel'" style comparisons against the name string.
2097 def __eq__(self, val):
2098 if isinstance(val, str):
2099 return (self.section == val)
2100 # This signals to use the normal comparison operator
2101 return NotImplemented
2103 def __ne__(self, val):
2104 if isinstance(val, str):
2105 return (self.section != val)
2106 # This signals to use the normal comparison operator
2107 return NotImplemented
2109 __all__.append('Section')
# Read-only lookup of a Section row by name; returns None on a miss.
2112 def get_section(section, session=None):
2114 Returns Section object for given C{section name}.
2116 @type section: string
2117 @param section: The name of the section
2119 @type session: Session
2120 @param session: Optional SQLA session object (a temporary one will be
2121 generated if not supplied)
2124 @return: Section object for the given section name
2127 q = session.query(Section).filter_by(section=section)
# The try/q.one() lines are elided here; NoResultFound maps a miss to None.
2131 except NoResultFound:
2134 __all__.append('get_section')
# Build a {section name: section_id} dict over the whole section table.
# The 'ret = {}' / loop header / 'return ret' lines are elided here.
2137 def get_sections(session=None):
2139 Returns dictionary of section names -> id mappings
2141 @type session: Session
2142 @param session: Optional SQL session object (a temporary one will be
2143 generated if not supplied)
2146 @return: dictionary of section names -> id mappings
2150 q = session.query(Section)
2152 ret[x.section] = x.section_id
2156 __all__.append('get_sections')
2158 ################################################################################
# ORM row object for the src_contents table: one (file path, source) pair.
2160 class SrcContents(ORMObject):
2161 def __init__(self, file = None, source = None):
2163 self.source = source
2165 def properties(self):
2166 return ['file', 'source']
2168 __all__.append('SrcContents')
2170 ################################################################################
# ORM row object for the source table: one source package version.
2172 class DBSource(ORMObject):
2173 def __init__(self, source = None, version = None, maintainer = None, \
2174 changedby = None, poolfile = None, install_date = None):
2175 self.source = source
2176 self.version = version
2177 self.maintainer = maintainer
2178 self.changedby = changedby
2179 self.poolfile = poolfile
2180 self.install_date = install_date
2182 def properties(self):
2183 return ['source', 'source_id', 'maintainer', 'changedby', \
2184 'fingerprint', 'poolfile', 'version', 'suites_count', \
2185 'install_date', 'binaries_count']
# NOTE(review): 'install_date' is listed twice below — harmless but redundant.
2187 def not_null_constraints(self):
2188 return ['source', 'version', 'install_date', 'maintainer', \
2189 'changedby', 'poolfile', 'install_date']
# Proxy to the source_metadata key/value rows.
2191 metadata = association_proxy('key', 'value')
2193 def scan_contents(self):
2195 Returns a set of names for non directories. The path names are
2196 normalized after converting them from either utf-8 or iso8859-1
# Unpack the source package next to its pool file and walk its contents.
2199 fullpath = self.poolfile.fullpath
2200 from daklib.contents import UnpackedSource
2201 unpacked = UnpackedSource(fullpath)
2203 for name in unpacked.get_all_filenames():
2204 # enforce proper utf-8 encoding
2206 name.decode('utf-8')
# Fall back: re-encode latin-1 file names as utf-8.
2207 except UnicodeDecodeError:
2208 name = name.decode('iso8859-1').encode('utf-8')
2212 __all__.append('DBSource')
# Verify that the source for a binary-only upload exists in the archive,
# accepting either an exact version match or a bin-NMU of it, and following
# suite mappings (map / silent-map) recursively.
2215 def source_exists(source, source_version, suites = ["any"], session=None):
2217 Ensure that source exists somewhere in the archive for the binary
2218 upload being processed.
2219 1. exact match => 1.0-3
2220 2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
2222 @type source: string
2223 @param source: source name
2225 @type source_version: string
2226 @param source_version: expected source version
2229 @param suites: list of suites to check in, default I{any}
2231 @type session: Session
2232 @param session: Optional SQLA session object (a temporary one will be
2233 generated if not supplied)
2236 @return: returns 1 if a source with expected version is found, otherwise 0
# Strip the "+bN" bin-NMU suffix so both forms are acceptable.
2243 from daklib.regexes import re_bin_only_nmu
2244 orig_source_version = re_bin_only_nmu.sub('', source_version)
2246 for suite in suites:
2247 q = session.query(DBSource).filter_by(source=source). \
2248 filter(DBSource.version.in_([source_version, orig_source_version]))
2250 # source must exist in suite X, or in some other suite that's
2251 # mapped to X, recursively... silent-maps are counted too,
2252 # unreleased-maps aren't.
2253 maps = cnf.ValueList("SuiteMappings")[:]
2255 maps = [ m.split() for m in maps ]
2256 maps = [ (x[1], x[2]) for x in maps
2257 if x[0] == "map" or x[0] == "silent-map" ]
# Grow the suite set 's' (initialisation elided) until it is closed under
# the mapping relation, then restrict the query to it.
2259 for (from_, to) in maps:
2260 if from_ in s and to not in s:
2263 q = q.filter(DBSource.suites.any(Suite.suite_name.in_(s)))
2268 # No source found so return not ok
2273 __all__.append('source_exists')
# List every Suite that contains any version of the given source package.
2276 def get_suites_source_in(source, session=None):
2278 Returns list of Suite objects which given C{source} name is in
2281 @param source: DBSource package name to search for
2284 @return: list of Suite objects for the given source
2287 return session.query(Suite).filter(Suite.sources.any(source=source)).all()
2289 __all__.append('get_suites_source_in')
# Fetch DBSource rows by name, optionally narrowed by version and the
# dm_upload_allowed flag.  The final "return q.all()" appears elided here.
2292 def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
2294 Returns list of DBSource objects for given C{source} name and other parameters
2297 @param source: DBSource package name to search for
2299 @type version: str or None
2300 @param version: DBSource version name to search for or None if not applicable
2302 @type dm_upload_allowed: bool
2303 @param dm_upload_allowed: If None, no effect. If True or False, only
2304 return packages with that dm_upload_allowed setting
2306 @type session: Session
2307 @param session: Optional SQL session object (a temporary one will be
2308 generated if not supplied)
2311 @return: list of DBSource objects for the given name (may be empty)
2314 q = session.query(DBSource).filter_by(source=source)
2316 if version is not None:
2317 q = q.filter_by(version=version)
2319 if dm_upload_allowed is not None:
2320 q = q.filter_by(dm_upload_allowed=dm_upload_allowed)
2324 __all__.append('get_sources_from_name')
2326 # FIXME: This function fails badly if it finds more than 1 source package and
2327 # its implementation is trivial enough to be inlined.
# Resolve the single DBSource for (source, suite) via Suite.get_sources().
2329 def get_source_in_suite(source, suite, session=None):
2331 Returns a DBSource object for a combination of C{source} and C{suite}.
2333 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2334 - B{suite} - a suite name, eg. I{unstable}
2336 @type source: string
2337 @param source: source package name
2340 @param suite: the suite name
2343 @return: the version for I{source} in I{suite}
2347 q = get_suite(suite, session).get_sources(source)
# The try/q.one() lines are elided here; NoResultFound maps a miss to None.
2350 except NoResultFound:
2353 __all__.append('get_source_in_suite')
2355 ################################################################################
# Record an accepted .dsc upload in the database: create the DBSource row,
# its pool files, dsc_files links and src_uploaders entries.  Returns
# (source, dsc_component, dsc_location_id, pfs) for the caller to continue
# processing.  NOTE(review): many statements (object construction, loops,
# flushes) are elided in this excerpt — internal numbering jumps throughout.
2358 def add_dsc_to_db(u, filename, session=None):
2359 entry = u.pkg.files[filename]
# Populate the DBSource row from the parsed .dsc / .changes data.
2363 source.source = u.pkg.dsc["source"]
2364 source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
2365 source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
2366 source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
2367 source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
2368 source.install_date = datetime.now().date()
2370 dsc_component = entry["component"]
2371 dsc_location_id = entry["location id"]
2373 source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")
2375 # Set up a new poolfile if necessary
2376 if not entry.has_key("files id") or not entry["files id"]:
2377 filename = entry["pool name"] + filename
2378 poolfile = add_poolfile(filename, entry, dsc_location_id, session)
2380 pfs.append(poolfile)
2381 entry["files id"] = poolfile.file_id
2383 source.poolfile_id = entry["files id"]
# Associate the source with every suite named in the .changes distribution.
2386 suite_names = u.pkg.changes["distribution"].keys()
2387 source.suites = session.query(Suite). \
2388 filter(Suite.suite_name.in_(suite_names)).all()
2390 # Add the source files to the DB (files and dsc_files)
2392 dscfile.source_id = source.source_id
2393 dscfile.poolfile_id = entry["files id"]
2394 session.add(dscfile)
# One DSCFile link per file listed in the .dsc.
2396 for dsc_file, dentry in u.pkg.dsc_files.items():
2398 df.source_id = source.source_id
2400 # If the .orig tarball is already in the pool, it's
2401 # files id is stored in dsc_files by check_dsc().
2402 files_id = dentry.get("files id", None)
2404 # Find the entry in the files hash
2405 # TODO: Bail out here properly
2407 for f, e in u.pkg.files.items():
# Not known yet: look the file up in the pool by name/size/md5sum.
2412 if files_id is None:
2413 filename = dfentry["pool name"] + dsc_file
2415 (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
2416 # FIXME: needs to check for -1/-2 and or handle exception
2417 if found and obj is not None:
2418 files_id = obj.file_id
2421 # If still not found, add it
2422 if files_id is None:
2423 # HACK: Force sha1sum etc into dentry
2424 dentry["sha1sum"] = dfentry["sha1sum"]
2425 dentry["sha256sum"] = dfentry["sha256sum"]
2426 poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
2427 pfs.append(poolfile)
2428 files_id = poolfile.file_id
# Sanity check: the id we settled on must resolve to a real pool file.
2430 poolfile = get_poolfile_by_id(files_id, session)
2431 if poolfile is None:
2432 utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
2433 pfs.append(poolfile)
2435 df.poolfile_id = files_id
2438 # Add the src_uploaders to the DB
2439 uploader_ids = [source.maintainer_id]
2440 if u.pkg.dsc.has_key("uploaders"):
2441 for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
2443 uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)
# De-duplicate uploader ids; warn on repeats instead of inserting twice.
2446 for up_id in uploader_ids:
2447 if added_ids.has_key(up_id):
2449 utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))
2455 su.maintainer_id = up_id
2456 su.source_id = source.source_id
2461 return source, dsc_component, dsc_location_id, pfs
2463 __all__.append('add_dsc_to_db')
# Record an accepted binary package (.deb or .udeb) in the database.
# NOTE(review): several statements ('bin' construction, session.add, flush)
# are elided in this excerpt — internal numbering jumps throughout.
2466 def add_deb_to_db(u, filename, session=None):
2468 Contrary to what you might expect, this routine deals with both
2469 debs and udebs. That info is in 'dbtype', whilst 'type' is
2470 'deb' for both of them
2473 entry = u.pkg.files[filename]
# Populate the DBBinary row from the parsed control data.
2476 bin.package = entry["package"]
2477 bin.version = entry["version"]
2478 bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
2479 bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
2480 bin.arch_id = get_architecture(entry["architecture"], session).arch_id
2481 bin.binarytype = entry["dbtype"]
# Resolve (or create) the pool file backing this binary.
2484 filename = entry["pool name"] + filename
2485 fullpath = os.path.join(cnf["Dir::Pool"], filename)
2486 if not entry.get("location id", None):
2487 entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id
2489 if entry.get("files id", None):
2490 poolfile = get_poolfile_by_id(bin.poolfile_id)
2491 bin.poolfile_id = entry["files id"]
2493 poolfile = add_poolfile(filename, entry, entry["location id"], session)
2494 bin.poolfile_id = entry["files id"] = poolfile.file_id
# The binary must map to exactly one source package.
2497 bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
2498 if len(bin_sources) != 1:
2499 raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
2500 (bin.package, bin.version, entry["architecture"],
2501 filename, bin.binarytype, u.pkg.changes["fingerprint"])
2503 bin.source_id = bin_sources[0].source_id
# Record Built-Using relationships; each referenced source must also be unique.
2505 if entry.has_key("built-using"):
2506 for srcname, version in entry["built-using"]:
2507 exsources = get_sources_from_name(srcname, version, session=session)
2508 if len(exsources) != 1:
2509 raise NoSourceFieldError, "Unable to find source package (%s = %s) in Built-Using for %s (%s), %s, file %s, type %s, signed by %s" % \
2510 (srcname, version, bin.package, bin.version, entry["architecture"],
2511 filename, bin.binarytype, u.pkg.changes["fingerprint"])
2513 bin.extra_sources.append(exsources[0])
2515 # Add and flush object so it has an ID
# Associate the binary with every suite named in the .changes distribution.
2518 suite_names = u.pkg.changes["distribution"].keys()
2519 bin.suites = session.query(Suite). \
2520 filter(Suite.suite_name.in_(suite_names)).all()
2524 # Deal with contents - disabled for now
2525 #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
2527 # print "REJECT\nCould not determine contents of package %s" % bin.package
2528 # session.rollback()
2529 # raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
2533 __all__.append('add_deb_to_db')
2535 ################################################################################
# ORM class for the source_acl table; columns are attached by the mapper.
2537 class SourceACL(object):
2538 def __init__(self, *args, **kwargs):
# repr shows the primary key (source_acl_id, set by the mapper).
2542 return '<SourceACL %s>' % self.source_acl_id
2544 __all__.append('SourceACL')
2546 ################################################################################
# ORM class for the src_format table (source package format names,
# e.g. '1.0'); columns are attached by the mapper.
2548 class SrcFormat(object):
2549 def __init__(self, *args, **kwargs):
2553 return '<SrcFormat %s>' % (self.format_name)
2555 __all__.append('SrcFormat')
2557 ################################################################################
# ORM class for the src_uploaders table (maintainer <-> source links,
# see the SrcUploader mapper below).
2559 class SrcUploader(object):
2560 def __init__(self, *args, **kwargs):
2564 return '<SrcUploader %s>' % self.uploader_id
2566 __all__.append('SrcUploader')
2568 ################################################################################
# (display label, Suite attribute) pairs used to render a textual summary
# of a suite (see the loop over SUITE_FIELDS in the Suite class).
2570 SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
2571 ('SuiteID', 'suite_id'),
2572 ('Version', 'version'),
2573 ('Origin', 'origin'),
2575 ('Description', 'description'),
2576 ('Untouchable', 'untouchable'),
2577 ('Announce', 'announce'),
2578 ('Codename', 'codename'),
2579 ('OverrideCodename', 'overridecodename'),
2580 ('ValidTime', 'validtime'),
2581 ('Priority', 'priority'),
2582 ('NotAutomatic', 'notautomatic'),
2583 ('CopyChanges', 'copychanges'),
2584 ('OverrideSuite', 'overridesuite')]
2586 # Why the heck don't we have any UNIQUE constraints in table suite?
2587 # TODO: Add UNIQUE constraints for appropriate columns.
# ORM class for the 'suite' table (e.g. unstable, testing).
2588 class Suite(ORMObject):
2589 def __init__(self, suite_name = None, version = None):
2590 self.suite_name = suite_name
2591 self.version = version
# Attributes exposed via ORMObject's generic property machinery.
2593 def properties(self):
2594 return ['suite_name', 'version', 'sources_count', 'binaries_count', \
# Columns that must not be NULL (checked by the validator extension).
2597 def not_null_constraints(self):
2598 return ['suite_name', 'version']
# Allow comparing a Suite directly with a plain suite-name string.
2600 def __eq__(self, val):
2601 if isinstance(val, str):
2602 return (self.suite_name == val)
2603 # This signals to use the normal comparison operator
2604 return NotImplemented
2606 def __ne__(self, val):
2607 if isinstance(val, str):
2608 return (self.suite_name != val)
2609 # This signals to use the normal comparison operator
2610 return NotImplemented
# Build a "Label: value" line per populated SUITE_FIELDS entry and join
# them into one multi-line string.
2614 for disp, field in SUITE_FIELDS:
2615 val = getattr(self, field, None)
2617 ret.append("%s: %s" % (disp, val))
2619 return "\n".join(ret)
2621 def get_architectures(self, skipsrc=False, skipall=False):
2623 Returns list of Architecture objects
2625 @type skipsrc: boolean
2626 @param skipsrc: Whether to skip returning the 'source' architecture entry
2629 @type skipall: boolean
2630 @param skipall: Whether to skip returning the 'all' architecture entry
2634 @return: list of Architecture objects for the given name (may be empty)
# Query this suite's architectures via the suite_architectures relation,
# optionally filtering out the pseudo-architectures 'source' and 'all'.
2637 q = object_session(self).query(Architecture).with_parent(self)
2639 q = q.filter(Architecture.arch_string != 'source')
2641 q = q.filter(Architecture.arch_string != 'all')
2642 return q.order_by(Architecture.arch_string).all()
2644 def get_sources(self, source):
2646 Returns a query object representing DBSource that is part of C{suite}.
2648 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2650 @type source: string
2651 @param source: source package name
2653 @rtype: sqlalchemy.orm.query.Query
2654 @return: a query of DBSource
2658 session = object_session(self)
2659 return session.query(DBSource).filter_by(source = source). \
2662 __all__.append('Suite')
2665 def get_suite(suite, session=None):
2667 Returns Suite object for given C{suite name}.
2670 @param suite: The name of the suite
2672 @type session: Session
2673 @param session: Optional SQLA session object (a temporary one will be
2674 generated if not supplied)
2677 @return: Suite object for the requested suite name (None if not present)
# Single-row lookup by name; NoResultFound is handled below
# (the docstring says None is returned in that case).
2680 q = session.query(Suite).filter_by(suite_name=suite)
2684 except NoResultFound:
2687 __all__.append('get_suite')
2689 ################################################################################
2691 # TODO: should be removed because the implementation is too trivial
2693 def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
2695 Returns list of Architecture objects for given C{suite} name
2698 @param suite: Suite name to search for
2700 @type skipsrc: boolean
2701 @param skipsrc: Whether to skip returning the 'source' architecture entry
2704 @type skipall: boolean
2705 @param skipall: Whether to skip returning the 'all' architecture entry
2708 @type session: Session
2709 @param session: Optional SQL session object (a temporary one will be
2710 generated if not supplied)
2713 @return: list of Architecture objects for the given name (may be empty)
# Thin wrapper that delegates to Suite.get_architectures (flagged above
# as a removal candidate).
2716 return get_suite(suite, session).get_architectures(skipsrc, skipall)
2718 __all__.append('get_suite_architectures')
2720 ################################################################################
# ORM class for the suite_src_formats association table
# (which source formats are allowed in which suite).
2722 class SuiteSrcFormat(object):
2723 def __init__(self, *args, **kwargs):
2727 return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
2729 __all__.append('SuiteSrcFormat')
2732 def get_suite_src_formats(suite, session=None):
2734 Returns list of allowed SrcFormat for C{suite}.
2737 @param suite: Suite name to search for
2739 @type session: Session
2740 @param session: Optional SQL session object (a temporary one will be
2741 generated if not supplied)
2744 @return: the list of allowed source formats for I{suite}
# Join src_format -> suite_src_formats -> suite, restrict to the named
# suite and order by format name.
2747 q = session.query(SrcFormat)
2748 q = q.join(SuiteSrcFormat)
2749 q = q.join(Suite).filter_by(suite_name=suite)
2750 q = q.order_by('format_name')
2754 __all__.append('get_suite_src_formats')
2756 ################################################################################
# ORM class for the 'uid' table (key user ids; linked to Fingerprint
# by the Uid mapper below).
2758 class Uid(ORMObject):
2759 def __init__(self, uid = None, name = None):
# Allow comparing a Uid directly with a plain uid string.
2763 def __eq__(self, val):
2764 if isinstance(val, str):
2765 return (self.uid == val)
2766 # This signals to use the normal comparison operator
2767 return NotImplemented
2769 def __ne__(self, val):
2770 if isinstance(val, str):
2771 return (self.uid != val)
2772 # This signals to use the normal comparison operator
2773 return NotImplemented
# Attributes exposed via ORMObject's generic property machinery.
2775 def properties(self):
2776 return ['uid', 'name', 'fingerprint']
2778 def not_null_constraints(self):
2781 __all__.append('Uid')
2784 def get_or_set_uid(uidname, session=None):
2786 Returns uid object for given uidname.
2788 If no matching uidname is found, a row is inserted.
2790 @type uidname: string
2791 @param uidname: The uid to add
2793 @type session: SQLAlchemy
2794 @param session: Optional SQL session object (a temporary one will be
2795 generated if not supplied). If not passed, a commit will be performed at
2796 the end of the function, otherwise the caller is responsible for commiting.
2799 @return: the uid object for the given uidname
2802 q = session.query(Uid).filter_by(uid=uidname)
# On a miss, insert a new row; commit_or_flush commits only when the
# session was created here (see the session parameter docs above).
2806 except NoResultFound:
2810 session.commit_or_flush()
2815 __all__.append('get_or_set_uid')
2818 def get_uid_from_fingerprint(fpr, session=None):
# Look up the Uid joined to the given fingerprint string; NoResultFound
# is caught below (presumably returning None -- TODO confirm).
2819 q = session.query(Uid)
2820 q = q.join(Fingerprint).filter_by(fingerprint=fpr)
2824 except NoResultFound:
2827 __all__.append('get_uid_from_fingerprint')
2829 ################################################################################
# ORM class for the upload_blocks table; rows reference a source plus a
# Fingerprint/Uid (see the UploadBlock mapper below).
2831 class UploadBlock(object):
2832 def __init__(self, *args, **kwargs):
2836 return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
2838 __all__.append('UploadBlock')
2840 ################################################################################
# ORM class for the metadata_keys table (names of metadata fields used
# by BinaryMetadata/SourceMetadata).
2842 class MetadataKey(ORMObject):
2843 def __init__(self, key = None):
2846 def properties(self):
2849 def not_null_constraints(self):
2852 __all__.append('MetadataKey')
2854 ################################################################################
# ORM class for binaries_metadata: one (key, value) pair per binary
# package (mapped as a dict on DBBinary via attribute_mapped_collection).
2856 class BinaryMetadata(ORMObject):
2857 def __init__(self, key = None, value = None, binary = None):
2860 self.binary = binary
2862 def properties(self):
2863 return ['binary', 'key', 'value']
2865 def not_null_constraints(self):
2868 __all__.append('BinaryMetadata')
2870 ################################################################################
# ORM class for source_metadata: one (key, value) pair per source
# package (mapped as a dict on DBSource via attribute_mapped_collection).
2872 class SourceMetadata(ORMObject):
2873 def __init__(self, key = None, value = None, source = None):
2876 self.source = source
2878 def properties(self):
2879 return ['source', 'key', 'value']
2881 def not_null_constraints(self):
2884 __all__.append('SourceMetadata')
2886 ################################################################################
# Borg-style shared-state singleton: every instance shares
# __shared_state, so the engine, table reflection and mappers are set up
# only once per process.
2888 class DBConn(object):
2890 database module init.
2894 def __init__(self, *args, **kwargs):
2895 self.__dict__ = self.__shared_state
2897 if not getattr(self, 'initialised', False):
2898 self.initialised = True
2899 self.debug = kwargs.has_key('debug')
# Reflect the archive's tables and views from the live database and
# attach them as self.tbl_<name> / self.view_<name> Table objects.
2902 def __setuptables(self):
2909 'binaries_metadata',
2913 'build_queue_files',
2918 'changes_pending_binaries',
2919 'changes_pending_files',
2920 'changes_pending_source',
2921 'changes_pending_files_map',
2922 'changes_pending_source_files',
2923 'changes_pool_files',
2925 'extra_src_references',
2934 # TODO: the maintainer column in table override should be removed.
2948 'suite_architectures',
2949 'suite_build_queue_copy',
2950 'suite_src_formats',
2956 'almost_obsolete_all_associations',
2957 'almost_obsolete_src_associations',
2958 'any_associations_source',
2959 'bin_assoc_by_arch',
2960 'bin_associations_binaries',
2961 'binaries_suite_arch',
2962 'binfiles_suite_component_arch',
2965 'newest_all_associations',
2966 'newest_any_associations',
2968 'newest_src_association',
2969 'obsolete_all_associations',
2970 'obsolete_any_associations',
2971 'obsolete_any_by_all_associations',
2972 'obsolete_src_associations',
2974 'src_associations_bin',
2975 'src_associations_src',
2976 'suite_arch_by_name',
# Reflect each table (autoload) and expose it as an attribute.
2979 for table_name in tables:
2980 table = Table(table_name, self.db_meta, \
2981 autoload=True, useexisting=True)
2982 setattr(self, 'tbl_%s' % table_name, table)
2984 for view_name in views:
2985 view = Table(view_name, self.db_meta, autoload=True)
2986 setattr(self, 'view_%s' % view_name, view)
# Wire each ORM class to its reflected table using classic SQLAlchemy
# mapper() calls: column-name aliases (e.g. arch_id -> architecture.id),
# relations/backrefs, and the 'validator' MapperExtension where the class
# participates in not-null validation.
2988 def __setupmappers(self):
2989 mapper(Architecture, self.tbl_architecture,
2990 properties = dict(arch_id = self.tbl_architecture.c.id,
2991 suites = relation(Suite, secondary=self.tbl_suite_architectures,
2992 order_by='suite_name',
2993 backref=backref('architectures', order_by='arch_string'))),
2994 extension = validator)
2996 mapper(Archive, self.tbl_archive,
2997 properties = dict(archive_id = self.tbl_archive.c.id,
2998 archive_name = self.tbl_archive.c.name))
3000 mapper(BuildQueue, self.tbl_build_queue,
3001 properties = dict(queue_id = self.tbl_build_queue.c.id))
3003 mapper(BuildQueueFile, self.tbl_build_queue_files,
3004 properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
3005 poolfile = relation(PoolFile, backref='buildqueueinstances')))
3007 mapper(DBBinary, self.tbl_binaries,
3008 properties = dict(binary_id = self.tbl_binaries.c.id,
3009 package = self.tbl_binaries.c.package,
3010 version = self.tbl_binaries.c.version,
3011 maintainer_id = self.tbl_binaries.c.maintainer,
3012 maintainer = relation(Maintainer),
3013 source_id = self.tbl_binaries.c.source,
3014 source = relation(DBSource, backref='binaries'),
3015 arch_id = self.tbl_binaries.c.architecture,
3016 architecture = relation(Architecture),
3017 poolfile_id = self.tbl_binaries.c.file,
3018 poolfile = relation(PoolFile, backref=backref('binary', uselist = False)),
3019 binarytype = self.tbl_binaries.c.type,
3020 fingerprint_id = self.tbl_binaries.c.sig_fpr,
3021 fingerprint = relation(Fingerprint),
3022 install_date = self.tbl_binaries.c.install_date,
3023 suites = relation(Suite, secondary=self.tbl_bin_associations,
3024 backref=backref('binaries', lazy='dynamic')),
3025 extra_sources = relation(DBSource, secondary=self.tbl_extra_src_references,
3026 backref=backref('extra_binary_references', lazy='dynamic')),
3027 key = relation(BinaryMetadata, cascade='all',
3028 collection_class=attribute_mapped_collection('key'))),
3029 extension = validator)
3031 mapper(BinaryACL, self.tbl_binary_acl,
3032 properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))
3034 mapper(BinaryACLMap, self.tbl_binary_acl_map,
3035 properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
3036 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
3037 architecture = relation(Architecture)))
3039 mapper(Component, self.tbl_component,
3040 properties = dict(component_id = self.tbl_component.c.id,
3041 component_name = self.tbl_component.c.name),
3042 extension = validator)
3044 mapper(DBConfig, self.tbl_config,
3045 properties = dict(config_id = self.tbl_config.c.id))
3047 mapper(DSCFile, self.tbl_dsc_files,
3048 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
3049 source_id = self.tbl_dsc_files.c.source,
3050 source = relation(DBSource),
3051 poolfile_id = self.tbl_dsc_files.c.file,
3052 poolfile = relation(PoolFile)))
3054 mapper(PoolFile, self.tbl_files,
3055 properties = dict(file_id = self.tbl_files.c.id,
3056 filesize = self.tbl_files.c.size,
3057 location_id = self.tbl_files.c.location,
3058 location = relation(Location,
3059 # using lazy='dynamic' in the back
3060 # reference because we have A LOT of
3061 # files in one location
3062 backref=backref('files', lazy='dynamic'))),
3063 extension = validator)
3065 mapper(Fingerprint, self.tbl_fingerprint,
3066 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
3067 uid_id = self.tbl_fingerprint.c.uid,
3068 uid = relation(Uid),
3069 keyring_id = self.tbl_fingerprint.c.keyring,
3070 keyring = relation(Keyring),
3071 source_acl = relation(SourceACL),
3072 binary_acl = relation(BinaryACL)),
3073 extension = validator)
3075 mapper(Keyring, self.tbl_keyrings,
3076 properties = dict(keyring_name = self.tbl_keyrings.c.name,
3077 keyring_id = self.tbl_keyrings.c.id))
3079 mapper(DBChange, self.tbl_changes,
3080 properties = dict(change_id = self.tbl_changes.c.id,
3081 poolfiles = relation(PoolFile,
3082 secondary=self.tbl_changes_pool_files,
3083 backref="changeslinks"),
3084 seen = self.tbl_changes.c.seen,
3085 source = self.tbl_changes.c.source,
3086 binaries = self.tbl_changes.c.binaries,
3087 architecture = self.tbl_changes.c.architecture,
3088 distribution = self.tbl_changes.c.distribution,
3089 urgency = self.tbl_changes.c.urgency,
3090 maintainer = self.tbl_changes.c.maintainer,
3091 changedby = self.tbl_changes.c.changedby,
3092 date = self.tbl_changes.c.date,
3093 version = self.tbl_changes.c.version,
3094 files = relation(ChangePendingFile,
3095 secondary=self.tbl_changes_pending_files_map,
3096 backref="changesfile"),
3097 in_queue_id = self.tbl_changes.c.in_queue,
3098 in_queue = relation(PolicyQueue,
3099 primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
3100 approved_for_id = self.tbl_changes.c.approved_for))
3102 mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
3103 properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))
3105 mapper(ChangePendingFile, self.tbl_changes_pending_files,
3106 properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
3107 filename = self.tbl_changes_pending_files.c.filename,
3108 size = self.tbl_changes_pending_files.c.size,
3109 md5sum = self.tbl_changes_pending_files.c.md5sum,
3110 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
3111 sha256sum = self.tbl_changes_pending_files.c.sha256sum))
3113 mapper(ChangePendingSource, self.tbl_changes_pending_source,
3114 properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
3115 change = relation(DBChange),
3116 maintainer = relation(Maintainer,
3117 primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
3118 changedby = relation(Maintainer,
3119 primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
3120 fingerprint = relation(Fingerprint),
3121 source_files = relation(ChangePendingFile,
3122 secondary=self.tbl_changes_pending_source_files,
3123 backref="pending_sources")))
3126 mapper(KeyringACLMap, self.tbl_keyring_acl_map,
3127 properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
3128 keyring = relation(Keyring, backref="keyring_acl_map"),
3129 architecture = relation(Architecture)))
3131 mapper(Location, self.tbl_location,
3132 properties = dict(location_id = self.tbl_location.c.id,
3133 component_id = self.tbl_location.c.component,
3134 component = relation(Component, backref='location'),
3135 archive_id = self.tbl_location.c.archive,
3136 archive = relation(Archive),
3137 # FIXME: the 'type' column is old cruft and
3138 # should be removed in the future.
3139 archive_type = self.tbl_location.c.type),
3140 extension = validator)
3142 mapper(Maintainer, self.tbl_maintainer,
3143 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
3144 maintains_sources = relation(DBSource, backref='maintainer',
3145 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
3146 changed_sources = relation(DBSource, backref='changedby',
3147 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
3148 extension = validator)
3150 mapper(NewComment, self.tbl_new_comments,
3151 properties = dict(comment_id = self.tbl_new_comments.c.id))
3153 mapper(Override, self.tbl_override,
3154 properties = dict(suite_id = self.tbl_override.c.suite,
3155 suite = relation(Suite, \
3156 backref=backref('overrides', lazy='dynamic')),
3157 package = self.tbl_override.c.package,
3158 component_id = self.tbl_override.c.component,
3159 component = relation(Component, \
3160 backref=backref('overrides', lazy='dynamic')),
3161 priority_id = self.tbl_override.c.priority,
3162 priority = relation(Priority, \
3163 backref=backref('overrides', lazy='dynamic')),
3164 section_id = self.tbl_override.c.section,
3165 section = relation(Section, \
3166 backref=backref('overrides', lazy='dynamic')),
3167 overridetype_id = self.tbl_override.c.type,
3168 overridetype = relation(OverrideType, \
3169 backref=backref('overrides', lazy='dynamic'))))
3171 mapper(OverrideType, self.tbl_override_type,
3172 properties = dict(overridetype = self.tbl_override_type.c.type,
3173 overridetype_id = self.tbl_override_type.c.id))
3175 mapper(PolicyQueue, self.tbl_policy_queue,
3176 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))
3178 mapper(Priority, self.tbl_priority,
3179 properties = dict(priority_id = self.tbl_priority.c.id))
3181 mapper(Section, self.tbl_section,
3182 properties = dict(section_id = self.tbl_section.c.id,
3183 section=self.tbl_section.c.section))
3185 mapper(DBSource, self.tbl_source,
3186 properties = dict(source_id = self.tbl_source.c.id,
3187 version = self.tbl_source.c.version,
3188 maintainer_id = self.tbl_source.c.maintainer,
3189 poolfile_id = self.tbl_source.c.file,
3190 poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
3191 fingerprint_id = self.tbl_source.c.sig_fpr,
3192 fingerprint = relation(Fingerprint),
3193 changedby_id = self.tbl_source.c.changedby,
3194 srcfiles = relation(DSCFile,
3195 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
3196 suites = relation(Suite, secondary=self.tbl_src_associations,
3197 backref=backref('sources', lazy='dynamic')),
3198 srcuploaders = relation(SrcUploader),
3199 key = relation(SourceMetadata, cascade='all',
3200 collection_class=attribute_mapped_collection('key'))),
3201 extension = validator)
3203 mapper(SourceACL, self.tbl_source_acl,
3204 properties = dict(source_acl_id = self.tbl_source_acl.c.id))
3206 mapper(SrcFormat, self.tbl_src_format,
3207 properties = dict(src_format_id = self.tbl_src_format.c.id,
3208 format_name = self.tbl_src_format.c.format_name))
3210 mapper(SrcUploader, self.tbl_src_uploaders,
3211 properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
3212 source_id = self.tbl_src_uploaders.c.source,
3213 source = relation(DBSource,
3214 primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
3215 maintainer_id = self.tbl_src_uploaders.c.maintainer,
3216 maintainer = relation(Maintainer,
3217 primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))
3219 mapper(Suite, self.tbl_suite,
3220 properties = dict(suite_id = self.tbl_suite.c.id,
3221 policy_queue = relation(PolicyQueue),
3222 copy_queues = relation(BuildQueue,
3223 secondary=self.tbl_suite_build_queue_copy)),
3224 extension = validator)
3226 mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
3227 properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
3228 suite = relation(Suite, backref='suitesrcformats'),
3229 src_format_id = self.tbl_suite_src_formats.c.src_format,
3230 src_format = relation(SrcFormat)))
3232 mapper(Uid, self.tbl_uid,
3233 properties = dict(uid_id = self.tbl_uid.c.id,
3234 fingerprint = relation(Fingerprint)),
3235 extension = validator)
3237 mapper(UploadBlock, self.tbl_upload_blocks,
3238 properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
3239 fingerprint = relation(Fingerprint, backref="uploadblocks"),
3240 uid = relation(Uid, backref="uploadblocks")))
3242 mapper(BinContents, self.tbl_bin_contents,
3244 binary = relation(DBBinary,
3245 backref=backref('contents', lazy='dynamic', cascade='all')),
3246 file = self.tbl_bin_contents.c.file))
3248 mapper(SrcContents, self.tbl_src_contents,
3250 source = relation(DBSource,
3251 backref=backref('contents', lazy='dynamic', cascade='all')),
3252 file = self.tbl_src_contents.c.file))
3254 mapper(MetadataKey, self.tbl_metadata_keys,
3256 key_id = self.tbl_metadata_keys.c.key_id,
3257 key = self.tbl_metadata_keys.c.key))
3259 mapper(BinaryMetadata, self.tbl_binaries_metadata,
3261 binary_id = self.tbl_binaries_metadata.c.bin_id,
3262 binary = relation(DBBinary),
3263 key_id = self.tbl_binaries_metadata.c.key_id,
3264 key = relation(MetadataKey),
3265 value = self.tbl_binaries_metadata.c.value))
3267 mapper(SourceMetadata, self.tbl_source_metadata,
3269 source_id = self.tbl_source_metadata.c.src_id,
3270 source = relation(DBSource),
3271 key_id = self.tbl_source_metadata.c.key_id,
3272 key = relation(MetadataKey),
3273 value = self.tbl_source_metadata.c.value))
3275 ## Connection functions
# Build the PostgreSQL connection string from dak's Config (DB::Service,
# DB::Host, DB::Port, DB::Name, pool settings), create the engine and
# the session factory, then reflect tables and set up mappers.
3276 def __createconn(self):
3277 from config import Config
3279 if cnf.has_key("DB::Service"):
3280 connstr = "postgresql://service=%s" % cnf["DB::Service"]
3281 elif cnf.has_key("DB::Host"):
# TCP connection; port -1 means "use the default port".
3283 connstr = "postgresql://%s" % cnf["DB::Host"]
3284 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
3285 connstr += ":%s" % cnf["DB::Port"]
3286 connstr += "/%s" % cnf["DB::Name"]
3289 connstr = "postgresql:///%s" % cnf["DB::Name"]
3290 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
3291 connstr += "?port=%s" % cnf["DB::Port"]
3293 engine_args = { 'echo': self.debug }
3294 if cnf.has_key('DB::PoolSize'):
3295 engine_args['pool_size'] = int(cnf['DB::PoolSize'])
3296 if cnf.has_key('DB::MaxOverflow'):
3297 engine_args['max_overflow'] = int(cnf['DB::MaxOverflow'])
3298 if sa_major_version == '0.6' and cnf.has_key('DB::Unicode') and \
3299 cnf['DB::Unicode'] == 'false':
3300 engine_args['use_native_unicode'] = False
3302 # Monkey patch a new dialect in in order to support service= syntax
3303 import sqlalchemy.dialects.postgresql
3304 from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
3305 class PGDialect_psycopg2_dak(PGDialect_psycopg2):
3306 def create_connect_args(self, url):
3307 if str(url).startswith('postgresql://service='):
# Strip the 'postgresql://service=' prefix (21 chars) to get the name.
3309 servicename = str(url)[21:]
3310 return (['service=%s' % servicename], {})
3312 return PGDialect_psycopg2.create_connect_args(self, url)
3314 sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak
3316 self.db_pg = create_engine(connstr, **engine_args)
3317 self.db_meta = MetaData()
3318 self.db_meta.bind = self.db_pg
3319 self.db_smaker = sessionmaker(bind=self.db_pg,
# Remember the creating pid so forked children can detect a stale engine.
3323 self.__setuptables()
3324 self.__setupmappers()
3325 self.pid = os.getpid()
3328 # reinitialize DBConn in new processes
3329 if self.pid != os.getpid():
# Hand out a fresh session from the shared session factory.
3332 return self.db_smaker()
3334 __all__.append('DBConn')