5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
37 from os.path import normpath
48 import simplejson as json
50 from datetime import datetime, timedelta
51 from errno import ENOENT
52 from tempfile import mkstemp, mkdtemp
53 from subprocess import Popen, PIPE
54 from tarfile import TarFile
56 from inspect import getargspec
59 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
61 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
62 backref, MapperExtension, EXT_CONTINUE, object_mapper, clear_mappers
63 from sqlalchemy import types as sqltypes
64 from sqlalchemy.orm.collections import attribute_mapped_collection
65 from sqlalchemy.ext.associationproxy import association_proxy
67 # Don't remove this, we re-export the exceptions to scripts which import us
68 from sqlalchemy.exc import *
69 from sqlalchemy.orm.exc import NoResultFound
71 # Only import Config until Queue stuff is changed to store its config
73 from config import Config
74 from textutils import fix_maintainer
75 from dak_exceptions import DBUpdateError, NoSourceFieldError
77 # suppress some deprecation warnings in squeeze related to sqlalchemy
79 warnings.filterwarnings('ignore', \
80 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
84 ################################################################################
86 # Patch in support for the debversion field type so that it works during
90 # that is for sqlalchemy 0.6
91 UserDefinedType = sqltypes.UserDefinedType
93 # this one for sqlalchemy 0.5
94 UserDefinedType = sqltypes.TypeEngine
96 class DebVersion(UserDefinedType):
97 def get_col_spec(self):
100 def bind_processor(self, dialect):
103 # ' = None' is needed for sqlalchemy 0.5:
104 def result_processor(self, dialect, coltype = None):
107 sa_major_version = sqlalchemy.__version__[0:3]
108 if sa_major_version in ["0.5", "0.6"]:
109 from sqlalchemy.databases import postgres
110 postgres.ischema_names['debversion'] = DebVersion
112 raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
114 ################################################################################
116 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
118 ################################################################################
120 def session_wrapper(fn):
122 Wrapper around common ".., session=None):" handling. If the wrapped
123 function is called without passing 'session', we create a local one
124 and destroy it when the function ends.
126 Also attaches a commit_or_flush method to the session; if we created a
127 local session, this is a synonym for session.commit(), otherwise it is a
128 synonym for session.flush().
131 def wrapped(*args, **kwargs):
132 private_transaction = False
134 # Find the session object
135 session = kwargs.get('session')
138 if len(args) <= len(getargspec(fn)[0]) - 1:
139 # No session specified as last argument or in kwargs
140 private_transaction = True
141 session = kwargs['session'] = DBConn().session()
143 # Session is last argument in args
147 session = args[-1] = DBConn().session()
148 private_transaction = True
150 if private_transaction:
151 session.commit_or_flush = session.commit
153 session.commit_or_flush = session.flush
156 return fn(*args, **kwargs)
158 if private_transaction:
159 # We created a session; close it.
162 wrapped.__doc__ = fn.__doc__
163 wrapped.func_name = fn.func_name
167 __all__.append('session_wrapper')
169 ################################################################################
171 class ORMObject(object):
173 ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
174 derived classes must implement the properties() method.
177 def properties(self):
179 This method should be implemented by all derived classes and returns a
180 list of the important properties. The properties 'created' and
181 'modified' will be added automatically. A suffix '_count' should be
182 added to properties that are lists or query objects. The most important
183 property name should be returned as the first element in the list
184 because it is used by repr().
190 Returns a JSON representation of the object based on the properties
191 returned from the properties() method.
194 # add created and modified
195 all_properties = self.properties() + ['created', 'modified']
196 for property in all_properties:
197 # check for list or query
198 if property[-6:] == '_count':
199 real_property = property[:-6]
200 if not hasattr(self, real_property):
202 value = getattr(self, real_property)
203 if hasattr(value, '__len__'):
206 elif hasattr(value, 'count'):
208 value = value.count()
210 raise KeyError('Do not understand property %s.' % property)
212 if not hasattr(self, property):
215 value = getattr(self, property)
219 elif isinstance(value, ORMObject):
220 # use repr() for ORMObject types
223 # we want a string for all other types because json cannot
226 data[property] = value
227 return json.dumps(data)
231 Returns the name of the class.
233 return type(self).__name__
237 Returns a short string representation of the object using the first
238 element from the properties() method.
240 primary_property = self.properties()[0]
241 value = getattr(self, primary_property)
242 return '<%s %s>' % (self.classname(), str(value))
246 Returns a human readable form of the object using the properties()
249 return '<%s %s>' % (self.classname(), self.json())
251 def not_null_constraints(self):
253 Returns a list of properties that must be not NULL. Derived classes
254 should override this method if needed.
258 validation_message = \
259 "Validation failed because property '%s' must not be empty in object\n%s"
263 This function validates the not NULL constraints as returned by
264 not_null_constraints(). It raises the DBUpdateError exception if
267 for property in self.not_null_constraints():
268 # TODO: It is a bit awkward that the mapper configuration allow
269 # directly setting the numeric _id columns. We should get rid of it
271 if hasattr(self, property + '_id') and \
272 getattr(self, property + '_id') is not None:
274 if not hasattr(self, property) or getattr(self, property) is None:
275 raise DBUpdateError(self.validation_message % \
276 (property, str(self)))
280 def get(cls, primary_key, session = None):
282 This is a support function that allows getting an object by its primary
285 Architecture.get(3[, session])
287 instead of the more verbose
289 session.query(Architecture).get(3)
291 return session.query(cls).get(primary_key)
293 def session(self, replace = False):
295 Returns the current session that is associated with the object. May
296 return None is object is in detached state.
299 return object_session(self)
301 def clone(self, session = None):
303 Clones the current object in a new session and returns the new clone. A
304 fresh session is created if the optional session parameter is not
305 provided. The function will fail if a session is provided and has
308 RATIONALE: SQLAlchemy's session is not thread safe. This method clones
309 an existing object to allow several threads to work with their own
310 instances of an ORMObject.
312 WARNING: Only persistent (committed) objects can be cloned. Changes
313 made to the original object that are not committed yet will get lost.
314 The session of the new object will always be rolled back to avoid
318 if self.session() is None:
319 raise RuntimeError( \
320 'Method clone() failed for detached object:\n%s' % self)
321 self.session().flush()
322 mapper = object_mapper(self)
323 primary_key = mapper.primary_key_from_instance(self)
324 object_class = self.__class__
326 session = DBConn().session()
327 elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
328 raise RuntimeError( \
329 'Method clone() failed due to unflushed changes in session.')
330 new_object = session.query(object_class).get(primary_key)
332 if new_object is None:
333 raise RuntimeError( \
334 'Method clone() failed for non-persistent object:\n%s' % self)
337 __all__.append('ORMObject')
339 ################################################################################
341 class Validator(MapperExtension):
343 This class calls the validate() method for each instance for the
344 'before_update' and 'before_insert' events. A global object validator is
345 used for configuring the individual mappers.
348 def before_update(self, mapper, connection, instance):
352 def before_insert(self, mapper, connection, instance):
356 validator = Validator()
358 ################################################################################
class Architecture(ORMObject):
    """ORM class for a Debian architecture (e.g. 'i386', 'amd64', 'source')."""

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Support comparing an Architecture directly against a plain
        # architecture name string.
        if not isinstance(val, str):
            # Defer to the default comparison machinery for anything else.
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        if not isinstance(val, str):
            # Defer to the default comparison machinery for anything else.
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # 'arch_string' comes first because ORMObject.__repr__() uses the
        # first listed property.
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']
383 __all__.append('Architecture')
386 def get_architecture(architecture, session=None):
388 Returns database id for given C{architecture}.
390 @type architecture: string
391 @param architecture: The name of the architecture
393 @type session: Session
394 @param session: Optional SQLA session object (a temporary one will be
395 generated if not supplied)
398 @return: Architecture object for the given arch (None if not present)
401 q = session.query(Architecture).filter_by(arch_string=architecture)
405 except NoResultFound:
408 __all__.append('get_architecture')
410 # TODO: should be removed because the implementation is too trivial
412 def get_architecture_suites(architecture, session=None):
414 Returns list of Suite objects for given C{architecture} name
416 @type architecture: str
417 @param architecture: Architecture name to search for
419 @type session: Session
420 @param session: Optional SQL session object (a temporary one will be
421 generated if not supplied)
424 @return: list of Suite objects for the given name (may be empty)
427 return get_architecture(architecture, session).suites
429 __all__.append('get_architecture_suites')
431 ################################################################################
433 class Archive(object):
434 def __init__(self, *args, **kwargs):
438 return '<Archive %s>' % self.archive_name
440 __all__.append('Archive')
443 def get_archive(archive, session=None):
445 returns database id for given C{archive}.
447 @type archive: string
448 @param archive: the name of the arhive
450 @type session: Session
451 @param session: Optional SQLA session object (a temporary one will be
452 generated if not supplied)
455 @return: Archive object for the given name (None if not present)
458 archive = archive.lower()
460 q = session.query(Archive).filter_by(archive_name=archive)
464 except NoResultFound:
467 __all__.append('get_archive')
469 ################################################################################
471 class BinContents(ORMObject):
472 def __init__(self, file = None, binary = None):
476 def properties(self):
477 return ['file', 'binary']
479 __all__.append('BinContents')
481 ################################################################################
483 class DBBinary(ORMObject):
484 def __init__(self, package = None, source = None, version = None, \
485 maintainer = None, architecture = None, poolfile = None, \
487 self.package = package
489 self.version = version
490 self.maintainer = maintainer
491 self.architecture = architecture
492 self.poolfile = poolfile
493 self.binarytype = binarytype
495 def properties(self):
496 return ['package', 'version', 'maintainer', 'source', 'architecture', \
497 'poolfile', 'binarytype', 'fingerprint', 'install_date', \
498 'suites_count', 'binary_id', 'contents_count', 'extra_sources']
500 def not_null_constraints(self):
501 return ['package', 'version', 'maintainer', 'source', 'poolfile', \
504 metadata = association_proxy('key', 'value')
506 def get_component_name(self):
507 return self.poolfile.location.component.component_name
509 def scan_contents(self):
511 Yields the contents of the package. Only regular files are yielded and
512 the path names are normalized after converting them from either utf-8
513 or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the
514 package does not contain any regular file.
516 fullpath = self.poolfile.fullpath
517 dpkg = Popen(['dpkg-deb', '--fsys-tarfile', fullpath], stdout = PIPE)
518 tar = TarFile.open(fileobj = dpkg.stdout, mode = 'r|')
519 for member in tar.getmembers():
520 if not member.isdir():
521 name = normpath(member.name)
522 # enforce proper utf-8 encoding
525 except UnicodeDecodeError:
526 name = name.decode('iso8859-1').encode('utf-8')
532 __all__.append('DBBinary')
535 def get_suites_binary_in(package, session=None):
537 Returns list of Suite objects which given C{package} name is in
540 @param package: DBBinary package name to search for
543 @return: list of Suite objects for the given package
546 return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()
548 __all__.append('get_suites_binary_in')
551 def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
553 Returns the component name of the newest binary package in suite_list or
554 None if no package is found. The result can be optionally filtered by a list
555 of architecture names.
558 @param package: DBBinary package name to search for
560 @type suite_list: list of str
561 @param suite_list: list of suite_name items
563 @type arch_list: list of str
564 @param arch_list: optional list of arch_string items that defaults to []
566 @rtype: str or NoneType
567 @return: name of component or None
570 q = session.query(DBBinary).filter_by(package = package). \
571 join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
572 if len(arch_list) > 0:
573 q = q.join(DBBinary.architecture). \
574 filter(Architecture.arch_string.in_(arch_list))
575 binary = q.order_by(desc(DBBinary.version)).first()
579 return binary.get_component_name()
581 __all__.append('get_component_by_package_suite')
583 ################################################################################
585 class BinaryACL(object):
586 def __init__(self, *args, **kwargs):
590 return '<BinaryACL %s>' % self.binary_acl_id
592 __all__.append('BinaryACL')
594 ################################################################################
596 class BinaryACLMap(object):
597 def __init__(self, *args, **kwargs):
601 return '<BinaryACLMap %s>' % self.binary_acl_map_id
603 __all__.append('BinaryACLMap')
605 ################################################################################
610 ArchiveDir "%(archivepath)s";
611 OverrideDir "%(overridedir)s";
612 CacheDir "%(cachedir)s";
617 Packages::Compress ". bzip2 gzip";
618 Sources::Compress ". bzip2 gzip";
623 bindirectory "incoming"
628 BinOverride "override.sid.all3";
629 BinCacheDB "packages-accepted.db";
631 FileList "%(filelist)s";
634 Packages::Extensions ".deb .udeb";
637 bindirectory "incoming/"
640 BinOverride "override.sid.all3";
641 SrcOverride "override.sid.all3.src";
642 FileList "%(filelist)s";
646 class BuildQueue(object):
647 def __init__(self, *args, **kwargs):
651 return '<BuildQueue %s>' % self.queue_name
653 def write_metadata(self, starttime, force=False):
654 # Do we write out metafiles?
655 if not (force or self.generate_metadata):
658 session = DBConn().session().object_session(self)
660 fl_fd = fl_name = ac_fd = ac_name = None
662 arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
663 startdir = os.getcwd()
666 # Grab files we want to include
667 newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
668 # Write file list with newer files
669 (fl_fd, fl_name) = mkstemp()
671 os.write(fl_fd, '%s\n' % n.fullpath)
676 # Write minimal apt.conf
677 # TODO: Remove hardcoding from template
678 (ac_fd, ac_name) = mkstemp()
679 os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
681 'cachedir': cnf["Dir::Cache"],
682 'overridedir': cnf["Dir::Override"],
686 # Run apt-ftparchive generate
687 os.chdir(os.path.dirname(ac_name))
688 os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
690 # Run apt-ftparchive release
691 # TODO: Eww - fix this
692 bname = os.path.basename(self.path)
696 # We have to remove the Release file otherwise it'll be included in the
699 os.unlink(os.path.join(bname, 'Release'))
703 os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
705 # Crude hack with open and append, but this whole section is and should be redone.
706 if self.notautomatic:
707 release=open("Release", "a")
708 release.write("NotAutomatic: yes")
713 keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
714 if cnf.has_key("Dinstall::SigningPubKeyring"):
715 keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
717 os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
719 # Move the files if we got this far
720 os.rename('Release', os.path.join(bname, 'Release'))
722 os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
724 # Clean up any left behind files
751 def clean_and_update(self, starttime, Logger, dryrun=False):
752 """WARNING: This routine commits for you"""
753 session = DBConn().session().object_session(self)
755 if self.generate_metadata and not dryrun:
756 self.write_metadata(starttime)
758 # Grab files older than our execution time
759 older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
765 Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
767 Logger.log(["I: Removing %s from the queue" % o.fullpath])
768 os.unlink(o.fullpath)
771 # If it wasn't there, don't worry
772 if e.errno == ENOENT:
775 # TODO: Replace with proper logging call
776 Logger.log(["E: Could not remove %s" % o.fullpath])
783 for f in os.listdir(self.path):
784 if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
788 r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
789 except NoResultFound:
790 fp = os.path.join(self.path, f)
792 Logger.log(["I: Would remove unused link %s" % fp])
794 Logger.log(["I: Removing unused link %s" % fp])
798 Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
800 def add_file_from_pool(self, poolfile):
801 """Copies a file into the pool. Assumes that the PoolFile object is
802 attached to the same SQLAlchemy session as the Queue object is.
804 The caller is responsible for committing after calling this function."""
805 poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
807 # Check if we have a file of this name or this ID already
808 for f in self.queuefiles:
809 if f.fileid is not None and f.fileid == poolfile.file_id or \
810 f.poolfile.filename == poolfile_basename:
811 # In this case, update the BuildQueueFile entry so we
812 # don't remove it too early
813 f.lastused = datetime.now()
814 DBConn().session().object_session(poolfile).add(f)
817 # Prepare BuildQueueFile object
818 qf = BuildQueueFile()
819 qf.build_queue_id = self.queue_id
820 qf.lastused = datetime.now()
821 qf.filename = poolfile_basename
823 targetpath = poolfile.fullpath
824 queuepath = os.path.join(self.path, poolfile_basename)
828 # We need to copy instead of symlink
830 utils.copy(targetpath, queuepath)
831 # NULL in the fileid field implies a copy
834 os.symlink(targetpath, queuepath)
835 qf.fileid = poolfile.file_id
839 # Get the same session as the PoolFile is using and add the qf to it
840 DBConn().session().object_session(poolfile).add(qf)
845 __all__.append('BuildQueue')
848 def get_build_queue(queuename, session=None):
850 Returns BuildQueue object for given C{queue name}, creating it if it does not
853 @type queuename: string
854 @param queuename: The name of the queue
856 @type session: Session
857 @param session: Optional SQLA session object (a temporary one will be
858 generated if not supplied)
861 @return: BuildQueue object for the given queue
864 q = session.query(BuildQueue).filter_by(queue_name=queuename)
868 except NoResultFound:
871 __all__.append('get_build_queue')
873 ################################################################################
875 class BuildQueueFile(object):
876 def __init__(self, *args, **kwargs):
880 return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
884 return os.path.join(self.buildqueue.path, self.filename)
887 __all__.append('BuildQueueFile')
889 ################################################################################
891 class ChangePendingBinary(object):
892 def __init__(self, *args, **kwargs):
896 return '<ChangePendingBinary %s>' % self.change_pending_binary_id
898 __all__.append('ChangePendingBinary')
900 ################################################################################
902 class ChangePendingFile(object):
903 def __init__(self, *args, **kwargs):
907 return '<ChangePendingFile %s>' % self.change_pending_file_id
909 __all__.append('ChangePendingFile')
911 ################################################################################
913 class ChangePendingSource(object):
914 def __init__(self, *args, **kwargs):
918 return '<ChangePendingSource %s>' % self.change_pending_source_id
920 __all__.append('ChangePendingSource')
922 ################################################################################
class Component(ORMObject):
    """ORM class for an archive component (e.g. 'main', 'contrib')."""

    def __init__(self, component_name = None):
        self.component_name = component_name

    def __eq__(self, val):
        # Support comparing a Component directly against a plain
        # component name string.
        if not isinstance(val, str):
            # Defer to the default comparison machinery for anything else.
            return NotImplemented
        return self.component_name == val

    def __ne__(self, val):
        if not isinstance(val, str):
            # Defer to the default comparison machinery for anything else.
            return NotImplemented
        return self.component_name != val

    def properties(self):
        # 'component_name' comes first because ORMObject.__repr__() uses
        # the first listed property.
        return ['component_name', 'component_id', 'description', \
            'location_count', 'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        return ['component_name']
948 __all__.append('Component')
951 def get_component(component, session=None):
953 Returns database id for given C{component}.
955 @type component: string
956 @param component: The name of the override type
959 @return: the database id for the given component
962 component = component.lower()
964 q = session.query(Component).filter_by(component_name=component)
968 except NoResultFound:
971 __all__.append('get_component')
973 ################################################################################
975 class DBConfig(object):
976 def __init__(self, *args, **kwargs):
980 return '<DBConfig %s>' % self.name
982 __all__.append('DBConfig')
984 ################################################################################
987 def get_or_set_contents_file_id(filename, session=None):
989 Returns database id for given filename.
991 If no matching file is found, a row is inserted.
993 @type filename: string
994 @param filename: The filename
995 @type session: SQLAlchemy
996 @param session: Optional SQL session object (a temporary one will be
997 generated if not supplied). If not passed, a commit will be performed at
998 the end of the function, otherwise the caller is responsible for commiting.
1001 @return: the database id for the given component
1004 q = session.query(ContentFilename).filter_by(filename=filename)
1007 ret = q.one().cafilename_id
1008 except NoResultFound:
1009 cf = ContentFilename()
1010 cf.filename = filename
1012 session.commit_or_flush()
1013 ret = cf.cafilename_id
1017 __all__.append('get_or_set_contents_file_id')
1020 def get_contents(suite, overridetype, section=None, session=None):
1022 Returns contents for a suite / overridetype combination, limiting
1023 to a section if not None.
1026 @param suite: Suite object
1028 @type overridetype: OverrideType
1029 @param overridetype: OverrideType object
1031 @type section: Section
1032 @param section: Optional section object to limit results to
1034 @type session: SQLAlchemy
1035 @param session: Optional SQL session object (a temporary one will be
1036 generated if not supplied)
1038 @rtype: ResultsProxy
1039 @return: ResultsProxy object set up to return tuples of (filename, section,
1043 # find me all of the contents for a given suite
1044 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
1048 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
1049 JOIN content_file_names n ON (c.filename=n.id)
1050 JOIN binaries b ON (b.id=c.binary_pkg)
1051 JOIN override o ON (o.package=b.package)
1052 JOIN section s ON (s.id=o.section)
1053 WHERE o.suite = :suiteid AND o.type = :overridetypeid
1054 AND b.type=:overridetypename"""
1056 vals = {'suiteid': suite.suite_id,
1057 'overridetypeid': overridetype.overridetype_id,
1058 'overridetypename': overridetype.overridetype}
1060 if section is not None:
1061 contents_q += " AND s.id = :sectionid"
1062 vals['sectionid'] = section.section_id
1064 contents_q += " ORDER BY fn"
1066 return session.execute(contents_q, vals)
1068 __all__.append('get_contents')
1070 ################################################################################
1072 class ContentFilepath(object):
1073 def __init__(self, *args, **kwargs):
1077 return '<ContentFilepath %s>' % self.filepath
1079 __all__.append('ContentFilepath')
1082 def get_or_set_contents_path_id(filepath, session=None):
1084 Returns database id for given path.
1086 If no matching file is found, a row is inserted.
1088 @type filepath: string
1089 @param filepath: The filepath
1091 @type session: SQLAlchemy
1092 @param session: Optional SQL session object (a temporary one will be
1093 generated if not supplied). If not passed, a commit will be performed at
1094 the end of the function, otherwise the caller is responsible for commiting.
1097 @return: the database id for the given path
1100 q = session.query(ContentFilepath).filter_by(filepath=filepath)
1103 ret = q.one().cafilepath_id
1104 except NoResultFound:
1105 cf = ContentFilepath()
1106 cf.filepath = filepath
1108 session.commit_or_flush()
1109 ret = cf.cafilepath_id
1113 __all__.append('get_or_set_contents_path_id')
1115 ################################################################################
1117 class ContentAssociation(object):
1118 def __init__(self, *args, **kwargs):
1122 return '<ContentAssociation %s>' % self.ca_id
1124 __all__.append('ContentAssociation')
1126 def insert_content_paths(binary_id, fullpaths, session=None):
1128 Make sure given path is associated with given binary id
1130 @type binary_id: int
1131 @param binary_id: the id of the binary
1132 @type fullpaths: list
1133 @param fullpaths: the list of paths of the file being associated with the binary
1134 @type session: SQLAlchemy session
1135 @param session: Optional SQLAlchemy session. If this is passed, the caller
1136 is responsible for ensuring a transaction has begun and committing the
1137 results or rolling back based on the result code. If not passed, a commit
1138 will be performed at the end of the function, otherwise the caller is
1139 responsible for commiting.
1141 @return: True upon success
1144 privatetrans = False
1146 session = DBConn().session()
1151 def generate_path_dicts():
1152 for fullpath in fullpaths:
1153 if fullpath.startswith( './' ):
1154 fullpath = fullpath[2:]
1156 yield {'filename':fullpath, 'id': binary_id }
1158 for d in generate_path_dicts():
1159 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
1168 traceback.print_exc()
1170 # Only rollback if we set up the session ourself
1177 __all__.append('insert_content_paths')
1179 ################################################################################
1181 class DSCFile(object):
1182 def __init__(self, *args, **kwargs):
1186 return '<DSCFile %s>' % self.dscfile_id
1188 __all__.append('DSCFile')
1191 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
1193 Returns a list of DSCFiles which may be empty
1195 @type dscfile_id: int (optional)
1196 @param dscfile_id: the dscfile_id of the DSCFiles to find
1198 @type source_id: int (optional)
1199 @param source_id: the source id related to the DSCFiles to find
1201 @type poolfile_id: int (optional)
1202 @param poolfile_id: the poolfile id related to the DSCFiles to find
1205 @return: Possibly empty list of DSCFiles
1208 q = session.query(DSCFile)
1210 if dscfile_id is not None:
1211 q = q.filter_by(dscfile_id=dscfile_id)
1213 if source_id is not None:
1214 q = q.filter_by(source_id=source_id)
1216 if poolfile_id is not None:
1217 q = q.filter_by(poolfile_id=poolfile_id)
1221 __all__.append('get_dscfiles')
1223 ################################################################################
1225 class PoolFile(ORMObject):
1226 def __init__(self, filename = None, location = None, filesize = -1, \
1228 self.filename = filename
1229 self.location = location
1230 self.filesize = filesize
1231 self.md5sum = md5sum
1235 return os.path.join(self.location.path, self.filename)
1237 def is_valid(self, filesize = -1, md5sum = None):
1238 return self.filesize == long(filesize) and self.md5sum == md5sum
1240 def properties(self):
1241 return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
1242 'sha256sum', 'location', 'source', 'binary', 'last_used']
1244 def not_null_constraints(self):
1245 return ['filename', 'md5sum', 'location']
1247 __all__.append('PoolFile')
def check_poolfile(filename, filesize, md5sum, location_id, session=None):
    """
    Returns a tuple:
    (ValidFileFound [boolean], PoolFile object or None)

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @type filesize: int
    @param filesize: the size of the file to check against the DB

    @type md5sum: string
    @param md5sum: the md5sum of the file to check against the DB

    @type location_id: int
    @param location_id: the id of the location to look in

    @rtype: tuple
    @return: Tuple of length 2.
                 - If valid pool file found: (C{True}, C{PoolFile object})
                 - If valid pool file not found:
                     - (C{False}, C{None}) if no file found
                     - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
    """

    location = session.query(Location).get(location_id)
    poolfile = location.files.filter_by(filename=filename).first()

    # None stays falsy; a found file is only "valid" when size and md5sum agree.
    valid = bool(poolfile and poolfile.is_valid(filesize=filesize, md5sum=md5sum))

    return (valid, poolfile)

__all__.append('check_poolfile')
1285 # TODO: the implementation can trivially be inlined at the place where the
1286 # function is called
def get_poolfile_by_id(file_id, session=None):
    """
    Returns a PoolFile objects or None for the given id

    @type file_id: int
    @param file_id: the id of the file to look for

    @rtype: PoolFile or None
    @return: either the PoolFile object or None
    """

    # Primary-key lookup; .get() returns None when no row matches.
    query = session.query(PoolFile)
    return query.get(file_id)

__all__.append('get_poolfile_by_id')
def get_poolfile_like_name(filename, session=None):
    """
    Returns an array of PoolFile objects which are like the given name

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @rtype: array
    @return: array of PoolFile objects
    """

    # TODO: There must be a way of properly using bind parameters with %FOO%
    pattern = '%%/%s' % filename
    matches = session.query(PoolFile).filter(PoolFile.filename.like(pattern))

    return matches.all()

__all__.append('get_poolfile_like_name')
def add_poolfile(filename, datadict, location_id, session=None):
    """
    Add a new file to the pool

    @type filename: string
    @param filename: filename

    @type datadict: dict
    @param datadict: dict with needed data

    @type location_id: int
    @param location_id: database id of the location

    @rtype: PoolFile
    @return: the PoolFile object created
    """

    poolfile = PoolFile()
    poolfile.filename = filename
    poolfile.location_id = location_id

    # Copy the checksum/size information out of the parsed data dict.
    for attribute, key in (('filesize', 'size'),
                           ('md5sum', 'md5sum'),
                           ('sha1sum', 'sha1sum'),
                           ('sha256sum', 'sha256sum')):
        setattr(poolfile, attribute, datadict[key])

    session.add(poolfile)
    # Flush to get a file id (NB: This is not a commit)
    session.flush()

    return poolfile

__all__.append('add_poolfile')
1355 ################################################################################
# ORM class for a PGP fingerprint row; relations (keyring, uid) come from the
# mapper. NOTE(review): this extract is line-sampled (numbers are original file
# line numbers); code is kept byte-identical, including the truncated
# properties() continuation.
1357 class Fingerprint(ORMObject):
1358     def __init__(self, fingerprint = None):
1359         self.fingerprint = fingerprint
1361     def properties(self):
1362     return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
1365     def not_null_constraints(self):
1366     return ['fingerprint']
1368 __all__.append('Fingerprint')
def get_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr or None
    """

    try:
        return session.query(Fingerprint).filter_by(fingerprint=fpr).one()
    except NoResultFound:
        return None

__all__.append('get_fingerprint')
def get_or_set_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    If no matching fpr is found, a row is inserted.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr
    """

    query = session.query(Fingerprint).filter_by(fingerprint=fpr)

    try:
        return query.one()
    except NoResultFound:
        # Not there yet: insert and make it visible to this session.
        fingerprint = Fingerprint()
        fingerprint.fingerprint = fpr
        session.add(fingerprint)
        session.commit_or_flush()
        return fingerprint

__all__.append('get_or_set_fingerprint')
1432 ################################################################################
1434 # Helper routine for Keyring class
def get_ldap_name(entry):
    """Build a display name from the cn/mn/sn attributes of an LDAP entry."""
    parts = []
    for attribute in ("cn", "mn", "sn"):
        values = entry.get(attribute)
        # Skip absent attributes and the "-" placeholder used for "no value".
        if values and values[0] not in ("", "-"):
            parts.append(values[0])
    return " ".join(parts)
1443 ################################################################################
# In-memory model of a GPG keyring: parses `gpg --with-colons` output into
# self.keys / self.fpr_lookup, and can sync uids from LDAP or generate them.
# NOTE(review): this extract is line-sampled (leading numbers are original file
# line numbers); code below is kept byte-identical.
1445 class Keyring(object):
1446     gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
1447                      " --with-colons --fingerprint --fingerprint"
1452     def __init__(self, *args, **kwargs):
1456     return '<Keyring %s>' % self.keyring_name
# Undo gpg's \xNN escaping: every odd element of the split is a "\xNN" token.
1458     def de_escape_gpg_str(self, txt):
1459     esclist = re.split(r'(\\x..)', txt)
1460     for x in range(1,len(esclist),2):
1461     esclist[x] = "%c" % (int(esclist[x][2:],16))
1462     return "".join(esclist)
1464     def parse_address(self, uid):
1465     """parses uid and returns a tuple of real name and email address"""
1467     (name, address) = email.Utils.parseaddr(uid)
1468     name = re.sub(r"\s*[(].*[)]", "", name)
1469     name = self.de_escape_gpg_str(name)
1472     return (name, address)
# Parse colon-separated gpg output; "pub" starts a key, "sub"/"uid"/"fpr"
# records attach to the most recently seen key.
1474     def load_keys(self, keyring):
1475     if not self.keyring_id:
1476     raise Exception('Must be initialized with database information')
1478     k = os.popen(self.gpg_invocation % keyring, "r")
1482     for line in k.xreadlines():
1483     field = line.split(":")
1484     if field[0] == "pub":
1487     (name, addr) = self.parse_address(field[9])
1489     self.keys[key]["email"] = addr
1490     self.keys[key]["name"] = name
1491     self.keys[key]["fingerprints"] = []
# field[11] holds the capability flags; "s" means the subkey can sign.
1493     elif key and field[0] == "sub" and len(field) >= 12:
1494     signingkey = ("s" in field[11])
1495     elif key and field[0] == "uid":
1496     (name, addr) = self.parse_address(field[9])
1497     if "email" not in self.keys[key] and "@" in addr:
1498     self.keys[key]["email"] = addr
1499     self.keys[key]["name"] = name
# Only fingerprints of signing-capable keys are indexed.
1500     elif signingkey and field[0] == "fpr":
1501     self.keys[key]["fingerprints"].append(field[9])
1502     self.fpr_lookup[field[9]] = key
# Anonymous-bind LDAP search for all entries with a key fingerprint and the
# configured GID; returns (byname, byuid) uid maps.
1504     def import_users_from_ldap(self, session):
1508     LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
1509     LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
1511     l = ldap.open(LDAPServer)
1512     l.simple_bind_s("","")
1513     Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
1514     "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
1515     ["uid", "keyfingerprint", "cn", "mn", "sn"])
1517     ldap_fin_uid_id = {}
1524     uid = entry["uid"][0]
1525     name = get_ldap_name(entry)
1526     fingerprints = entry["keyFingerPrint"]
1528     for f in fingerprints:
1529     key = self.fpr_lookup.get(f, None)
1530     if key not in self.keys:
1532     self.keys[key]["uid"] = uid
1536     keyid = get_or_set_uid(uid, session).uid_id
1537     byuid[keyid] = (uid, name)
1538     byname[uid] = (keyid, name)
1540     return (byname, byuid)
# Derive uids from key email addresses using the given format string; keys
# without an email share one "invalid-uid" placeholder entry.
1542     def generate_users_from_keyring(self, format, session):
1546     for x in self.keys.keys():
1547     if "email" not in self.keys[x]:
1549     self.keys[x]["uid"] = format % "invalid-uid"
1551     uid = format % self.keys[x]["email"]
1552     keyid = get_or_set_uid(uid, session).uid_id
1553     byuid[keyid] = (uid, self.keys[x]["name"])
1554     byname[uid] = (keyid, self.keys[x]["name"])
1555     self.keys[x]["uid"] = uid
1558     uid = format % "invalid-uid"
1559     keyid = get_or_set_uid(uid, session).uid_id
1560     byuid[keyid] = (uid, "ungeneratable user id")
1561     byname[uid] = (keyid, "ungeneratable user id")
1563     return (byname, byuid)
1565 __all__.append('Keyring')
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @rtype: Keyring
    @return: the Keyring object for this keyring
    """

    try:
        return session.query(Keyring).filter_by(keyring_name=keyring).one()
    except NoResultFound:
        return None

__all__.append('get_keyring')
1589 ################################################################################
1591 class KeyringACLMap(object):
1592 def __init__(self, *args, **kwargs):
1596 return '<KeyringACLMap %s>' % self.keyring_acl_map_id
1598 __all__.append('KeyringACLMap')
1600 ################################################################################
# ORM class for an uploaded .changes file. NOTE(review): extract is
# line-sampled (leading numbers are original file line numbers); code is kept
# byte-identical — the statements clearing the file associations are elided.
1602 class DBChange(object):
1603     def __init__(self, *args, **kwargs):
1607     return '<DBChange %s>' % self.changesname
# Detach this change from its policy queue and drop its file associations.
1609     def clean_from_queue(self):
1610     session = DBConn().session().object_session(self)
1612     # Remove changes_pool_files entries
1615     # Remove changes_pending_files references
1618     # Clear out of queue
1619     self.in_queue = None
1620     self.approved_for_id = None
1622 __all__.append('DBChange')
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return: DBChange object for the given filename (C{None} if not present)
    """

    try:
        return session.query(DBChange).filter_by(changesname=filename).one()
    except NoResultFound:
        return None

__all__.append('get_dbchange')
1649 ################################################################################
# ORM class for a pool location (path + component + archive type).
# NOTE(review): extract is line-sampled (leading numbers are original file line
# numbers); code is kept byte-identical — the self.path assignment and the
# properties() continuation are elided.
1651 class Location(ORMObject):
1652     def __init__(self, path = None, component = None):
1654     self.component = component
1655     # the column 'type' should go away, see comment at mapper
1656     self.archive_type = 'pool'
1658     def properties(self):
1659     return ['path', 'location_id', 'archive_type', 'component', \
1662     def not_null_constraints(self):
1663     return ['path', 'archive_type']
1665 __all__.append('Location')
def get_location(location, component=None, archive=None, session=None):
    """
    Returns Location object for the given combination of location, component
    and archive

    @type location: string
    @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}

    @type component: string
    @param component: the component name (if None, no restriction applied)

    @type archive: string
    @param archive: the archive name (if None, no restriction applied)

    @rtype: Location / None
    @return: Either a Location object or None if one can't be found
    """

    query = session.query(Location).filter_by(path=location)

    if archive is not None:
        query = query.join(Archive).filter_by(archive_name=archive)

    if component is not None:
        query = query.join(Component).filter_by(component_name=component)

    try:
        return query.one()
    except NoResultFound:
        return None

__all__.append('get_location')
1701 ################################################################################
class Maintainer(ORMObject):
    """ORM class for a maintainer name/email string."""

    def __init__(self, name = None):
        self.name = name

    def properties(self):
        return ['name', 'maintainer_id']

    def not_null_constraints(self):
        return ['name']

    def get_split_maintainer(self):
        """Split the stored name via fix_maintainer(); empty tuple if unset."""
        if getattr(self, 'name', None) is None:
            return ('', '', '', '')
        return fix_maintainer(self.name.strip())

__all__.append('Maintainer')
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """

    query = session.query(Maintainer).filter_by(name=name)

    try:
        return query.one()
    except NoResultFound:
        # Not there yet: insert and make it visible to this session.
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()
        return maintainer

__all__.append('get_or_set_maintainer')
def get_maintainer(maintainer_id, session=None):
    """
    Return the name of the maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer
    @return: the Maintainer with this C{maintainer_id}
    """

    # Primary-key lookup; .get() yields None for an unknown id.
    query = session.query(Maintainer)
    return query.get(maintainer_id)

__all__.append('get_maintainer')
1772 ################################################################################
1774 class NewComment(object):
1775 def __init__(self, *args, **kwargs):
1779 return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
1781 __all__.append('NewComment')
def has_new_comment(package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """

    q = session.query(NewComment)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    # `count() > 0` is already a boolean; the previous bool(...) wrapper was
    # redundant.
    return q.count() > 0

__all__.append('has_new_comment')
def get_new_comments(package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """

    query = session.query(NewComment)

    # Apply only the filters the caller actually supplied.
    for column, value in (('package', package),
                          ('version', version),
                          ('comment_id', comment_id)):
        if value is not None:
            query = query.filter_by(**{column: value})

    return query.all()

__all__.append('get_new_comments')
1842 ################################################################################
class Override(ORMObject):
    """ORM class tying a package to its suite/component/type/section/priority."""

    def __init__(self, package = None, suite = None, component = None, overridetype = None, \
        section = None, priority = None):
        self.package = package
        self.suite = suite
        self.component = component
        self.overridetype = overridetype
        self.section = section
        self.priority = priority

    def properties(self):
        return ['package', 'suite', 'component', 'overridetype', 'section', \
            'priority']

    def not_null_constraints(self):
        return ['package', 'suite', 'component', 'overridetype', 'section']

__all__.append('Override')
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to.  If
                  None, don't limit.  Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
                      limit to.  If None, don't limit.  Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
                         limit to.  If None, don't limit.  Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """

    def _as_list(value):
        # Accept a bare string or an existing list.
        return value if isinstance(value, list) else [value]

    query = session.query(Override).filter_by(package=package)

    if suite is not None:
        query = query.join(Suite).filter(Suite.suite_name.in_(_as_list(suite)))

    if component is not None:
        query = query.join(Component).filter(Component.component_name.in_(_as_list(component)))

    if overridetype is not None:
        query = query.join(OverrideType).filter(OverrideType.overridetype.in_(_as_list(overridetype)))

    return query.all()

__all__.append('get_override')
1911 ################################################################################
class OverrideType(ORMObject):
    """ORM class for an override type name."""

    def __init__(self, overridetype = None):
        self.overridetype = overridetype

    def properties(self):
        return ['overridetype', 'overridetype_id', 'overrides_count']

    def not_null_constraints(self):
        return ['overridetype']

__all__.append('OverrideType')
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: OverrideType
    @return: the database id for the given override type
    """

    try:
        return session.query(OverrideType).filter_by(overridetype=override_type).one()
    except NoResultFound:
        return None

__all__.append('get_override_type')
1950 ################################################################################
1952 class PolicyQueue(object):
1953 def __init__(self, *args, **kwargs):
1957 return '<PolicyQueue %s>' % self.queue_name
1959 __all__.append('PolicyQueue')
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """

    try:
        return session.query(PolicyQueue).filter_by(queue_name=queuename).one()
    except NoResultFound:
        return None

__all__.append('get_policy_queue')
def get_policy_queue_from_path(pathname, session=None):
    """
    Returns PolicyQueue object for given C{path name}

    @type pathname: string
    @param pathname: The path

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    # Docstring fixed: it previously documented the parameter as "queuename"
    # although the actual parameter is `pathname`.

    q = session.query(PolicyQueue).filter_by(path=pathname)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_policy_queue_from_path')
2011 ################################################################################
class Priority(ORMObject):
    """ORM class for a package priority (name plus sort level)."""

    def __init__(self, priority = None, level = None):
        self.priority = priority
        self.level = level

    def properties(self):
        return ['priority', 'priority_id', 'level', 'overrides_count']

    def not_null_constraints(self):
        return ['priority', 'level']

    def __eq__(self, val):
        # Allow comparing directly against a priority name string.
        if isinstance(val, str):
            return self.priority == val
        # Defer to the default comparison for anything else.
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return self.priority != val
        return NotImplemented

__all__.append('Priority')
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority
    """

    try:
        return session.query(Priority).filter_by(priority=priority).one()
    except NoResultFound:
        return None

__all__.append('get_priority')
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """

    return dict((entry.priority, entry.priority_id)
                for entry in session.query(Priority).all())

__all__.append('get_priorities')
2085 ################################################################################
class Section(ORMObject):
    """ORM class for an archive section name."""

    def __init__(self, section = None):
        self.section = section

    def properties(self):
        return ['section', 'section_id', 'overrides_count']

    def not_null_constraints(self):
        return ['section']

    def __eq__(self, val):
        # Allow comparing directly against a section name string.
        if isinstance(val, str):
            return self.section == val
        # Defer to the default comparison for anything else.
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return self.section != val
        return NotImplemented

__all__.append('Section')
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name
    """

    try:
        return session.query(Section).filter_by(section=section).one()
    except NoResultFound:
        return None

__all__.append('get_section')
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """

    return dict((entry.section, entry.section_id)
                for entry in session.query(Section).all())

__all__.append('get_sections')
2158 ################################################################################
class DBSource(ORMObject):
    """ORM class for a source package in the archive."""

    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date

    def properties(self):
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \
            'install_date', 'binaries_count']

    def not_null_constraints(self):
        # Fixed: 'install_date' was listed twice in this list.
        return ['source', 'version', 'install_date', 'maintainer', \
            'changedby', 'poolfile']

    # Expose the key/value metadata rows as a plain dict-like proxy.
    metadata = association_proxy('key', 'value')

__all__.append('DBSource')
def source_exists(source, source_version, suites = None, session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
      1. exact match     => 1.0-3
      2. bin-only NMU    => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: list
    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: returns 1 if a source with expected version is found, otherwise 0
    """

    # Fixed: avoid a mutable default argument; None now stands in for the
    # previous default of ["any"] with identical behaviour.
    if suites is None:
        suites = ["any"]

    cnf = Config()
    ret = True

    # Strip any bin-only-NMU suffix so 1.0-3+b1 also matches source 1.0-3.
    from daklib.regexes import re_bin_only_nmu
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))
        if suite != "any":
            # source must exist in suite X, or in some other suite that's
            # mapped to X, recursively... silent-maps are counted too,
            # unreleased-maps aren't.
            maps = cnf.ValueList("SuiteMappings")[:]
            maps.reverse()
            maps = [ m.split() for m in maps ]
            maps = [ (x[1], x[2]) for x in maps
                            if x[0] == "map" or x[0] == "silent-map" ]
            s = [suite]
            for (from_, to) in maps:
                if from_ in s and to not in s:
                    s.append(to)

            q = q.filter(DBSource.suites.any(Suite.suite_name.in_(s)))

        if q.count() > 0:
            continue

        # No source found so return not ok
        ret = False

    return ret

__all__.append('source_exists')
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """

    query = session.query(Suite).filter(Suite.sources.any(source=source))
    return query.all()

__all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @type source: str
    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect.  If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBSource objects for the given name (may be empty)
    """

    query = session.query(DBSource).filter_by(source=source)

    # Apply only the optional narrowing criteria the caller supplied.
    for column, value in (('version', version),
                          ('dm_upload_allowed', dm_upload_allowed)):
        if value is not None:
            query = query.filter_by(**{column: value})

    return query.all()

__all__.append('get_sources_from_name')
2295 # FIXME: This function fails badly if it finds more than 1 source package and
2296 # its implementation is trivial enough to be inlined.
def get_source_in_suite(source, suite, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite: string
    @param suite: the suite name

    @rtype: string
    @return: the version for I{source} in I{suite}
    """

    query = get_suite(suite, session).get_sources(source)
    try:
        return query.one()
    except NoResultFound:
        return None

__all__.append('get_source_in_suite')
2324 ################################################################################
# Insert a source upload (.dsc and its files) into the DB: creates the DBSource
# row, its poolfile(s), dsc_files rows and src_uploaders entries, then returns
# (source, dsc_component, dsc_location_id, pfs). NOTE(review): extract is
# line-sampled (leading numbers are original file line numbers); code is kept
# byte-identical — several assignments/loops are elided.
2327 def add_dsc_to_db(u, filename, session=None):
2328     entry = u.pkg.files[filename]
2332     source.source = u.pkg.dsc["source"]
2333     source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
2334     source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
2335     source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
2336     source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
2337     source.install_date = datetime.now().date()
2339     dsc_component = entry["component"]
2340     dsc_location_id = entry["location id"]
2342     source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")
2344     # Set up a new poolfile if necessary
2345     if not entry.has_key("files id") or not entry["files id"]:
2346     filename = entry["pool name"] + filename
2347     poolfile = add_poolfile(filename, entry, dsc_location_id, session)
2349     pfs.append(poolfile)
2350     entry["files id"] = poolfile.file_id
2352     source.poolfile_id = entry["files id"]
# Attach the source to every suite listed in the .changes distribution field.
2355     suite_names = u.pkg.changes["distribution"].keys()
2356     source.suites = session.query(Suite). \
2357     filter(Suite.suite_name.in_(suite_names)).all()
2359     # Add the source files to the DB (files and dsc_files)
2361     dscfile.source_id = source.source_id
2362     dscfile.poolfile_id = entry["files id"]
2363     session.add(dscfile)
2365     for dsc_file, dentry in u.pkg.dsc_files.items():
2367     df.source_id = source.source_id
2369     # If the .orig tarball is already in the pool, it's
2370     # files id is stored in dsc_files by check_dsc().
2371     files_id = dentry.get("files id", None)
2373     # Find the entry in the files hash
2374     # TODO: Bail out here properly
2376     for f, e in u.pkg.files.items():
2381     if files_id is None:
2382     filename = dfentry["pool name"] + dsc_file
2384     (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
2385     # FIXME: needs to check for -1/-2 and or handle exception
2386     if found and obj is not None:
2387     files_id = obj.file_id
2390     # If still not found, add it
2391     if files_id is None:
2392     # HACK: Force sha1sum etc into dentry
2393     dentry["sha1sum"] = dfentry["sha1sum"]
2394     dentry["sha256sum"] = dfentry["sha256sum"]
2395     poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
2396     pfs.append(poolfile)
2397     files_id = poolfile.file_id
2399     poolfile = get_poolfile_by_id(files_id, session)
2400     if poolfile is None:
2401     utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
2402     pfs.append(poolfile)
2404     df.poolfile_id = files_id
2407     # Add the src_uploaders to the DB
2408     uploader_ids = [source.maintainer_id]
2409     if u.pkg.dsc.has_key("uploaders"):
# Uploaders are comma-separated; ">, " is rewritten to a tab so commas inside
# quoted names don't split entries.
2410     for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
2412     uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)
2415     for up_id in uploader_ids:
2416     if added_ids.has_key(up_id):
2418     utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))
2424     su.maintainer_id = up_id
2425     su.source_id = source.source_id
2430     return source, dsc_component, dsc_location_id, pfs
2432 __all__.append('add_dsc_to_db')
# Insert a binary upload into the DB. NOTE(review): extract is line-sampled
# (leading numbers are original file line numbers); code is kept
# byte-identical — it uses Python 2 `raise X, msg` syntax and elides several
# lines (e.g. the DBBinary construction).
2435 def add_deb_to_db(u, filename, session=None):
2437     Contrary to what you might expect, this routine deals with both
2438     debs and udebs.  That info is in 'dbtype', whilst 'type' is
2439     'deb' for both of them
2442     entry = u.pkg.files[filename]
2445     bin.package = entry["package"]
2446     bin.version = entry["version"]
2447     bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
2448     bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
2449     bin.arch_id = get_architecture(entry["architecture"], session).arch_id
2450     bin.binarytype = entry["dbtype"]
# Resolve (or create) the pool file backing this binary.
2453     filename = entry["pool name"] + filename
2454     fullpath = os.path.join(cnf["Dir::Pool"], filename)
2455     if not entry.get("location id", None):
2456     entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id
2458     if entry.get("files id", None):
2459     poolfile = get_poolfile_by_id(bin.poolfile_id)
2460     bin.poolfile_id = entry["files id"]
2462     poolfile = add_poolfile(filename, entry, entry["location id"], session)
2463     bin.poolfile_id = entry["files id"] = poolfile.file_id
# Every binary must map to exactly one source package.
2466     bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
2467     if len(bin_sources) != 1:
2468     raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
2469     (bin.package, bin.version, entry["architecture"],
2470     filename, bin.binarytype, u.pkg.changes["fingerprint"])
2472     bin.source_id = bin_sources[0].source_id
# Record Built-Using references; each must also resolve to a unique source.
2474     if entry.has_key("built-using"):
2475     for srcname, version in entry["built-using"]:
2476     exsources = get_sources_from_name(srcname, version, session=session)
2477     if len(exsources) != 1:
2478     raise NoSourceFieldError, "Unable to find source package (%s = %s) in Built-Using for %s (%s), %s, file %s, type %s, signed by %s" % \
2479     (srcname, version, bin.package, bin.version, entry["architecture"],
2480     filename, bin.binarytype, u.pkg.changes["fingerprint"])
2482     bin.extra_sources.append(exsources[0])
2484     # Add and flush object so it has an ID
2487     suite_names = u.pkg.changes["distribution"].keys()
2488     bin.suites = session.query(Suite). \
2489     filter(Suite.suite_name.in_(suite_names)).all()
2493     # Deal with contents - disabled for now
2494     #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
2496     #    print "REJECT\nCould not determine contents of package %s" % bin.package
2497     #    session.rollback()
2498     #    raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
2502 __all__.append('add_deb_to_db')
2504 ################################################################################
class SourceACL(object):
    """ACL row controlling which source packages a key may upload.

    Thin ORM placeholder: all columns are attached by the mapper in
    DBConn.__setupmappers(); the class itself carries no logic.
    """
    def __init__(self, *args, **kwargs):
        # Attributes (e.g. source_acl_id) are populated by SQLAlchemy.
        pass

    def __repr__(self):
        return '<SourceACL %s>' % self.source_acl_id
2513 __all__.append('SourceACL')
2515 ################################################################################
class SrcFormat(object):
    """A source package format (e.g. '1.0', '3.0 (quilt)').

    Thin ORM placeholder: columns such as format_name are attached by the
    mapper in DBConn.__setupmappers().
    """
    def __init__(self, *args, **kwargs):
        # Attributes are populated by SQLAlchemy.
        pass

    def __repr__(self):
        return '<SrcFormat %s>' % (self.format_name)
2524 __all__.append('SrcFormat')
2526 ################################################################################
class SrcUploader(object):
    """An uploader (maintainer) entry associated with a source package.

    Thin ORM placeholder: columns such as uploader_id are attached by the
    mapper in DBConn.__setupmappers().
    """
    def __init__(self, *args, **kwargs):
        # Attributes are populated by SQLAlchemy.
        pass

    def __repr__(self):
        return '<SrcUploader %s>' % self.uploader_id
2535 __all__.append('SrcUploader')
2537 ################################################################################
# Mapping of human-readable display names to Suite attribute names; iterated
# by Suite.__str__ to render a suite as "Display: value" lines.
# NOTE(review): the entry between Origin and Description was lost in this
# copy of the file; ('Label', 'label') restored from upstream dak — confirm.
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Label', 'label'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
2555 # Why the heck don't we have any UNIQUE constraints in table suite?
2556 # TODO: Add UNIQUE constraints for appropriate columns.
2557 class Suite(ORMObject):
2558 def __init__(self, suite_name = None, version = None):
2559 self.suite_name = suite_name
2560 self.version = version
2562 def properties(self):
2563 return ['suite_name', 'version', 'sources_count', 'binaries_count', \
2566 def not_null_constraints(self):
2567 return ['suite_name', 'version']
2569 def __eq__(self, val):
2570 if isinstance(val, str):
2571 return (self.suite_name == val)
2572 # This signals to use the normal comparison operator
2573 return NotImplemented
2575 def __ne__(self, val):
2576 if isinstance(val, str):
2577 return (self.suite_name != val)
2578 # This signals to use the normal comparison operator
2579 return NotImplemented
2583 for disp, field in SUITE_FIELDS:
2584 val = getattr(self, field, None)
2586 ret.append("%s: %s" % (disp, val))
2588 return "\n".join(ret)
2590 def get_architectures(self, skipsrc=False, skipall=False):
2592 Returns list of Architecture objects
2594 @type skipsrc: boolean
2595 @param skipsrc: Whether to skip returning the 'source' architecture entry
2598 @type skipall: boolean
2599 @param skipall: Whether to skip returning the 'all' architecture entry
2603 @return: list of Architecture objects for the given name (may be empty)
2606 q = object_session(self).query(Architecture).with_parent(self)
2608 q = q.filter(Architecture.arch_string != 'source')
2610 q = q.filter(Architecture.arch_string != 'all')
2611 return q.order_by(Architecture.arch_string).all()
2613 def get_sources(self, source):
2615 Returns a query object representing DBSource that is part of C{suite}.
2617 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2619 @type source: string
2620 @param source: source package name
2622 @rtype: sqlalchemy.orm.query.Query
2623 @return: a query of DBSource
2627 session = object_session(self)
2628 return session.query(DBSource).filter_by(source = source). \
2631 __all__.append('Suite')
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Suite
    @return: Suite object for the requested suite name (None if not present)
    """
    # NOTE(review): presumably decorated with @session_wrapper on the line
    # above (elided in this copy) so that session is always set — confirm.

    q = session.query(Suite).filter_by(suite_name=suite)

    try:
        return q.one()
    except NoResultFound:
        return None
2656 __all__.append('get_suite')
2658 ################################################################################
2660 # TODO: should be removed because the implementation is too trivial
2662 def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
2664 Returns list of Architecture objects for given C{suite} name
2667 @param suite: Suite name to search for
2669 @type skipsrc: boolean
2670 @param skipsrc: Whether to skip returning the 'source' architecture entry
2673 @type skipall: boolean
2674 @param skipall: Whether to skip returning the 'all' architecture entry
2677 @type session: Session
2678 @param session: Optional SQL session object (a temporary one will be
2679 generated if not supplied)
2682 @return: list of Architecture objects for the given name (may be empty)
2685 return get_suite(suite, session).get_architectures(skipsrc, skipall)
2687 __all__.append('get_suite_architectures')
2689 ################################################################################
class SuiteSrcFormat(object):
    """Association row: which source formats are allowed in which suite.

    Thin ORM placeholder: suite_id and src_format_id are attached by the
    mapper in DBConn.__setupmappers().
    """
    def __init__(self, *args, **kwargs):
        # Attributes are populated by SQLAlchemy.
        pass

    def __repr__(self):
        return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
2698 __all__.append('SuiteSrcFormat')
def get_suite_src_formats(suite, session=None):
    """
    Returns list of allowed SrcFormat for C{suite}.

    @type suite: str
    @param suite: Suite name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: the list of allowed source formats for I{suite}
    """
    # NOTE(review): presumably decorated with @session_wrapper (elided line
    # above) so that session is always set — confirm.

    q = session.query(SrcFormat)
    q = q.join(SuiteSrcFormat)
    q = q.join(Suite).filter_by(suite_name=suite)
    q = q.order_by('format_name')

    return q.all()
2723 __all__.append('get_suite_src_formats')
2725 ################################################################################
class Uid(ORMObject):
    """A GPG key user id as stored in table uid."""

    def __init__(self, uid = None, name = None):
        self.uid = uid
        self.name = name

    def __eq__(self, val):
        # Allow comparing a Uid directly against its uid string.
        if isinstance(val, str):
            return (self.uid == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.uid != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        return ['uid', 'name', 'fingerprint']

    def not_null_constraints(self):
        # NOTE(review): return value reconstructed from upstream dak — confirm.
        return ['uid']
2750 __all__.append('Uid')
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: Uid
    @return: the uid object for the given uidname
    """

    q = session.query(Uid).filter_by(uid=uidname)

    try:
        ret = q.one()
    except NoResultFound:
        # Not present yet: insert a new row and hand it back.
        uid = Uid()
        uid.uid = uidname
        session.add(uid)
        session.commit_or_flush()
        ret = uid

    return ret
2784 __all__.append('get_or_set_uid')
def get_uid_from_fingerprint(fpr, session=None):
    """
    Returns the Uid associated with the given fingerprint string.

    @type fpr: string
    @param fpr: The fingerprint to look up

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: Uid
    @return: the Uid for the fingerprint (None if not found)
    """
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)

    try:
        return q.one()
    except NoResultFound:
        return None
2796 __all__.append('get_uid_from_fingerprint')
2798 ################################################################################
class UploadBlock(object):
    """A row blocking uploads of a given source package.

    Thin ORM placeholder: columns such as source and upload_block_id are
    attached by the mapper in DBConn.__setupmappers().
    """
    def __init__(self, *args, **kwargs):
        # Attributes are populated by SQLAlchemy.
        pass

    def __repr__(self):
        return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
2807 __all__.append('UploadBlock')
2809 ################################################################################
class MetadataKey(ORMObject):
    """A metadata field name (e.g. 'Depends') shared by binary and source
    metadata tables."""

    def __init__(self, key = None):
        self.key = key

    def properties(self):
        return ['key']

    def not_null_constraints(self):
        return ['key']
2821 __all__.append('MetadataKey')
2823 ################################################################################
class BinaryMetadata(ORMObject):
    """A (key, value) metadata entry attached to a binary package."""

    def __init__(self, key = None, value = None, binary = None):
        self.key = key
        self.value = value
        self.binary = binary

    def properties(self):
        return ['binary', 'key', 'value']

    def not_null_constraints(self):
        # NOTE(review): return value reconstructed from upstream dak — confirm.
        return ['value']
2837 __all__.append('BinaryMetadata')
2839 ################################################################################
class SourceMetadata(ORMObject):
    """A (key, value) metadata entry attached to a source package."""

    def __init__(self, key = None, value = None, source = None):
        self.key = key
        self.value = value
        self.source = source

    def properties(self):
        return ['source', 'key', 'value']

    def not_null_constraints(self):
        # NOTE(review): return value reconstructed from upstream dak — confirm.
        return ['value']
2853 __all__.append('SourceMetadata')
2855 ################################################################################
2857 class DBConn(object):
2859 database module init.
2863 def __init__(self, *args, **kwargs):
2864 self.__dict__ = self.__shared_state
2866 if not getattr(self, 'initialised', False):
2867 self.initialised = True
2868 self.debug = kwargs.has_key('debug')
2871 def __setuptables(self):
2878 'binaries_metadata',
2882 'build_queue_files',
2887 'changes_pending_binaries',
2888 'changes_pending_files',
2889 'changes_pending_source',
2890 'changes_pending_files_map',
2891 'changes_pending_source_files',
2892 'changes_pool_files',
2894 'extra_src_references',
2903 # TODO: the maintainer column in table override should be removed.
2916 'suite_architectures',
2917 'suite_build_queue_copy',
2918 'suite_src_formats',
2924 'almost_obsolete_all_associations',
2925 'almost_obsolete_src_associations',
2926 'any_associations_source',
2927 'bin_assoc_by_arch',
2928 'bin_associations_binaries',
2929 'binaries_suite_arch',
2930 'binfiles_suite_component_arch',
2933 'newest_all_associations',
2934 'newest_any_associations',
2936 'newest_src_association',
2937 'obsolete_all_associations',
2938 'obsolete_any_associations',
2939 'obsolete_any_by_all_associations',
2940 'obsolete_src_associations',
2942 'src_associations_bin',
2943 'src_associations_src',
2944 'suite_arch_by_name',
2947 for table_name in tables:
2948 table = Table(table_name, self.db_meta, \
2949 autoload=True, useexisting=True)
2950 setattr(self, 'tbl_%s' % table_name, table)
2952 for view_name in views:
2953 view = Table(view_name, self.db_meta, autoload=True)
2954 setattr(self, 'view_%s' % view_name, view)
2956 def __setupmappers(self):
2957 mapper(Architecture, self.tbl_architecture,
2958 properties = dict(arch_id = self.tbl_architecture.c.id,
2959 suites = relation(Suite, secondary=self.tbl_suite_architectures,
2960 order_by='suite_name',
2961 backref=backref('architectures', order_by='arch_string'))),
2962 extension = validator)
2964 mapper(Archive, self.tbl_archive,
2965 properties = dict(archive_id = self.tbl_archive.c.id,
2966 archive_name = self.tbl_archive.c.name))
2968 mapper(BuildQueue, self.tbl_build_queue,
2969 properties = dict(queue_id = self.tbl_build_queue.c.id))
2971 mapper(BuildQueueFile, self.tbl_build_queue_files,
2972 properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
2973 poolfile = relation(PoolFile, backref='buildqueueinstances')))
2975 mapper(DBBinary, self.tbl_binaries,
2976 properties = dict(binary_id = self.tbl_binaries.c.id,
2977 package = self.tbl_binaries.c.package,
2978 version = self.tbl_binaries.c.version,
2979 maintainer_id = self.tbl_binaries.c.maintainer,
2980 maintainer = relation(Maintainer),
2981 source_id = self.tbl_binaries.c.source,
2982 source = relation(DBSource, backref='binaries'),
2983 arch_id = self.tbl_binaries.c.architecture,
2984 architecture = relation(Architecture),
2985 poolfile_id = self.tbl_binaries.c.file,
2986 poolfile = relation(PoolFile, backref=backref('binary', uselist = False)),
2987 binarytype = self.tbl_binaries.c.type,
2988 fingerprint_id = self.tbl_binaries.c.sig_fpr,
2989 fingerprint = relation(Fingerprint),
2990 install_date = self.tbl_binaries.c.install_date,
2991 suites = relation(Suite, secondary=self.tbl_bin_associations,
2992 backref=backref('binaries', lazy='dynamic')),
2993 extra_sources = relation(DBSource, secondary=self.tbl_extra_src_references,
2994 backref=backref('extra_binary_references', lazy='dynamic')),
2995 key = relation(BinaryMetadata,
2996 collection_class=attribute_mapped_collection('key'))),
2997 extension = validator)
2999 mapper(BinaryACL, self.tbl_binary_acl,
3000 properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))
3002 mapper(BinaryACLMap, self.tbl_binary_acl_map,
3003 properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
3004 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
3005 architecture = relation(Architecture)))
3007 mapper(Component, self.tbl_component,
3008 properties = dict(component_id = self.tbl_component.c.id,
3009 component_name = self.tbl_component.c.name),
3010 extension = validator)
3012 mapper(DBConfig, self.tbl_config,
3013 properties = dict(config_id = self.tbl_config.c.id))
3015 mapper(DSCFile, self.tbl_dsc_files,
3016 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
3017 source_id = self.tbl_dsc_files.c.source,
3018 source = relation(DBSource),
3019 poolfile_id = self.tbl_dsc_files.c.file,
3020 poolfile = relation(PoolFile)))
3022 mapper(PoolFile, self.tbl_files,
3023 properties = dict(file_id = self.tbl_files.c.id,
3024 filesize = self.tbl_files.c.size,
3025 location_id = self.tbl_files.c.location,
3026 location = relation(Location,
3027 # using lazy='dynamic' in the back
3028 # reference because we have A LOT of
3029 # files in one location
3030 backref=backref('files', lazy='dynamic'))),
3031 extension = validator)
3033 mapper(Fingerprint, self.tbl_fingerprint,
3034 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
3035 uid_id = self.tbl_fingerprint.c.uid,
3036 uid = relation(Uid),
3037 keyring_id = self.tbl_fingerprint.c.keyring,
3038 keyring = relation(Keyring),
3039 source_acl = relation(SourceACL),
3040 binary_acl = relation(BinaryACL)),
3041 extension = validator)
3043 mapper(Keyring, self.tbl_keyrings,
3044 properties = dict(keyring_name = self.tbl_keyrings.c.name,
3045 keyring_id = self.tbl_keyrings.c.id))
3047 mapper(DBChange, self.tbl_changes,
3048 properties = dict(change_id = self.tbl_changes.c.id,
3049 poolfiles = relation(PoolFile,
3050 secondary=self.tbl_changes_pool_files,
3051 backref="changeslinks"),
3052 seen = self.tbl_changes.c.seen,
3053 source = self.tbl_changes.c.source,
3054 binaries = self.tbl_changes.c.binaries,
3055 architecture = self.tbl_changes.c.architecture,
3056 distribution = self.tbl_changes.c.distribution,
3057 urgency = self.tbl_changes.c.urgency,
3058 maintainer = self.tbl_changes.c.maintainer,
3059 changedby = self.tbl_changes.c.changedby,
3060 date = self.tbl_changes.c.date,
3061 version = self.tbl_changes.c.version,
3062 files = relation(ChangePendingFile,
3063 secondary=self.tbl_changes_pending_files_map,
3064 backref="changesfile"),
3065 in_queue_id = self.tbl_changes.c.in_queue,
3066 in_queue = relation(PolicyQueue,
3067 primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
3068 approved_for_id = self.tbl_changes.c.approved_for))
3070 mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
3071 properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))
3073 mapper(ChangePendingFile, self.tbl_changes_pending_files,
3074 properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
3075 filename = self.tbl_changes_pending_files.c.filename,
3076 size = self.tbl_changes_pending_files.c.size,
3077 md5sum = self.tbl_changes_pending_files.c.md5sum,
3078 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
3079 sha256sum = self.tbl_changes_pending_files.c.sha256sum))
3081 mapper(ChangePendingSource, self.tbl_changes_pending_source,
3082 properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
3083 change = relation(DBChange),
3084 maintainer = relation(Maintainer,
3085 primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
3086 changedby = relation(Maintainer,
3087 primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
3088 fingerprint = relation(Fingerprint),
3089 source_files = relation(ChangePendingFile,
3090 secondary=self.tbl_changes_pending_source_files,
3091 backref="pending_sources")))
3094 mapper(KeyringACLMap, self.tbl_keyring_acl_map,
3095 properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
3096 keyring = relation(Keyring, backref="keyring_acl_map"),
3097 architecture = relation(Architecture)))
3099 mapper(Location, self.tbl_location,
3100 properties = dict(location_id = self.tbl_location.c.id,
3101 component_id = self.tbl_location.c.component,
3102 component = relation(Component, backref='location'),
3103 archive_id = self.tbl_location.c.archive,
3104 archive = relation(Archive),
3105 # FIXME: the 'type' column is old cruft and
3106 # should be removed in the future.
3107 archive_type = self.tbl_location.c.type),
3108 extension = validator)
3110 mapper(Maintainer, self.tbl_maintainer,
3111 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
3112 maintains_sources = relation(DBSource, backref='maintainer',
3113 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
3114 changed_sources = relation(DBSource, backref='changedby',
3115 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
3116 extension = validator)
3118 mapper(NewComment, self.tbl_new_comments,
3119 properties = dict(comment_id = self.tbl_new_comments.c.id))
3121 mapper(Override, self.tbl_override,
3122 properties = dict(suite_id = self.tbl_override.c.suite,
3123 suite = relation(Suite, \
3124 backref=backref('overrides', lazy='dynamic')),
3125 package = self.tbl_override.c.package,
3126 component_id = self.tbl_override.c.component,
3127 component = relation(Component, \
3128 backref=backref('overrides', lazy='dynamic')),
3129 priority_id = self.tbl_override.c.priority,
3130 priority = relation(Priority, \
3131 backref=backref('overrides', lazy='dynamic')),
3132 section_id = self.tbl_override.c.section,
3133 section = relation(Section, \
3134 backref=backref('overrides', lazy='dynamic')),
3135 overridetype_id = self.tbl_override.c.type,
3136 overridetype = relation(OverrideType, \
3137 backref=backref('overrides', lazy='dynamic'))))
3139 mapper(OverrideType, self.tbl_override_type,
3140 properties = dict(overridetype = self.tbl_override_type.c.type,
3141 overridetype_id = self.tbl_override_type.c.id))
3143 mapper(PolicyQueue, self.tbl_policy_queue,
3144 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))
3146 mapper(Priority, self.tbl_priority,
3147 properties = dict(priority_id = self.tbl_priority.c.id))
3149 mapper(Section, self.tbl_section,
3150 properties = dict(section_id = self.tbl_section.c.id,
3151 section=self.tbl_section.c.section))
3153 mapper(DBSource, self.tbl_source,
3154 properties = dict(source_id = self.tbl_source.c.id,
3155 version = self.tbl_source.c.version,
3156 maintainer_id = self.tbl_source.c.maintainer,
3157 poolfile_id = self.tbl_source.c.file,
3158 poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
3159 fingerprint_id = self.tbl_source.c.sig_fpr,
3160 fingerprint = relation(Fingerprint),
3161 changedby_id = self.tbl_source.c.changedby,
3162 srcfiles = relation(DSCFile,
3163 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
3164 suites = relation(Suite, secondary=self.tbl_src_associations,
3165 backref=backref('sources', lazy='dynamic')),
3166 srcuploaders = relation(SrcUploader),
3167 key = relation(SourceMetadata,
3168 collection_class=attribute_mapped_collection('key'))),
3169 extension = validator)
3171 mapper(SourceACL, self.tbl_source_acl,
3172 properties = dict(source_acl_id = self.tbl_source_acl.c.id))
3174 mapper(SrcFormat, self.tbl_src_format,
3175 properties = dict(src_format_id = self.tbl_src_format.c.id,
3176 format_name = self.tbl_src_format.c.format_name))
3178 mapper(SrcUploader, self.tbl_src_uploaders,
3179 properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
3180 source_id = self.tbl_src_uploaders.c.source,
3181 source = relation(DBSource,
3182 primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
3183 maintainer_id = self.tbl_src_uploaders.c.maintainer,
3184 maintainer = relation(Maintainer,
3185 primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))
3187 mapper(Suite, self.tbl_suite,
3188 properties = dict(suite_id = self.tbl_suite.c.id,
3189 policy_queue = relation(PolicyQueue),
3190 copy_queues = relation(BuildQueue,
3191 secondary=self.tbl_suite_build_queue_copy)),
3192 extension = validator)
3194 mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
3195 properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
3196 suite = relation(Suite, backref='suitesrcformats'),
3197 src_format_id = self.tbl_suite_src_formats.c.src_format,
3198 src_format = relation(SrcFormat)))
3200 mapper(Uid, self.tbl_uid,
3201 properties = dict(uid_id = self.tbl_uid.c.id,
3202 fingerprint = relation(Fingerprint)),
3203 extension = validator)
3205 mapper(UploadBlock, self.tbl_upload_blocks,
3206 properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
3207 fingerprint = relation(Fingerprint, backref="uploadblocks"),
3208 uid = relation(Uid, backref="uploadblocks")))
3210 mapper(BinContents, self.tbl_bin_contents,
3212 binary = relation(DBBinary,
3213 backref=backref('contents', lazy='dynamic', cascade='all')),
3214 file = self.tbl_bin_contents.c.file))
3216 mapper(MetadataKey, self.tbl_metadata_keys,
3218 key_id = self.tbl_metadata_keys.c.key_id,
3219 key = self.tbl_metadata_keys.c.key))
3221 mapper(BinaryMetadata, self.tbl_binaries_metadata,
3223 binary_id = self.tbl_binaries_metadata.c.bin_id,
3224 binary = relation(DBBinary),
3225 key_id = self.tbl_binaries_metadata.c.key_id,
3226 key = relation(MetadataKey),
3227 value = self.tbl_binaries_metadata.c.value))
3229 mapper(SourceMetadata, self.tbl_source_metadata,
3231 source_id = self.tbl_source_metadata.c.src_id,
3232 source = relation(DBSource),
3233 key_id = self.tbl_source_metadata.c.key_id,
3234 key = relation(MetadataKey),
3235 value = self.tbl_source_metadata.c.value))
3237 ## Connection functions
3238 def __createconn(self):
3239 from config import Config
3241 if cnf.has_key("DB::Service"):
3242 connstr = "postgresql://service=%s" % cnf["DB::Service"]
3243 elif cnf.has_key("DB::Host"):
3245 connstr = "postgresql://%s" % cnf["DB::Host"]
3246 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
3247 connstr += ":%s" % cnf["DB::Port"]
3248 connstr += "/%s" % cnf["DB::Name"]
3251 connstr = "postgresql:///%s" % cnf["DB::Name"]
3252 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
3253 connstr += "?port=%s" % cnf["DB::Port"]
3255 engine_args = { 'echo': self.debug }
3256 if cnf.has_key('DB::PoolSize'):
3257 engine_args['pool_size'] = int(cnf['DB::PoolSize'])
3258 if cnf.has_key('DB::MaxOverflow'):
3259 engine_args['max_overflow'] = int(cnf['DB::MaxOverflow'])
3260 if sa_major_version == '0.6' and cnf.has_key('DB::Unicode') and \
3261 cnf['DB::Unicode'] == 'false':
3262 engine_args['use_native_unicode'] = False
3264 # Monkey patch a new dialect in in order to support service= syntax
3265 import sqlalchemy.dialects.postgresql
3266 from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
3267 class PGDialect_psycopg2_dak(PGDialect_psycopg2):
3268 def create_connect_args(self, url):
3269 if str(url).startswith('postgresql://service='):
3271 servicename = str(url)[21:]
3272 return (['service=%s' % servicename], {})
3274 return PGDialect_psycopg2.create_connect_args(self, url)
3276 sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak
3278 self.db_pg = create_engine(connstr, **engine_args)
3279 self.db_meta = MetaData()
3280 self.db_meta.bind = self.db_pg
3281 self.db_smaker = sessionmaker(bind=self.db_pg,
3285 self.__setuptables()
3286 self.__setupmappers()
3287 self.pid = os.getpid()
3290 # reinitialize DBConn in new processes
3291 if self.pid != os.getpid():
3294 return self.db_smaker()
3296 __all__.append('DBConn')