5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
37 from os.path import normpath
48 import simplejson as json
50 from datetime import datetime, timedelta
51 from errno import ENOENT
52 from tempfile import mkstemp, mkdtemp
53 from subprocess import Popen, PIPE
54 from tarfile import TarFile
56 from inspect import getargspec
59 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
61 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
62 backref, MapperExtension, EXT_CONTINUE, object_mapper
63 from sqlalchemy import types as sqltypes
65 # Don't remove this, we re-export the exceptions to scripts which import us
66 from sqlalchemy.exc import *
67 from sqlalchemy.orm.exc import NoResultFound
69 # Only import Config until Queue stuff is changed to store its config
71 from config import Config
72 from textutils import fix_maintainer
73 from dak_exceptions import DBUpdateError, NoSourceFieldError
75 # suppress some deprecation warnings in squeeze related to sqlalchemy
77 warnings.filterwarnings('ignore', \
78 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
80 # TODO: sqlalchemy needs some extra configuration to correctly reflect
81 # the ind_deb_contents_* indexes - we ignore the warnings at the moment
82 warnings.filterwarnings("ignore", 'Predicate of partial index', SAWarning)
85 ################################################################################
87 # Patch in support for the debversion field type so that it works during
91 # that is for sqlalchemy 0.6
92 UserDefinedType = sqltypes.UserDefinedType
94 # this one for sqlalchemy 0.5
95 UserDefinedType = sqltypes.TypeEngine
97 class DebVersion(UserDefinedType):
98 def get_col_spec(self):
101 def bind_processor(self, dialect):
104 # ' = None' is needed for sqlalchemy 0.5:
105 def result_processor(self, dialect, coltype = None):
108 sa_major_version = sqlalchemy.__version__[0:3]
109 if sa_major_version in ["0.5", "0.6"]:
110 from sqlalchemy.databases import postgres
111 postgres.ischema_names['debversion'] = DebVersion
113 raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
115 ################################################################################
117 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
119 ################################################################################
121 def session_wrapper(fn):
123 Wrapper around common ".., session=None):" handling. If the wrapped
124 function is called without passing 'session', we create a local one
125 and destroy it when the function ends.
127 Also attaches a commit_or_flush method to the session; if we created a
128 local session, this is a synonym for session.commit(), otherwise it is a
129 synonym for session.flush().
132 def wrapped(*args, **kwargs):
133 private_transaction = False
135 # Find the session object
136 session = kwargs.get('session')
139 if len(args) <= len(getargspec(fn)[0]) - 1:
140 # No session specified as last argument or in kwargs
141 private_transaction = True
142 session = kwargs['session'] = DBConn().session()
144 # Session is last argument in args
148 session = args[-1] = DBConn().session()
149 private_transaction = True
151 if private_transaction:
152 session.commit_or_flush = session.commit
154 session.commit_or_flush = session.flush
157 return fn(*args, **kwargs)
159 if private_transaction:
160 # We created a session; close it.
163 wrapped.__doc__ = fn.__doc__
164 wrapped.func_name = fn.func_name
168 __all__.append('session_wrapper')
170 ################################################################################
172 class ORMObject(object):
174 ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
175 derived classes must implement the properties() method.
178 def properties(self):
180 This method should be implemented by all derived classes and returns a
181 list of the important properties. The properties 'created' and
182 'modified' will be added automatically. A suffix '_count' should be
183 added to properties that are lists or query objects. The most important
184 property name should be returned as the first element in the list
185 because it is used by repr().
191 Returns a JSON representation of the object based on the properties
192 returned from the properties() method.
195 # add created and modified
196 all_properties = self.properties() + ['created', 'modified']
197 for property in all_properties:
198 # check for list or query
199 if property[-6:] == '_count':
200 real_property = property[:-6]
201 if not hasattr(self, real_property):
203 value = getattr(self, real_property)
204 if hasattr(value, '__len__'):
207 elif hasattr(value, 'count'):
209 value = value.count()
211 raise KeyError('Do not understand property %s.' % property)
213 if not hasattr(self, property):
216 value = getattr(self, property)
220 elif isinstance(value, ORMObject):
221 # use repr() for ORMObject types
224 # we want a string for all other types because json cannot
227 data[property] = value
228 return json.dumps(data)
232 Returns the name of the class.
234 return type(self).__name__
238 Returns a short string representation of the object using the first
239 element from the properties() method.
241 primary_property = self.properties()[0]
242 value = getattr(self, primary_property)
243 return '<%s %s>' % (self.classname(), str(value))
247 Returns a human readable form of the object using the properties()
250 return '<%s %s>' % (self.classname(), self.json())
252 def not_null_constraints(self):
254 Returns a list of properties that must be not NULL. Derived classes
255 should override this method if needed.
259 validation_message = \
260 "Validation failed because property '%s' must not be empty in object\n%s"
264 This function validates the not NULL constraints as returned by
265 not_null_constraints(). It raises the DBUpdateError exception if
268 for property in self.not_null_constraints():
269 # TODO: It is a bit awkward that the mapper configuration allow
270 # directly setting the numeric _id columns. We should get rid of it
272 if hasattr(self, property + '_id') and \
273 getattr(self, property + '_id') is not None:
275 if not hasattr(self, property) or getattr(self, property) is None:
276 raise DBUpdateError(self.validation_message % \
277 (property, str(self)))
281 def get(cls, primary_key, session = None):
283 This is a support function that allows getting an object by its primary
286 Architecture.get(3[, session])
288 instead of the more verbose
290 session.query(Architecture).get(3)
292 return session.query(cls).get(primary_key)
294 def session(self, replace = False):
296 Returns the current session that is associated with the object. May
297 return None if the object is in detached state.
300 return object_session(self)
302 def clone(self, session = None):
304 Clones the current object in a new session and returns the new clone. A
305 fresh session is created if the optional session parameter is not
306 provided. The function will fail if a session is provided and has
309 RATIONALE: SQLAlchemy's session is not thread safe. This method clones
310 an existing object to allow several threads to work with their own
311 instances of an ORMObject.
313 WARNING: Only persistent (committed) objects can be cloned. Changes
314 made to the original object that are not committed yet will get lost.
315 The session of the new object will always be rolled back to avoid
319 if self.session() is None:
320 raise RuntimeError( \
321 'Method clone() failed for detached object:\n%s' % self)
322 self.session().flush()
323 mapper = object_mapper(self)
324 primary_key = mapper.primary_key_from_instance(self)
325 object_class = self.__class__
327 session = DBConn().session()
328 elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
329 raise RuntimeError( \
330 'Method clone() failed due to unflushed changes in session.')
331 new_object = session.query(object_class).get(primary_key)
333 if new_object is None:
334 raise RuntimeError( \
335 'Method clone() failed for non-persistent object:\n%s' % self)
338 __all__.append('ORMObject')
340 ################################################################################
342 class Validator(MapperExtension):
344 This class calls the validate() method for each instance for the
345 'before_update' and 'before_insert' events. A global object validator is
346 used for configuring the individual mappers.
349 def before_update(self, mapper, connection, instance):
353 def before_insert(self, mapper, connection, instance):
357 validator = Validator()
359 ################################################################################
class Architecture(ORMObject):
    """ORM class for a row of the architecture table.

    Instances compare equal (or unequal) to plain strings naming the
    architecture, so callers can write e.g. ``arch == 'amd64'``.
    """

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Only plain strings get the special-case comparison; anything
        # else falls back to Python's normal comparison machinery.
        if not isinstance(val, str):
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        # Mirror image of __eq__ for plain string comparison.
        if not isinstance(val, str):
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # 'arch_string' comes first because ORMObject.__repr__() uses the
        # first listed property.
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']
384 __all__.append('Architecture')
387 def get_architecture(architecture, session=None):
389 Returns database id for given C{architecture}.
391 @type architecture: string
392 @param architecture: The name of the architecture
394 @type session: Session
395 @param session: Optional SQLA session object (a temporary one will be
396 generated if not supplied)
399 @return: Architecture object for the given arch (None if not present)
402 q = session.query(Architecture).filter_by(arch_string=architecture)
406 except NoResultFound:
409 __all__.append('get_architecture')
411 # TODO: should be removed because the implementation is too trivial
def get_architecture_suites(architecture, session=None):
    """
    Returns list of Suite objects for given C{architecture} name

    @type architecture: str
    @param architecture: Architecture name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Suite objects for the given name (may be empty)

    NOTE(review): get_architecture() returns None for an unknown name, in
    which case this raises AttributeError rather than returning [].
    """
    arch = get_architecture(architecture, session)
    return arch.suites
430 __all__.append('get_architecture_suites')
432 ################################################################################
434 class Archive(object):
435 def __init__(self, *args, **kwargs):
439 return '<Archive %s>' % self.archive_name
441 __all__.append('Archive')
444 def get_archive(archive, session=None):
446 returns database id for given C{archive}.
448 @type archive: string
449 @param archive: the name of the archive
451 @type session: Session
452 @param session: Optional SQLA session object (a temporary one will be
453 generated if not supplied)
456 @return: Archive object for the given name (None if not present)
459 archive = archive.lower()
461 q = session.query(Archive).filter_by(archive_name=archive)
465 except NoResultFound:
468 __all__.append('get_archive')
470 ################################################################################
472 class BinContents(ORMObject):
473 def __init__(self, file = None, binary = None):
477 def properties(self):
478 return ['file', 'binary']
480 __all__.append('BinContents')
482 ################################################################################
484 class DBBinary(ORMObject):
485 def __init__(self, package = None, source = None, version = None, \
486 maintainer = None, architecture = None, poolfile = None, \
488 self.package = package
490 self.version = version
491 self.maintainer = maintainer
492 self.architecture = architecture
493 self.poolfile = poolfile
494 self.binarytype = binarytype
496 def properties(self):
497 return ['package', 'version', 'maintainer', 'source', 'architecture', \
498 'poolfile', 'binarytype', 'fingerprint', 'install_date', \
499 'suites_count', 'binary_id', 'contents_count']
501 def not_null_constraints(self):
502 return ['package', 'version', 'maintainer', 'source', 'poolfile', \
505 def get_component_name(self):
506 return self.poolfile.location.component.component_name
508 def scan_contents(self):
510 Yields the contents of the package. Only regular files are yielded and
511 the path names are normalized after converting them from either utf-8
512 or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the
513 package does not contain any regular file.
515 fullpath = self.poolfile.fullpath
516 dpkg = Popen(['dpkg-deb', '--fsys-tarfile', fullpath], stdout = PIPE)
517 tar = TarFile.open(fileobj = dpkg.stdout, mode = 'r|')
518 for member in tar.getmembers():
519 if not member.isdir():
520 name = normpath(member.name)
521 # enforce proper utf-8 encoding
524 except UnicodeDecodeError:
525 name = name.decode('iso8859-1').encode('utf-8')
531 __all__.append('DBBinary')
def get_suites_binary_in(package, session=None):
    """
    Returns list of Suite objects which given C{package} name is in

    @type package: str
    @param package: DBBinary package name to search for

    @rtype: list
    @return: list of Suite objects for the given package
    """
    # Any suite that carries at least one binary with this package name.
    has_package = Suite.binaries.any(DBBinary.package == package)
    return session.query(Suite).filter(has_package).all()
547 __all__.append('get_suites_binary_in')
550 def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
552 Returns the component name of the newest binary package in suite_list or
553 None if no package is found. The result can be optionally filtered by a list
554 of architecture names.
557 @param package: DBBinary package name to search for
559 @type suite_list: list of str
560 @param suite_list: list of suite_name items
562 @type arch_list: list of str
563 @param arch_list: optional list of arch_string items that defaults to []
565 @rtype: str or NoneType
566 @return: name of component or None
569 q = session.query(DBBinary).filter_by(package = package). \
570 join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
571 if len(arch_list) > 0:
572 q = q.join(DBBinary.architecture). \
573 filter(Architecture.arch_string.in_(arch_list))
574 binary = q.order_by(desc(DBBinary.version)).first()
578 return binary.get_component_name()
580 __all__.append('get_component_by_package_suite')
582 ################################################################################
584 class BinaryACL(object):
585 def __init__(self, *args, **kwargs):
589 return '<BinaryACL %s>' % self.binary_acl_id
591 __all__.append('BinaryACL')
593 ################################################################################
595 class BinaryACLMap(object):
596 def __init__(self, *args, **kwargs):
600 return '<BinaryACLMap %s>' % self.binary_acl_map_id
602 __all__.append('BinaryACLMap')
604 ################################################################################
609 ArchiveDir "%(archivepath)s";
610 OverrideDir "%(overridedir)s";
611 CacheDir "%(cachedir)s";
616 Packages::Compress ". bzip2 gzip";
617 Sources::Compress ". bzip2 gzip";
622 bindirectory "incoming"
627 BinOverride "override.sid.all3";
628 BinCacheDB "packages-accepted.db";
630 FileList "%(filelist)s";
633 Packages::Extensions ".deb .udeb";
636 bindirectory "incoming/"
639 BinOverride "override.sid.all3";
640 SrcOverride "override.sid.all3.src";
641 FileList "%(filelist)s";
645 class BuildQueue(object):
646 def __init__(self, *args, **kwargs):
650 return '<BuildQueue %s>' % self.queue_name
652 def write_metadata(self, starttime, force=False):
653 # Do we write out metafiles?
654 if not (force or self.generate_metadata):
657 session = DBConn().session().object_session(self)
659 fl_fd = fl_name = ac_fd = ac_name = None
661 arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
662 startdir = os.getcwd()
665 # Grab files we want to include
666 newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
667 # Write file list with newer files
668 (fl_fd, fl_name) = mkstemp()
670 os.write(fl_fd, '%s\n' % n.fullpath)
675 # Write minimal apt.conf
676 # TODO: Remove hardcoding from template
677 (ac_fd, ac_name) = mkstemp()
678 os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
680 'cachedir': cnf["Dir::Cache"],
681 'overridedir': cnf["Dir::Override"],
685 # Run apt-ftparchive generate
686 os.chdir(os.path.dirname(ac_name))
687 os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
689 # Run apt-ftparchive release
690 # TODO: Eww - fix this
691 bname = os.path.basename(self.path)
695 # We have to remove the Release file otherwise it'll be included in the
698 os.unlink(os.path.join(bname, 'Release'))
702 os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
704 # Crude hack with open and append, but this whole section is and should be redone.
705 if self.notautomatic:
706 release=open("Release", "a")
707 release.write("NotAutomatic: yes")
712 keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
713 if cnf.has_key("Dinstall::SigningPubKeyring"):
714 keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
716 os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
718 # Move the files if we got this far
719 os.rename('Release', os.path.join(bname, 'Release'))
721 os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
723 # Clean up any left behind files
750 def clean_and_update(self, starttime, Logger, dryrun=False):
751 """WARNING: This routine commits for you"""
752 session = DBConn().session().object_session(self)
754 if self.generate_metadata and not dryrun:
755 self.write_metadata(starttime)
757 # Grab files older than our execution time
758 older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
764 Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
766 Logger.log(["I: Removing %s from the queue" % o.fullpath])
767 os.unlink(o.fullpath)
770 # If it wasn't there, don't worry
771 if e.errno == ENOENT:
774 # TODO: Replace with proper logging call
775 Logger.log(["E: Could not remove %s" % o.fullpath])
782 for f in os.listdir(self.path):
783 if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
787 r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
788 except NoResultFound:
789 fp = os.path.join(self.path, f)
791 Logger.log(["I: Would remove unused link %s" % fp])
793 Logger.log(["I: Removing unused link %s" % fp])
797 Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
799 def add_file_from_pool(self, poolfile):
800 """Copies a file into the pool. Assumes that the PoolFile object is
801 attached to the same SQLAlchemy session as the Queue object is.
803 The caller is responsible for committing after calling this function."""
804 poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
806 # Check if we have a file of this name or this ID already
807 for f in self.queuefiles:
808 if f.fileid is not None and f.fileid == poolfile.file_id or \
809 f.poolfile.filename == poolfile_basename:
810 # In this case, update the BuildQueueFile entry so we
811 # don't remove it too early
812 f.lastused = datetime.now()
813 DBConn().session().object_session(poolfile).add(f)
816 # Prepare BuildQueueFile object
817 qf = BuildQueueFile()
818 qf.build_queue_id = self.queue_id
819 qf.lastused = datetime.now()
820 qf.filename = poolfile_basename
822 targetpath = poolfile.fullpath
823 queuepath = os.path.join(self.path, poolfile_basename)
827 # We need to copy instead of symlink
829 utils.copy(targetpath, queuepath)
830 # NULL in the fileid field implies a copy
833 os.symlink(targetpath, queuepath)
834 qf.fileid = poolfile.file_id
838 # Get the same session as the PoolFile is using and add the qf to it
839 DBConn().session().object_session(poolfile).add(qf)
844 __all__.append('BuildQueue')
847 def get_build_queue(queuename, session=None):
849 Returns BuildQueue object for given C{queue name}, creating it if it does not
852 @type queuename: string
853 @param queuename: The name of the queue
855 @type session: Session
856 @param session: Optional SQLA session object (a temporary one will be
857 generated if not supplied)
860 @return: BuildQueue object for the given queue
863 q = session.query(BuildQueue).filter_by(queue_name=queuename)
867 except NoResultFound:
870 __all__.append('get_build_queue')
872 ################################################################################
874 class BuildQueueFile(object):
875 def __init__(self, *args, **kwargs):
879 return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
883 return os.path.join(self.buildqueue.path, self.filename)
886 __all__.append('BuildQueueFile')
888 ################################################################################
890 class ChangePendingBinary(object):
891 def __init__(self, *args, **kwargs):
895 return '<ChangePendingBinary %s>' % self.change_pending_binary_id
897 __all__.append('ChangePendingBinary')
899 ################################################################################
901 class ChangePendingFile(object):
902 def __init__(self, *args, **kwargs):
906 return '<ChangePendingFile %s>' % self.change_pending_file_id
908 __all__.append('ChangePendingFile')
910 ################################################################################
912 class ChangePendingSource(object):
913 def __init__(self, *args, **kwargs):
917 return '<ChangePendingSource %s>' % self.change_pending_source_id
919 __all__.append('ChangePendingSource')
921 ################################################################################
class Component(ORMObject):
    """ORM class for a row of the component table (main, contrib, ...).

    Instances compare equal (or unequal) to plain strings naming the
    component, so callers can write e.g. ``component == 'main'``.
    """

    def __init__(self, component_name = None):
        self.component_name = component_name

    def __eq__(self, val):
        # Only plain strings get the special-case comparison; anything
        # else falls back to Python's normal comparison machinery.
        if not isinstance(val, str):
            return NotImplemented
        return self.component_name == val

    def __ne__(self, val):
        # Mirror image of __eq__ for plain string comparison.
        if not isinstance(val, str):
            return NotImplemented
        return self.component_name != val

    def properties(self):
        # 'component_name' comes first because ORMObject.__repr__() uses
        # the first listed property.
        return ['component_name', 'component_id', 'description', \
            'location_count', 'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        return ['component_name']
947 __all__.append('Component')
950 def get_component(component, session=None):
952 Returns database id for given C{component}.
954 @type component: string
955 @param component: The name of the override type
958 @return: the database id for the given component
961 component = component.lower()
963 q = session.query(Component).filter_by(component_name=component)
967 except NoResultFound:
970 __all__.append('get_component')
972 ################################################################################
974 class DBConfig(object):
975 def __init__(self, *args, **kwargs):
979 return '<DBConfig %s>' % self.name
981 __all__.append('DBConfig')
983 ################################################################################
986 def get_or_set_contents_file_id(filename, session=None):
988 Returns database id for given filename.
990 If no matching file is found, a row is inserted.
992 @type filename: string
993 @param filename: The filename
994 @type session: SQLAlchemy
995 @param session: Optional SQL session object (a temporary one will be
996 generated if not supplied). If not passed, a commit will be performed at
997 the end of the function, otherwise the caller is responsible for committing.
1000 @return: the database id for the given component
1003 q = session.query(ContentFilename).filter_by(filename=filename)
1006 ret = q.one().cafilename_id
1007 except NoResultFound:
1008 cf = ContentFilename()
1009 cf.filename = filename
1011 session.commit_or_flush()
1012 ret = cf.cafilename_id
1016 __all__.append('get_or_set_contents_file_id')
1019 def get_contents(suite, overridetype, section=None, session=None):
1021 Returns contents for a suite / overridetype combination, limiting
1022 to a section if not None.
1025 @param suite: Suite object
1027 @type overridetype: OverrideType
1028 @param overridetype: OverrideType object
1030 @type section: Section
1031 @param section: Optional section object to limit results to
1033 @type session: SQLAlchemy
1034 @param session: Optional SQL session object (a temporary one will be
1035 generated if not supplied)
1037 @rtype: ResultsProxy
1038 @return: ResultsProxy object set up to return tuples of (filename, section,
1042 # find me all of the contents for a given suite
1043 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
1047 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
1048 JOIN content_file_names n ON (c.filename=n.id)
1049 JOIN binaries b ON (b.id=c.binary_pkg)
1050 JOIN override o ON (o.package=b.package)
1051 JOIN section s ON (s.id=o.section)
1052 WHERE o.suite = :suiteid AND o.type = :overridetypeid
1053 AND b.type=:overridetypename"""
1055 vals = {'suiteid': suite.suite_id,
1056 'overridetypeid': overridetype.overridetype_id,
1057 'overridetypename': overridetype.overridetype}
1059 if section is not None:
1060 contents_q += " AND s.id = :sectionid"
1061 vals['sectionid'] = section.section_id
1063 contents_q += " ORDER BY fn"
1065 return session.execute(contents_q, vals)
1067 __all__.append('get_contents')
1069 ################################################################################
1071 class ContentFilepath(object):
1072 def __init__(self, *args, **kwargs):
1076 return '<ContentFilepath %s>' % self.filepath
1078 __all__.append('ContentFilepath')
1081 def get_or_set_contents_path_id(filepath, session=None):
1083 Returns database id for given path.
1085 If no matching file is found, a row is inserted.
1087 @type filepath: string
1088 @param filepath: The filepath
1090 @type session: SQLAlchemy
1091 @param session: Optional SQL session object (a temporary one will be
1092 generated if not supplied). If not passed, a commit will be performed at
1093 the end of the function, otherwise the caller is responsible for committing.
1096 @return: the database id for the given path
1099 q = session.query(ContentFilepath).filter_by(filepath=filepath)
1102 ret = q.one().cafilepath_id
1103 except NoResultFound:
1104 cf = ContentFilepath()
1105 cf.filepath = filepath
1107 session.commit_or_flush()
1108 ret = cf.cafilepath_id
1112 __all__.append('get_or_set_contents_path_id')
1114 ################################################################################
1116 class ContentAssociation(object):
1117 def __init__(self, *args, **kwargs):
1121 return '<ContentAssociation %s>' % self.ca_id
1123 __all__.append('ContentAssociation')
1125 def insert_content_paths(binary_id, fullpaths, session=None):
1127 Make sure given path is associated with given binary id
1129 @type binary_id: int
1130 @param binary_id: the id of the binary
1131 @type fullpaths: list
1132 @param fullpaths: the list of paths of the file being associated with the binary
1133 @type session: SQLAlchemy session
1134 @param session: Optional SQLAlchemy session. If this is passed, the caller
1135 is responsible for ensuring a transaction has begun and committing the
1136 results or rolling back based on the result code. If not passed, a commit
1137 will be performed at the end of the function, otherwise the caller is
1138 responsible for commiting.
1140 @return: True upon success
1143 privatetrans = False
1145 session = DBConn().session()
1150 def generate_path_dicts():
1151 for fullpath in fullpaths:
1152 if fullpath.startswith( './' ):
1153 fullpath = fullpath[2:]
1155 yield {'filename':fullpath, 'id': binary_id }
1157 for d in generate_path_dicts():
1158 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
1167 traceback.print_exc()
1169 # Only rollback if we set up the session ourself
1176 __all__.append('insert_content_paths')
1178 ################################################################################
1180 class DSCFile(object):
1181 def __init__(self, *args, **kwargs):
1185 return '<DSCFile %s>' % self.dscfile_id
1187 __all__.append('DSCFile')
1190 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
1192 Returns a list of DSCFiles which may be empty
1194 @type dscfile_id: int (optional)
1195 @param dscfile_id: the dscfile_id of the DSCFiles to find
1197 @type source_id: int (optional)
1198 @param source_id: the source id related to the DSCFiles to find
1200 @type poolfile_id: int (optional)
1201 @param poolfile_id: the poolfile id related to the DSCFiles to find
1204 @return: Possibly empty list of DSCFiles
1207 q = session.query(DSCFile)
1209 if dscfile_id is not None:
1210 q = q.filter_by(dscfile_id=dscfile_id)
1212 if source_id is not None:
1213 q = q.filter_by(source_id=source_id)
1215 if poolfile_id is not None:
1216 q = q.filter_by(poolfile_id=poolfile_id)
1220 __all__.append('get_dscfiles')
1222 ################################################################################
1224 class PoolFile(ORMObject):
1225 def __init__(self, filename = None, location = None, filesize = -1, \
1227 self.filename = filename
1228 self.location = location
1229 self.filesize = filesize
1230 self.md5sum = md5sum
1234 return os.path.join(self.location.path, self.filename)
1236 def is_valid(self, filesize = -1, md5sum = None):
1237 return self.filesize == long(filesize) and self.md5sum == md5sum
1239 def properties(self):
1240 return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
1241 'sha256sum', 'location', 'source', 'binary', 'last_used']
1243 def not_null_constraints(self):
1244 return ['filename', 'md5sum', 'location']
1246 __all__.append('PoolFile')
def check_poolfile(filename, filesize, md5sum, location_id, session=None):
    """
    Returns a tuple:
    (ValidFileFound [boolean], PoolFile object or None)

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @type filesize: int
    @param filesize: the size of the file to check against the DB

    @type md5sum: string
    @param md5sum: the md5sum of the file to check against the DB

    @type location_id: int
    @param location_id: the id of the location to look in

    @rtype: tuple
    @return: Tuple of length 2.
                 - If valid pool file found: (C{True}, C{PoolFile object})
                 - If valid pool file not found:
                     - (C{False}, C{None}) if no file found
                     - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
    """

    location = session.query(Location).get(location_id)
    poolfile = location.files.filter_by(filename=filename).first()
    # valid only when we found a row AND its recorded size/md5sum match.
    valid = bool(poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum))

    return (valid, poolfile)
1282 __all__.append('check_poolfile')
1284 # TODO: the implementation can trivially be inlined at the place where the
1285 # function is called
def get_poolfile_by_id(file_id, session=None):
    """
    Returns a PoolFile objects or None for the given id

    @type file_id: int
    @param file_id: the id of the file to look for

    @rtype: PoolFile or None
    @return: either the PoolFile object or None
    """

    # Primary-key lookup; Query.get() returns None for an unknown id.
    return session.query(PoolFile).get(file_id)
1300 __all__.append('get_poolfile_by_id')
def get_poolfile_like_name(filename, session=None):
    """
    Returns an array of PoolFile objects which are like the given name

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @rtype: array
    @return: array of PoolFile objects
    """

    # TODO: There must be a way of properly using bind parameters with %FOO%
    pattern = '%%/%s' % filename
    matches = session.query(PoolFile).filter(PoolFile.filename.like(pattern))

    return matches.all()
1319 __all__.append('get_poolfile_like_name')
def add_poolfile(filename, datadict, location_id, session=None):
    """
    Add a new file to the pool

    @type filename: string
    @param filename: filename

    @type datadict: dict
    @param datadict: dict with needed data

    @type location_id: int
    @param location_id: database id of the location

    @rtype: PoolFile
    @return: the PoolFile object created
    """
    poolfile = PoolFile()
    poolfile.filename = filename
    poolfile.filesize = datadict["size"]
    poolfile.md5sum = datadict["md5sum"]
    poolfile.sha1sum = datadict["sha1sum"]
    poolfile.sha256sum = datadict["sha256sum"]
    poolfile.location_id = location_id

    session.add(poolfile)
    # Flush to get a file id (NB: This is not a commit)
    session.flush()

    return poolfile
1352 __all__.append('add_poolfile')
1354 ################################################################################
class Fingerprint(ORMObject):
    """An OpenPGP key fingerprint known to the archive."""

    def __init__(self, fingerprint = None):
        self.fingerprint = fingerprint

    def properties(self):
        # NOTE(review): list tail reconstructed from an elided line — confirm
        # against the mapper definition.
        return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
            'binary_acl', 'binary_reject_acl_count', 'source_acl']

    def not_null_constraints(self):
        return ['fingerprint']
1367 __all__.append('Fingerprint')
def get_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr or None
    """

    query = session.query(Fingerprint).filter_by(fingerprint=fpr)

    try:
        return query.one()
    except NoResultFound:
        return None
def get_or_set_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    If no matching fpr is found, a row is inserted.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr
    """

    query = session.query(Fingerprint).filter_by(fingerprint=fpr)

    try:
        return query.one()
    except NoResultFound:
        # Unknown fingerprint: insert it and make the row visible.
        fingerprint = Fingerprint()
        fingerprint.fingerprint = fpr
        session.add(fingerprint)
        session.commit_or_flush()
        return fingerprint
1431 ################################################################################
1433 # Helper routine for Keyring class
def get_ldap_name(entry):
    """Build a display name from the cn/mn/sn attributes of an LDAP entry."""
    parts = []
    for attr in ["cn", "mn", "sn"]:
        ret = entry.get(attr)
        # Skip missing attributes, empty strings and the "-" placeholder.
        if ret and ret[0] != "" and ret[0] != "-":
            parts.append(ret[0])
    return " ".join(parts)
1442 ################################################################################
class Keyring(object):
    """In-memory view of a GPG keyring plus fingerprint/uid lookup tables."""

    gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
                     " --with-colons --fingerprint --fingerprint"

    # NOTE(review): class-level mutable dicts are shared across instances —
    # confirm this sharing is intentional.
    keys = {}
    fpr_lookup = {}

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<Keyring %s>' % self.keyring_name
1457 def de_escape_gpg_str(self, txt):
1458 esclist = re.split(r'(\\x..)', txt)
1459 for x in range(1,len(esclist),2):
1460 esclist[x] = "%c" % (int(esclist[x][2:],16))
1461 return "".join(esclist)
1463 def parse_address(self, uid):
1464 """parses uid and returns a tuple of real name and email address"""
1466 (name, address) = email.Utils.parseaddr(uid)
1467 name = re.sub(r"\s*[(].*[)]", "", name)
1468 name = self.de_escape_gpg_str(name)
1471 return (name, address)
1473 def load_keys(self, keyring):
1474 if not self.keyring_id:
1475 raise Exception('Must be initialized with database information')
1477 k = os.popen(self.gpg_invocation % keyring, "r")
1481 for line in k.xreadlines():
1482 field = line.split(":")
1483 if field[0] == "pub":
1486 (name, addr) = self.parse_address(field[9])
1488 self.keys[key]["email"] = addr
1489 self.keys[key]["name"] = name
1490 self.keys[key]["fingerprints"] = []
1492 elif key and field[0] == "sub" and len(field) >= 12:
1493 signingkey = ("s" in field[11])
1494 elif key and field[0] == "uid":
1495 (name, addr) = self.parse_address(field[9])
1496 if "email" not in self.keys[key] and "@" in addr:
1497 self.keys[key]["email"] = addr
1498 self.keys[key]["name"] = name
1499 elif signingkey and field[0] == "fpr":
1500 self.keys[key]["fingerprints"].append(field[9])
1501 self.fpr_lookup[field[9]] = key
1503 def import_users_from_ldap(self, session):
1507 LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
1508 LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
1510 l = ldap.open(LDAPServer)
1511 l.simple_bind_s("","")
1512 Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
1513 "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
1514 ["uid", "keyfingerprint", "cn", "mn", "sn"])
1516 ldap_fin_uid_id = {}
1523 uid = entry["uid"][0]
1524 name = get_ldap_name(entry)
1525 fingerprints = entry["keyFingerPrint"]
1527 for f in fingerprints:
1528 key = self.fpr_lookup.get(f, None)
1529 if key not in self.keys:
1531 self.keys[key]["uid"] = uid
1535 keyid = get_or_set_uid(uid, session).uid_id
1536 byuid[keyid] = (uid, name)
1537 byname[uid] = (keyid, name)
1539 return (byname, byuid)
1541 def generate_users_from_keyring(self, format, session):
1545 for x in self.keys.keys():
1546 if "email" not in self.keys[x]:
1548 self.keys[x]["uid"] = format % "invalid-uid"
1550 uid = format % self.keys[x]["email"]
1551 keyid = get_or_set_uid(uid, session).uid_id
1552 byuid[keyid] = (uid, self.keys[x]["name"])
1553 byname[uid] = (keyid, self.keys[x]["name"])
1554 self.keys[x]["uid"] = uid
1557 uid = format % "invalid-uid"
1558 keyid = get_or_set_uid(uid, session).uid_id
1559 byuid[keyid] = (uid, "ungeneratable user id")
1560 byname[uid] = (keyid, "ungeneratable user id")
1562 return (byname, byuid)
1564 __all__.append('Keyring')
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @rtype: Keyring
    @return: the Keyring object for this keyring
    """

    query = session.query(Keyring).filter_by(keyring_name=keyring)

    try:
        return query.one()
    except NoResultFound:
        return None
1588 ################################################################################
class KeyringACLMap(object):
    """Mapping between a keyring and the upload ACLs it grants."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<KeyringACLMap %s>' % self.keyring_acl_map_id
1597 __all__.append('KeyringACLMap')
1599 ################################################################################
class DBChange(object):
    """A .changes file tracked in the database."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DBChange %s>' % self.changesname

    def clean_from_queue(self):
        session = DBConn().session().object_session(self)

        # Remove changes_pool_files entries
        self.poolfiles = []

        # Remove changes_pending_files references
        self.files = []

        # Clear out of queue
        self.in_queue = None
        self.approved_for_id = None
1621 __all__.append('DBChange')
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return: DBChange object for the given filename (C{None} if not present)
    """
    # Look the .changes file up by its unique name.
    q = session.query(DBChange).filter_by(changesname=filename)

    try:
        return q.one()
    except NoResultFound:
        return None
1646 __all__.append('get_dbchange')
1648 ################################################################################
class Location(ORMObject):
    """An on-disk pool location: a path plus its component and archive type."""

    def __init__(self, path = None, component = None):
        self.path = path
        self.component = component
        # the column 'type' should go away, see comment at mapper
        self.archive_type = 'pool'

    def properties(self):
        return ['path', 'location_id', 'archive_type', 'component', \
            'files_count']

    def not_null_constraints(self):
        return ['path', 'archive_type']
1664 __all__.append('Location')
def get_location(location, component=None, archive=None, session=None):
    """
    Returns Location object for the given combination of location, component
    and archive

    @type location: string
    @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}

    @type component: string
    @param component: the component name (if None, no restriction applied)

    @type archive: string
    @param archive: the archive name (if None, no restriction applied)

    @rtype: Location / None
    @return: Either a Location object or None if one can't be found
    """

    q = session.query(Location).filter_by(path=location)

    if archive is not None:
        q = q.join(Archive).filter_by(archive_name=archive)

    if component is not None:
        q = q.join(Component).filter_by(component_name=component)

    try:
        return q.one()
    except NoResultFound:
        return None
1700 ################################################################################
class Maintainer(ORMObject):
    """A maintainer (or changed-by) identity, stored as an RFC822 address."""

    def __init__(self, name = None):
        self.name = name

    def properties(self):
        return ['name', 'maintainer_id']

    def not_null_constraints(self):
        return ['name']

    def get_split_maintainer(self):
        # Without a recorded name there is nothing to split.
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())
1718 __all__.append('Maintainer')
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """

    query = session.query(Maintainer).filter_by(name=name)

    try:
        return query.one()
    except NoResultFound:
        # Unknown maintainer: insert and make the row visible.
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()
        return maintainer
1752 __all__.append('get_or_set_maintainer')
def get_maintainer(maintainer_id, session=None):
    """
    Return the name of the maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer
    @return: the Maintainer with this C{maintainer_id}
    """

    # Primary-key lookup; Query.get() returns None for an unknown id.
    return session.query(Maintainer).get(maintainer_id)
1769 __all__.append('get_maintainer')
1771 ################################################################################
class NewComment(object):
    """An ftpteam comment attached to a package/version in the NEW queue."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
1780 __all__.append('NewComment')
def has_new_comment(package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """

    comments = session.query(NewComment) \
        .filter_by(package=package) \
        .filter_by(version=version)

    return bool(comments.count() > 0)
1807 __all__.append('has_new_comment')
def get_new_comments(package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """

    q = session.query(NewComment)
    # Narrow by whichever optional keys the caller supplied.
    for column, value in (('package', package), ('version', version),
                          ('comment_id', comment_id)):
        if value is not None:
            q = q.filter_by(**{column: value})

    return q.all()
1839 __all__.append('get_new_comments')
1841 ################################################################################
class Override(ORMObject):
    """Where a package's section/priority live in a given suite/component."""

    def __init__(self, package = None, suite = None, component = None, overridetype = None, \
        section = None, priority = None):
        self.package = package
        self.suite = suite
        self.component = component
        self.overridetype = overridetype
        self.section = section
        self.priority = priority

    def properties(self):
        return ['package', 'suite', 'component', 'overridetype', 'section', \
            'priority']

    def not_null_constraints(self):
        return ['package', 'suite', 'component', 'overridetype', 'section']
1860 __all__.append('Override')
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to.  If
                  None, don't limit.  Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
                      limit to.  If None, don't limit.  Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
                         limit to.  If None, don't limit.  Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """

    q = session.query(Override)
    q = q.filter_by(package=package)

    # Each optional restriction accepts a single name or a list of names.
    if suite is not None:
        if not isinstance(suite, list):
            suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list):
            component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list):
            overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

    return q.all()
1907 __all__.append('get_override')
1910 ################################################################################
class OverrideType(ORMObject):
    """The kind of an override entry (e.g. deb, udeb, dsc)."""

    def __init__(self, overridetype = None):
        self.overridetype = overridetype

    def properties(self):
        return ['overridetype', 'overridetype_id', 'overrides_count']

    def not_null_constraints(self):
        return ['overridetype']
1922 __all__.append('OverrideType')
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: OverrideType
    @return: the database id for the given override type
    """

    q = session.query(OverrideType).filter_by(overridetype=override_type)

    try:
        return q.one()
    except NoResultFound:
        return None
1947 __all__.append('get_override_type')
1949 ################################################################################
class DebContents(object):
    """A (package, file) row from the deb_contents table."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Fixed typo: was '<DebConetnts ...>'.
        return '<DebContents %s: %s>' % (self.package.package, self.file)
1958 __all__.append('DebContents')
class UdebContents(object):
    """A (package, file) row from the udeb_contents table."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Fixed typo: was '<UdebConetnts ...>'.
        return '<UdebContents %s: %s>' % (self.package.package, self.file)
1968 __all__.append('UdebContents')
class PendingBinContents(object):
    """Contents entries for a binary that is still waiting in a queue."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<PendingBinContents %s>' % self.contents_id
1977 __all__.append('PendingBinContents')
1979 def insert_pending_content_paths(package,
1984 Make sure given paths are temporarily associated with given
1988 @param package: the package to associate with should have been read in from the binary control file
1989 @type fullpaths: list
1990 @param fullpaths: the list of paths of the file being associated with the binary
1991 @type session: SQLAlchemy session
1992 @param session: Optional SQLAlchemy session. If this is passed, the caller
1993 is responsible for ensuring a transaction has begun and committing the
1994 results or rolling back based on the result code. If not passed, a commit
1995 will be performed at the end of the function
1997 @return: True upon success, False if there is a problem
2000 privatetrans = False
2003 session = DBConn().session()
2007 arch = get_architecture(package['Architecture'], session)
2008 arch_id = arch.arch_id
2010 # Remove any already existing recorded files for this package
2011 q = session.query(PendingBinContents)
2012 q = q.filter_by(package=package['Package'])
2013 q = q.filter_by(version=package['Version'])
2014 q = q.filter_by(architecture=arch_id)
2017 for fullpath in fullpaths:
2019 if fullpath.startswith( "./" ):
2020 fullpath = fullpath[2:]
2022 pca = PendingBinContents()
2023 pca.package = package['Package']
2024 pca.version = package['Version']
2026 pca.architecture = arch_id
2029 pca.type = 8 # gross
2031 pca.type = 7 # also gross
2034 # Only commit if we set up the session ourself
2042 except Exception, e:
2043 traceback.print_exc()
2045 # Only rollback if we set up the session ourself
2052 __all__.append('insert_pending_content_paths')
2054 ################################################################################
class PolicyQueue(object):
    """A policy queue (e.g. NEW or BYHAND) known to the database."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<PolicyQueue %s>' % self.queue_name
2063 __all__.append('PolicyQueue')
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """

    q = session.query(PolicyQueue).filter_by(queue_name=queuename)

    try:
        return q.one()
    except NoResultFound:
        return None
2088 __all__.append('get_policy_queue')
def get_policy_queue_from_path(pathname, session=None):
    """
    Returns PolicyQueue object for given C{path name}

    @type pathname: string
    @param pathname: the path of the queue directory

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    # Docstring fixed: it previously documented a nonexistent 'queuename'
    # parameter; the actual parameter is 'pathname'.

    q = session.query(PolicyQueue).filter_by(path=pathname)

    try:
        return q.one()
    except NoResultFound:
        return None
2113 __all__.append('get_policy_queue_from_path')
2115 ################################################################################
class Priority(ORMObject):
    """A package priority (required, important, ...) plus its sort level."""

    def __init__(self, priority = None, level = None):
        self.priority = priority
        self.level = level

    def properties(self):
        return ['priority', 'priority_id', 'level', 'overrides_count']

    def not_null_constraints(self):
        return ['priority', 'level']

    def __eq__(self, val):
        # Allow direct comparison against a bare priority name string.
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented
2140 __all__.append('Priority')
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority
    """

    q = session.query(Priority).filter_by(priority=priority)

    try:
        return q.one()
    except NoResultFound:
        return None
2165 __all__.append('get_priority')
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """

    # Build the whole mapping in one pass over the priorities table.
    return dict((x.priority, x.priority_id)
                for x in session.query(Priority).all())
2187 __all__.append('get_priorities')
2189 ################################################################################
class Section(ORMObject):
    """An archive section (admin, devel, libs, ...)."""

    def __init__(self, section = None):
        self.section = section

    def properties(self):
        return ['section', 'section_id', 'overrides_count']

    def not_null_constraints(self):
        return ['section']

    def __eq__(self, val):
        # Allow direct comparison against a bare section name string.
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented
2213 __all__.append('Section')
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name
    """

    q = session.query(Section).filter_by(section=section)

    try:
        return q.one()
    except NoResultFound:
        return None
2238 __all__.append('get_section')
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """

    # Build the whole mapping in one pass over the section table.
    return dict((x.section, x.section_id)
                for x in session.query(Section).all())
2260 __all__.append('get_sections')
2262 ################################################################################
class DBSource(ORMObject):
    """A source package version stored in the archive."""

    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date

    def properties(self):
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \
            'install_date', 'binaries_count']

    def not_null_constraints(self):
        # 'install_date' was previously listed twice; listing it once is
        # sufficient for the validation performed by ORMObject.
        return ['source', 'version', 'install_date', 'maintainer', \
            'changedby', 'poolfile']
2283 __all__.append('DBSource')
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
      1. exact match     => 1.0-3
      2. bin-only NMU    => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: list
    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: returns 1 if a source with expected version is found, otherwise 0
    """
    # NOTE(review): mutable default argument 'suites' is only iterated, never
    # mutated, so it is harmless here — but worth confirming.

    cnf = Config()
    ret = True

    from daklib.regexes import re_bin_only_nmu
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))
        if suite != "any":
            # source must exist in suite X, or in some other suite that's
            # mapped to X, recursively... silent-maps are counted too,
            # unreleased-maps aren't.
            maps = cnf.ValueList("SuiteMappings")[:]
            maps.reverse()
            maps = [ m.split() for m in maps ]
            maps = [ (x[1], x[2]) for x in maps
                            if x[0] == "map" or x[0] == "silent-map" ]
            s = [suite]
            for (from_, to) in maps:
                if from_ in s and to not in s:
                    s.append(to)

            q = q.filter(DBSource.suites.any(Suite.suite_name.in_(s)))

        if q.count() > 0:
            continue

        # No source found so return not ok
        ret = False

    return ret
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """

    # Any suite containing at least one DBSource row with this name matches.
    return session.query(Suite).filter(Suite.sources.any(source=source)).all()
2360 __all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @type source: str
    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect.  If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBSource objects for the given name (may be empty)
    """

    query = session.query(DBSource).filter_by(source=source)

    # Apply the optional restrictions only when they were supplied.
    for column, value in (('version', version),
                          ('dm_upload_allowed', dm_upload_allowed)):
        if value is not None:
            query = query.filter_by(**{column: value})

    return query.all()
2395 __all__.append('get_sources_from_name')
2397 # FIXME: This function fails badly if it finds more than 1 source package and
2398 # its implementation is trivial enough to be inlined.
def get_source_in_suite(source, suite, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite: string
    @param suite: the suite name

    @rtype: string
    @return: the version for I{source} in I{suite}

    """

    q = get_suite(suite, session).get_sources(source)
    try:
        return q.one()
    except NoResultFound:
        return None
2424 __all__.append('get_source_in_suite')
2426 ################################################################################
def add_dsc_to_db(u, filename, session=None):
    """Record a source upload: the DBSource row, its pool files, dsc_files
    entries and src_uploaders.  Returns (source, dsc_component,
    dsc_location_id, pfs) where pfs is the list of PoolFiles touched."""
    entry = u.pkg.files[filename]
    source = DBSource()
    pfs = []

    source.source = u.pkg.dsc["source"]
    source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
    source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
    source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    source.install_date = datetime.now().date()

    dsc_component = entry["component"]
    dsc_location_id = entry["location id"]

    source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")

    # Set up a new poolfile if necessary
    if not entry.has_key("files id") or not entry["files id"]:
        filename = entry["pool name"] + filename
        poolfile = add_poolfile(filename, entry, dsc_location_id, session)
        session.flush()
        pfs.append(poolfile)
        entry["files id"] = poolfile.file_id

    source.poolfile_id = entry["files id"]
    session.add(source)
    session.flush()

    suite_names = u.pkg.changes["distribution"].keys()
    source.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Add the source files to the DB (files and dsc_files)
    dscfile = DSCFile()
    dscfile.source_id = source.source_id
    dscfile.poolfile_id = entry["files id"]
    session.add(dscfile)

    for dsc_file, dentry in u.pkg.dsc_files.items():
        df = DSCFile()
        df.source_id = source.source_id

        # If the .orig tarball is already in the pool, it's
        # files id is stored in dsc_files by check_dsc().
        files_id = dentry.get("files id", None)

        # Find the entry in the files hash
        # TODO: Bail out here properly
        dfentry = None
        for f, e in u.pkg.files.items():
            if f == dsc_file:
                dfentry = e
                break

        if files_id is None:
            filename = dfentry["pool name"] + dsc_file

            (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
            # FIXME: needs to check for -1/-2 and or handle exception
            if found and obj is not None:
                files_id = obj.file_id
                pfs.append(obj)

            # If still not found, add it
            if files_id is None:
                # HACK: Force sha1sum etc into dentry
                dentry["sha1sum"] = dfentry["sha1sum"]
                dentry["sha256sum"] = dfentry["sha256sum"]
                poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
                pfs.append(poolfile)
                files_id = poolfile.file_id
        else:
            poolfile = get_poolfile_by_id(files_id, session)
            if poolfile is None:
                utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
            pfs.append(poolfile)

        df.poolfile_id = files_id
        session.add(df)

    # Add the src_uploaders to the DB
    uploader_ids = [source.maintainer_id]
    if u.pkg.dsc.has_key("uploaders"):
        for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
            up = up.strip()
            uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)

    added_ids = {}
    for up_id in uploader_ids:
        # Guard against duplicate uploader entries in the .dsc.
        if added_ids.has_key(up_id):
            import utils
            utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))
            continue

        added_ids[up_id] = 1

        su = SrcUploader()
        su.maintainer_id = up_id
        su.source_id = source.source_id
        session.add(su)

    session.flush()

    return source, dsc_component, dsc_location_id, pfs
2534 __all__.append('add_dsc_to_db')
def add_deb_to_db(u, filename, session=None):
    """
    Contrary to what you might expect, this routine deals with both
    debs and udebs. That info is in 'dbtype', whilst 'type' is
    'deb' for both of them
    """
    # NOTE(review): this copy of the function appears truncated -- 'bin' is
    # assigned to below without ever being created (presumably a lost
    # 'bin = DBBinary()'), and 'cnf' (a Config instance) is never
    # initialised here; confirm against the upstream source.
    entry = u.pkg.files[filename]

    # Copy the per-file metadata from the upload into the binary object.
    bin.package = entry["package"]
    bin.version = entry["version"]
    bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
    bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    bin.arch_id = get_architecture(entry["architecture"], session).arch_id
    bin.binarytype = entry["dbtype"]

    # Resolve the pool file backing this binary.
    filename = entry["pool name"] + filename
    fullpath = os.path.join(cnf["Dir::Pool"], filename)
    if not entry.get("location id", None):
        entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id

    if entry.get("files id", None):
        poolfile = get_poolfile_by_id(bin.poolfile_id)
        bin.poolfile_id = entry["files id"]
        # NOTE(review): the next two lines look like a lost 'else' branch
        # (register a new pool file when no 'files id' is known); as written
        # they unconditionally overwrite the lookup above -- confirm.
        poolfile = add_poolfile(filename, entry, entry["location id"], session)
        bin.poolfile_id = entry["files id"] = poolfile.file_id

    # A binary must map back to exactly one source package.
    bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
    if len(bin_sources) != 1:
        raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
                                  (bin.package, bin.version, entry["architecture"],
                                   filename, bin.binarytype, u.pkg.changes["fingerprint"])

    bin.source_id = bin_sources[0].source_id

    # Add and flush object so it has an ID
    # Attach the binary to every suite named in the .changes distribution.
    suite_names = u.pkg.changes["distribution"].keys()
    bin.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Deal with contents - disabled for now
    #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
    # print "REJECT\nCould not determine contents of package %s" % bin.package
    # session.rollback()
    # raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
2594 __all__.append('add_deb_to_db')
2596 ################################################################################
class SourceACL(object):
    """
    A row of table source_acl (see DBConn.__setupmappers); the mapper
    attaches source_acl_id.
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Restored: this body previously appeared as a 'return' inside
        # __init__ (which raises TypeError on instantiation).
        return '<SourceACL %s>' % self.source_acl_id
2605 __all__.append('SourceACL')
2607 ################################################################################
class SrcFormat(object):
    """
    A source package format, row of table src_format (see
    DBConn.__setupmappers); the mapper attaches format_name.
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Restored: this body previously appeared as a 'return' inside
        # __init__ (which raises TypeError on instantiation).
        return '<SrcFormat %s>' % (self.format_name)
2616 __all__.append('SrcFormat')
2618 ################################################################################
class SrcUploader(object):
    """
    An uploader of a source package, row of table src_uploaders (see
    DBConn.__setupmappers); the mapper attaches uploader_id.
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Restored: this body previously appeared as a 'return' inside
        # __init__ (which raises TypeError on instantiation).
        return '<SrcUploader %s>' % self.uploader_id
2627 __all__.append('SrcUploader')
2629 ################################################################################
# (Display name, Suite attribute) pairs consumed by Suite.details() to
# pretty-print a suite's settings; attributes that are unset/None are
# skipped there.
# NOTE(review): this list may be incomplete in this copy (e.g. a 'Label'
# entry could have been lost) -- confirm against the suite table columns.
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
# Why the heck don't we have any UNIQUE constraints in table suite?
# TODO: Add UNIQUE constraints for appropriate columns.
class Suite(ORMObject):
    """
    A suite (row of table suite, see DBConn.__setupmappers). Compares
    equal to a plain string holding the suite name.
    """

    def __init__(self, suite_name = None, version = None):
        self.suite_name = suite_name
        self.version = version

    def properties(self):
        # Attributes exposed via ORMObject.
        # NOTE(review): the continuation line was lost in this copy;
        # 'overrides_count' restored from the 'overrides' backref -- confirm.
        return ['suite_name', 'version', 'sources_count', 'binaries_count', \
            'overrides_count']

    def not_null_constraints(self):
        return ['suite_name', 'version']

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.suite_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.suite_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def details(self):
        """
        Return a "Field: value" listing (one per line) of the attributes
        named in SUITE_FIELDS, skipping attributes that are unset.
        """
        ret = []
        for disp, field in SUITE_FIELDS:
            val = getattr(self, field, None)
            if val is not None:
                ret.append("%s: %s" % (disp, val))

        return "\n".join(ret)

    def get_architectures(self, skipsrc=False, skipall=False):
        """
        Returns list of Architecture objects

        @type skipsrc: boolean
        @param skipsrc: Whether to skip returning the 'source' architecture entry
        (Default False)

        @type skipall: boolean
        @param skipall: Whether to skip returning the 'all' architecture entry
        (Default False)

        @rtype: list
        @return: list of Architecture objects for the given name (may be empty)
        """

        q = object_session(self).query(Architecture).with_parent(self)
        # Restored guards: without them the skip parameters had no effect.
        if skipsrc:
            q = q.filter(Architecture.arch_string != 'source')
        if skipall:
            q = q.filter(Architecture.arch_string != 'all')
        return q.order_by(Architecture.arch_string).all()

    def get_sources(self, source):
        """
        Returns a query object representing DBSource that is part of C{suite}.

          - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}

        @type source: string
        @param source: source package name

        @rtype: sqlalchemy.orm.query.Query
        @return: a query of DBSource
        """

        session = object_session(self)
        return session.query(DBSource).filter_by(source = source). \
            with_parent(self)
2723 __all__.append('Suite')
@session_wrapper
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Suite
    @return: Suite object for the requested suite name (None if not present)
    """

    q = session.query(Suite).filter_by(suite_name=suite)

    # Restored try/except: return the single match, or None when the suite
    # is unknown (the docstring's session contract implies @session_wrapper).
    try:
        return q.one()
    except NoResultFound:
        return None
2748 __all__.append('get_suite')
2750 ################################################################################
# TODO: should be removed because the implementation is too trivial
@session_wrapper
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name

    @type suite: str
    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry
    (Default False)

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry
    (Default False)

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Architecture objects for the given name (may be empty)
    """

    return get_suite(suite, session).get_architectures(skipsrc, skipall)
2779 __all__.append('get_suite_architectures')
2781 ################################################################################
class SuiteSrcFormat(object):
    """
    Associates a suite with an allowed source format; row of table
    suite_src_formats (see DBConn.__setupmappers), which attaches
    suite_id and src_format_id.
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Restored: this body previously appeared as a 'return' inside
        # __init__ (which raises TypeError on instantiation).
        return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
2790 __all__.append('SuiteSrcFormat')
@session_wrapper
def get_suite_src_formats(suite, session=None):
    """
    Returns list of allowed SrcFormat for C{suite}.

    @type suite: str
    @param suite: Suite name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: the list of allowed source formats for I{suite}
    """

    q = session.query(SrcFormat)
    q = q.join(SuiteSrcFormat)
    q = q.join(Suite).filter_by(suite_name=suite)
    q = q.order_by('format_name')

    # Restored: the function previously built the query but never
    # materialised/returned it.
    return q.all()
2815 __all__.append('get_suite_src_formats')
2817 ################################################################################
class Uid(ORMObject):
    """
    A key user id, row of table uid (see DBConn.__setupmappers).
    Compares equal to a plain string holding the uid.
    """
    def __init__(self, uid = None, name = None):
        # Restored: the constructor previously discarded its arguments.
        self.uid = uid
        self.name = name

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.uid == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.uid != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        return ['uid', 'name', 'fingerprint']

    def not_null_constraints(self):
        # Restored return (was missing); mirrors Suite.not_null_constraints.
        return ['uid']
2842 __all__.append('Uid')
@session_wrapper
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: Uid
    @return: the uid object for the given uidname
    """

    q = session.query(Uid).filter_by(uid=uidname)

    # Restored get-or-insert scaffolding around the visible query /
    # commit_or_flush lines.
    try:
        ret = q.one()
    except NoResultFound:
        uid = Uid()
        uid.uid = uidname
        session.add(uid)
        session.commit_or_flush()
        ret = uid

    return ret
2876 __all__.append('get_or_set_uid')
@session_wrapper
def get_uid_from_fingerprint(fpr, session=None):
    """
    Return the Uid joined to the given fingerprint, or None when the
    fingerprint is unknown or has no uid attached.
    """
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)

    # Restored: return the single match or None, matching the visible
    # 'except NoResultFound' fragment.
    try:
        return q.one()
    except NoResultFound:
        return None
2888 __all__.append('get_uid_from_fingerprint')
2890 ################################################################################
class UploadBlock(object):
    """
    A block on uploads, row of table upload_blocks (see
    DBConn.__setupmappers), which attaches upload_block_id, fingerprint
    and uid.
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Restored: this body previously appeared as a 'return' inside
        # __init__ (which raises TypeError on instantiation).
        return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
2899 __all__.append('UploadBlock')
2901 ################################################################################
class DBConn(object):
    """
    database module init.
    """
    # NOTE(review): the assignment to self.__shared_state below implies the
    # Borg pattern, but the class-level '__shared_state = {}' definition is
    # not visible in this copy -- confirm it exists.
    def __init__(self, *args, **kwargs):
        self.__dict__ = self.__shared_state

        # One-time setup: with shared state, every later instance already
        # sees 'initialised' and skips this branch.
        if not getattr(self, 'initialised', False):
            self.initialised = True
            self.debug = kwargs.has_key('debug')
            # NOTE(review): the call that actually opens the connection
            # (presumably self.__createconn()) is not visible here -- confirm.

    def __setuptables(self):
        # Tables reflected with an explicit 'id' primary key column
        # (workaround for SERIAL reflection, see comment further down).
        tables_with_primary = (
            'build_queue_files',
            'changes_pending_binaries',
            'changes_pending_files',
            'changes_pending_source',
            'pending_bin_contents',
        # NOTE(review): the closing parenthesis of tables_with_primary and a
        # number of table names appear to have been lost in this copy.
        # Tables reflected as-is (no synthesised primary key).
        tables_no_primary = (
            'changes_pending_files_map',
            'changes_pending_source_files',
            'changes_pool_files',
            # TODO: the maintainer column in table override should be removed.
            'suite_architectures',
            'suite_src_formats',
            'suite_build_queue_copy',
        # NOTE(review): the 'views = (' opener for the following sequence is
        # not visible in this copy -- confirm; these names are reflected as
        # read-only views in the loop at the bottom of this method.
            'almost_obsolete_all_associations',
            'almost_obsolete_src_associations',
            'any_associations_source',
            'bin_assoc_by_arch',
            'bin_associations_binaries',
            'binaries_suite_arch',
            'binfiles_suite_component_arch',
            'newest_all_associations',
            'newest_any_associations',
            'newest_src_association',
            'obsolete_all_associations',
            'obsolete_any_associations',
            'obsolete_any_by_all_associations',
            'obsolete_src_associations',
            'src_associations_bin',
            'src_associations_src',
            'suite_arch_by_name',

        # Sqlalchemy version 0.5 fails to reflect the SERIAL type
        # correctly and that is why we have to use a workaround. It can
        # be removed as soon as we switch to version 0.6.
        for table_name in tables_with_primary:
            table = Table(table_name, self.db_meta, \
                Column('id', Integer, primary_key = True), \
                autoload=True, useexisting=True)
            setattr(self, 'tbl_%s' % table_name, table)

        for table_name in tables_no_primary:
            table = Table(table_name, self.db_meta, autoload=True)
            setattr(self, 'tbl_%s' % table_name, table)

        # bin_contents needs special attention until update #41 has been
        self.tbl_bin_contents = Table('bin_contents', self.db_meta, \
            Column('file', Text, primary_key = True),
            Column('binary_id', Integer, ForeignKey('binaries.id'), \
                primary_key = True),
            autoload=True, useexisting=True)

        for view_name in views:
            view = Table(view_name, self.db_meta, autoload=True)
            setattr(self, 'view_%s' % view_name, view)

    def __setupmappers(self):
        # Map each ORM class onto its reflected table; 'extension =
        # validator' hooks validation into the session for the ORMObject
        # subclasses.
        mapper(Architecture, self.tbl_architecture,
            properties = dict(arch_id = self.tbl_architecture.c.id,
               suites = relation(Suite, secondary=self.tbl_suite_architectures,
                   order_by='suite_name',
                   backref=backref('architectures', order_by='arch_string'))),
            extension = validator)

        mapper(Archive, self.tbl_archive,
               properties = dict(archive_id = self.tbl_archive.c.id,
                                 archive_name = self.tbl_archive.c.name))

        mapper(PendingBinContents, self.tbl_pending_bin_contents,
               properties = dict(contents_id =self.tbl_pending_bin_contents.c.id,
                                 filename = self.tbl_pending_bin_contents.c.filename,
                                 package = self.tbl_pending_bin_contents.c.package,
                                 version = self.tbl_pending_bin_contents.c.version,
                                 arch = self.tbl_pending_bin_contents.c.arch,
                                 otype = self.tbl_pending_bin_contents.c.type))

        mapper(DebContents, self.tbl_deb_contents,
               properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
                                 package=self.tbl_deb_contents.c.package,
                                 suite=self.tbl_deb_contents.c.suite,
                                 arch=self.tbl_deb_contents.c.arch,
                                 section=self.tbl_deb_contents.c.section,
                                 filename=self.tbl_deb_contents.c.filename))

        mapper(UdebContents, self.tbl_udeb_contents,
               properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
                                 package=self.tbl_udeb_contents.c.package,
                                 suite=self.tbl_udeb_contents.c.suite,
                                 arch=self.tbl_udeb_contents.c.arch,
                                 section=self.tbl_udeb_contents.c.section,
                                 filename=self.tbl_udeb_contents.c.filename))

        mapper(BuildQueue, self.tbl_build_queue,
               properties = dict(queue_id = self.tbl_build_queue.c.id))

        mapper(BuildQueueFile, self.tbl_build_queue_files,
               properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
                                 poolfile = relation(PoolFile, backref='buildqueueinstances')))

        mapper(DBBinary, self.tbl_binaries,
               properties = dict(binary_id = self.tbl_binaries.c.id,
                                 package = self.tbl_binaries.c.package,
                                 version = self.tbl_binaries.c.version,
                                 maintainer_id = self.tbl_binaries.c.maintainer,
                                 maintainer = relation(Maintainer),
                                 source_id = self.tbl_binaries.c.source,
                                 source = relation(DBSource, backref='binaries'),
                                 arch_id = self.tbl_binaries.c.architecture,
                                 architecture = relation(Architecture),
                                 poolfile_id = self.tbl_binaries.c.file,
                                 poolfile = relation(PoolFile, backref=backref('binary', uselist = False)),
                                 binarytype = self.tbl_binaries.c.type,
                                 fingerprint_id = self.tbl_binaries.c.sig_fpr,
                                 fingerprint = relation(Fingerprint),
                                 install_date = self.tbl_binaries.c.install_date,
                                 suites = relation(Suite, secondary=self.tbl_bin_associations,
                                     backref=backref('binaries', lazy='dynamic'))),
                extension = validator)

        mapper(BinaryACL, self.tbl_binary_acl,
               properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))

        mapper(BinaryACLMap, self.tbl_binary_acl_map,
               properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
                                 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
                                 architecture = relation(Architecture)))

        mapper(Component, self.tbl_component,
               properties = dict(component_id = self.tbl_component.c.id,
                                 component_name = self.tbl_component.c.name),
               extension = validator)

        mapper(DBConfig, self.tbl_config,
               properties = dict(config_id = self.tbl_config.c.id))

        mapper(DSCFile, self.tbl_dsc_files,
               properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
                                 source_id = self.tbl_dsc_files.c.source,
                                 source = relation(DBSource),
                                 poolfile_id = self.tbl_dsc_files.c.file,
                                 poolfile = relation(PoolFile)))

        mapper(PoolFile, self.tbl_files,
               properties = dict(file_id = self.tbl_files.c.id,
                                 filesize = self.tbl_files.c.size,
                                 location_id = self.tbl_files.c.location,
                                 location = relation(Location,
                                     # using lazy='dynamic' in the back
                                     # reference because we have A LOT of
                                     # files in one location
                                     backref=backref('files', lazy='dynamic'))),
               extension = validator)

        mapper(Fingerprint, self.tbl_fingerprint,
               properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
                                 uid_id = self.tbl_fingerprint.c.uid,
                                 uid = relation(Uid),
                                 keyring_id = self.tbl_fingerprint.c.keyring,
                                 keyring = relation(Keyring),
                                 source_acl = relation(SourceACL),
                                 binary_acl = relation(BinaryACL)),
               extension = validator)

        mapper(Keyring, self.tbl_keyrings,
               properties = dict(keyring_name = self.tbl_keyrings.c.name,
                                 keyring_id = self.tbl_keyrings.c.id))

        mapper(DBChange, self.tbl_changes,
               properties = dict(change_id = self.tbl_changes.c.id,
                                 poolfiles = relation(PoolFile,
                                                      secondary=self.tbl_changes_pool_files,
                                                      backref="changeslinks"),
                                 seen = self.tbl_changes.c.seen,
                                 source = self.tbl_changes.c.source,
                                 binaries = self.tbl_changes.c.binaries,
                                 architecture = self.tbl_changes.c.architecture,
                                 distribution = self.tbl_changes.c.distribution,
                                 urgency = self.tbl_changes.c.urgency,
                                 maintainer = self.tbl_changes.c.maintainer,
                                 changedby = self.tbl_changes.c.changedby,
                                 date = self.tbl_changes.c.date,
                                 version = self.tbl_changes.c.version,
                                 files = relation(ChangePendingFile,
                                                  secondary=self.tbl_changes_pending_files_map,
                                                  backref="changesfile"),
                                 in_queue_id = self.tbl_changes.c.in_queue,
                                 in_queue = relation(PolicyQueue,
                                                     primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
                                 approved_for_id = self.tbl_changes.c.approved_for))

        mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
               properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))

        mapper(ChangePendingFile, self.tbl_changes_pending_files,
               properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
                                 filename = self.tbl_changes_pending_files.c.filename,
                                 size = self.tbl_changes_pending_files.c.size,
                                 md5sum = self.tbl_changes_pending_files.c.md5sum,
                                 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
                                 sha256sum = self.tbl_changes_pending_files.c.sha256sum))

        mapper(ChangePendingSource, self.tbl_changes_pending_source,
               properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
                                 change = relation(DBChange),
                                 maintainer = relation(Maintainer,
                                                       primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
                                 changedby = relation(Maintainer,
                                                      primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
                                 fingerprint = relation(Fingerprint),
                                 source_files = relation(ChangePendingFile,
                                                         secondary=self.tbl_changes_pending_source_files,
                                                         backref="pending_sources")))

        mapper(KeyringACLMap, self.tbl_keyring_acl_map,
               properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
                                 keyring = relation(Keyring, backref="keyring_acl_map"),
                                 architecture = relation(Architecture)))

        mapper(Location, self.tbl_location,
               properties = dict(location_id = self.tbl_location.c.id,
                                 component_id = self.tbl_location.c.component,
                                 component = relation(Component, backref='location'),
                                 archive_id = self.tbl_location.c.archive,
                                 archive = relation(Archive),
                                 # FIXME: the 'type' column is old cruft and
                                 # should be removed in the future.
                                 archive_type = self.tbl_location.c.type),
               extension = validator)

        mapper(Maintainer, self.tbl_maintainer,
               properties = dict(maintainer_id = self.tbl_maintainer.c.id,
                   maintains_sources = relation(DBSource, backref='maintainer',
                       primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
                   changed_sources = relation(DBSource, backref='changedby',
                       primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
                extension = validator)

        mapper(NewComment, self.tbl_new_comments,
               properties = dict(comment_id = self.tbl_new_comments.c.id))

        mapper(Override, self.tbl_override,
               properties = dict(suite_id = self.tbl_override.c.suite,
                                 suite = relation(Suite, \
                                    backref=backref('overrides', lazy='dynamic')),
                                 package = self.tbl_override.c.package,
                                 component_id = self.tbl_override.c.component,
                                 component = relation(Component, \
                                    backref=backref('overrides', lazy='dynamic')),
                                 priority_id = self.tbl_override.c.priority,
                                 priority = relation(Priority, \
                                    backref=backref('overrides', lazy='dynamic')),
                                 section_id = self.tbl_override.c.section,
                                 section = relation(Section, \
                                    backref=backref('overrides', lazy='dynamic')),
                                 overridetype_id = self.tbl_override.c.type,
                                 overridetype = relation(OverrideType, \
                                    backref=backref('overrides', lazy='dynamic'))))

        mapper(OverrideType, self.tbl_override_type,
               properties = dict(overridetype = self.tbl_override_type.c.type,
                                 overridetype_id = self.tbl_override_type.c.id))

        mapper(PolicyQueue, self.tbl_policy_queue,
               properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))

        mapper(Priority, self.tbl_priority,
               properties = dict(priority_id = self.tbl_priority.c.id))

        mapper(Section, self.tbl_section,
               properties = dict(section_id = self.tbl_section.c.id,
                                 section=self.tbl_section.c.section))

        mapper(DBSource, self.tbl_source,
               properties = dict(source_id = self.tbl_source.c.id,
                                 version = self.tbl_source.c.version,
                                 maintainer_id = self.tbl_source.c.maintainer,
                                 poolfile_id = self.tbl_source.c.file,
                                 poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
                                 fingerprint_id = self.tbl_source.c.sig_fpr,
                                 fingerprint = relation(Fingerprint),
                                 changedby_id = self.tbl_source.c.changedby,
                                 srcfiles = relation(DSCFile,
                                                     primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
                                 suites = relation(Suite, secondary=self.tbl_src_associations,
                                     backref=backref('sources', lazy='dynamic')),
                                 srcuploaders = relation(SrcUploader)),
               extension = validator)

        mapper(SourceACL, self.tbl_source_acl,
               properties = dict(source_acl_id = self.tbl_source_acl.c.id))

        mapper(SrcFormat, self.tbl_src_format,
               properties = dict(src_format_id = self.tbl_src_format.c.id,
                                 format_name = self.tbl_src_format.c.format_name))

        mapper(SrcUploader, self.tbl_src_uploaders,
               properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
                                 source_id = self.tbl_src_uploaders.c.source,
                                 source = relation(DBSource,
                                                   primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
                                 maintainer_id = self.tbl_src_uploaders.c.maintainer,
                                 maintainer = relation(Maintainer,
                                                       primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))

        mapper(Suite, self.tbl_suite,
               properties = dict(suite_id = self.tbl_suite.c.id,
                                 policy_queue = relation(PolicyQueue),
                                 copy_queues = relation(BuildQueue,
                                     secondary=self.tbl_suite_build_queue_copy)),
                extension = validator)

        mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
               properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
                                 suite = relation(Suite, backref='suitesrcformats'),
                                 src_format_id = self.tbl_suite_src_formats.c.src_format,
                                 src_format = relation(SrcFormat)))

        mapper(Uid, self.tbl_uid,
               properties = dict(uid_id = self.tbl_uid.c.id,
                                 fingerprint = relation(Fingerprint)),
               extension = validator)

        mapper(UploadBlock, self.tbl_upload_blocks,
               properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
                                 fingerprint = relation(Fingerprint, backref="uploadblocks"),
                                 uid = relation(Uid, backref="uploadblocks")))

        # NOTE(review): the 'properties = dict(' opener for this mapper call
        # is not visible in this copy -- confirm.
        mapper(BinContents, self.tbl_bin_contents,
            binary = relation(DBBinary,
                backref=backref('contents', lazy='dynamic', cascade='all')),
            file = self.tbl_bin_contents.c.file))

    ## Connection functions
    def __createconn(self):
        from config import Config
        # Build a postgres connection string: TCP when DB::Host is set,
        # otherwise a local-socket connection.
        # NOTE(review): the 'cnf = Config()' line and the if/else wrapping
        # the two connstr variants are not visible in this copy -- confirm.
            connstr = "postgres://%s" % cnf["DB::Host"]
            if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
                connstr += ":%s" % cnf["DB::Port"]
            connstr += "/%s" % cnf["DB::Name"]

            connstr = "postgres:///%s" % cnf["DB::Name"]
            if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
                connstr += "?port=%s" % cnf["DB::Port"]

        # Optional engine tuning knobs taken from the dak configuration.
        engine_args = { 'echo': self.debug }
        if cnf.has_key('DB::PoolSize'):
            engine_args['pool_size'] = int(cnf['DB::PoolSize'])
        if cnf.has_key('DB::MaxOverflow'):
            engine_args['max_overflow'] = int(cnf['DB::MaxOverflow'])
        if sa_major_version == '0.6' and cnf.has_key('DB::Unicode') and \
            cnf['DB::Unicode'] == 'false':
            engine_args['use_native_unicode'] = False

        self.db_pg = create_engine(connstr, **engine_args)
        self.db_meta = MetaData()
        self.db_meta.bind = self.db_pg
        self.db_smaker = sessionmaker(bind=self.db_pg,
        # NOTE(review): the sessionmaker keyword arguments and their closing
        # parenthesis are not visible in this copy -- confirm.

        self.__setuptables()
        self.__setupmappers()

        # NOTE(review): a 'def session(self):' header for the following
        # return statement appears to have been lost in this copy -- confirm.
        return self.db_smaker()
3335 __all__.append('DBConn')