5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
37 from os.path import normpath
48 import simplejson as json
50 from datetime import datetime, timedelta
51 from errno import ENOENT
52 from tempfile import mkstemp, mkdtemp
53 from subprocess import Popen, PIPE
54 from tarfile import TarFile
56 from inspect import getargspec
59 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
61 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
62 backref, MapperExtension, EXT_CONTINUE, object_mapper
63 from sqlalchemy import types as sqltypes
65 # Don't remove this, we re-export the exceptions to scripts which import us
66 from sqlalchemy.exc import *
67 from sqlalchemy.orm.exc import NoResultFound
69 # Only import Config until Queue stuff is changed to store its config
71 from config import Config
72 from textutils import fix_maintainer
73 from dak_exceptions import DBUpdateError, NoSourceFieldError
75 # suppress some deprecation warnings in squeeze related to sqlalchemy
77 warnings.filterwarnings('ignore', \
78 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
80 # TODO: sqlalchemy needs some extra configuration to correctly reflect
81 # the ind_deb_contents_* indexes - we ignore the warnings at the moment
82 warnings.filterwarnings("ignore", 'Predicate of partial index', SAWarning)
85 ################################################################################
87 # Patch in support for the debversion field type so that it works during
91 # that is for sqlalchemy 0.6
92 UserDefinedType = sqltypes.UserDefinedType
94 # this one for sqlalchemy 0.5
95 UserDefinedType = sqltypes.TypeEngine
# Custom SQLAlchemy column type mapping PostgreSQL's 'debversion' type so
# that reflection works for both SQLA 0.5 and 0.6.
# NOTE(review): the method bodies are missing from this chunk of the file.
97 class DebVersion(UserDefinedType):
98 def get_col_spec(self):
101 def bind_processor(self, dialect):
# ' = None' is needed for sqlalchemy 0.5:
105 def result_processor(self, dialect, coltype = None):
# Register the DebVersion type with the postgres dialect for supported
# SQLAlchemy versions; refuse to run on anything else.
# NOTE(review): the if/elif structure is partially missing from this chunk —
# the raise presumably sits in an unsupported-version branch.
108 sa_major_version = sqlalchemy.__version__[0:3]
109 if sa_major_version in ["0.5", "0.6"]:
110 from sqlalchemy.databases import postgres
111 postgres.ischema_names['debversion'] = DebVersion
113 raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
115 ################################################################################
117 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
119 ################################################################################
# Decorator giving the wrapped function an optional trailing 'session'
# argument: if the caller supplies none, a private session is created for
# the duration of the call (and a matching commit_or_flush is attached).
# NOTE(review): several interior lines (the args-based branch and the final
# session cleanup/close) are missing from this chunk.
121 def session_wrapper(fn):
123 Wrapper around common ".., session=None):" handling. If the wrapped
124 function is called without passing 'session', we create a local one
125 and destroy it when the function ends.
127 Also attaches a commit_or_flush method to the session; if we created a
128 local session, this is a synonym for session.commit(), otherwise it is a
129 synonym for session.flush().
132 def wrapped(*args, **kwargs):
133 private_transaction = False
# Find the session object
136 session = kwargs.get('session')
# Compare positional-arg count against the wrapped function's declared
# parameters (minus the session parameter) to see if session was passed.
139 if len(args) <= len(getargspec(fn)[0]) - 1:
# No session specified as last argument or in kwargs
141 private_transaction = True
142 session = kwargs['session'] = DBConn().session()
# Session is last argument in args
148 session = args[-1] = DBConn().session()
149 private_transaction = True
# commit for a session we own; only flush for a caller-owned one.
151 if private_transaction:
152 session.commit_or_flush = session.commit
154 session.commit_or_flush = session.flush
157 return fn(*args, **kwargs)
159 if private_transaction:
# We created a session; close it.
# Preserve metadata of the wrapped function (Python 2 style).
163 wrapped.__doc__ = fn.__doc__
164 wrapped.func_name = fn.func_name
168 __all__.append('session_wrapper')
170 ################################################################################
# Common base class for all dak ORM classes: provides json()/repr()/str()
# built from the subclass's properties() list, NULL-constraint validation,
# a get() shortcut and a thread-safety clone() helper.
# NOTE(review): numerous interior lines of this class are missing from this
# chunk (method headers for json/classname/__repr__/__str__/validate/fullpath,
# several branches and returns); comments below only describe visible code.
172 class ORMObject(object):
174 ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
175 derived classes must implement the properties() method.
178 def properties(self):
180 This method should be implemented by all derived classes and returns a
181 list of the important properties. The properties 'created' and
182 'modified' will be added automatically. A suffix '_count' should be
183 added to properties that are lists or query objects. The most important
184 property name should be returned as the first element in the list
185 because it is used by repr().
191 Returns a JSON representation of the object based on the properties
192 returned from the properties() method.
# add created and modified
196 all_properties = self.properties() + ['created', 'modified']
197 for property in all_properties:
# check for list or query
199 if property[-6:] == '_count':
200 real_property = property[:-6]
201 if not hasattr(self, real_property):
203 value = getattr(self, real_property)
# lists expose __len__; SQLA query objects expose count()
204 if hasattr(value, '__len__'):
207 elif hasattr(value, 'count'):
209 value = value.count()
211 raise KeyError('Do not understand property %s.' % property)
213 if not hasattr(self, property):
216 value = getattr(self, property)
220 elif isinstance(value, ORMObject):
# use repr() for ORMObject types
# we want a string for all other types because json cannot
227 data[property] = value
228 return json.dumps(data)
232 Returns the name of the class.
234 return type(self).__name__
238 Returns a short string representation of the object using the first
239 element from the properties() method.
241 primary_property = self.properties()[0]
242 value = getattr(self, primary_property)
243 return '<%s %s>' % (self.classname(), str(value))
247 Returns a human readable form of the object using the properties()
250 return '<%s %s>' % (self.classname(), self.json())
252 def not_null_constraints(self):
254 Returns a list of properties that must be not NULL. Derived classes
255 should override this method if needed.
# printf-style template used when validation fails
259 validation_message = \
260 "Validation failed because property '%s' must not be empty in object\n%s"
264 This function validates the not NULL constraints as returned by
265 not_null_constraints(). It raises the DBUpdateError exception if
268 for property in self.not_null_constraints():
# TODO: It is a bit awkward that the mapper configuration allow
# directly setting the numeric _id columns. We should get rid of it
272 if hasattr(self, property + '_id') and \
273 getattr(self, property + '_id') is not None:
275 if not hasattr(self, property) or getattr(self, property) is None:
276 raise DBUpdateError(self.validation_message % \
277 (property, str(self)))
# Shortcut for session.query(cls).get(primary_key).
281 def get(cls, primary_key, session = None):
283 This is a support function that allows getting an object by its primary
286 Architecture.get(3[, session])
288 instead of the more verbose
290 session.query(Architecture).get(3)
292 return session.query(cls).get(primary_key)
294 def session(self, replace = False):
296 Returns the current session that is associated with the object. May
297 return None is object is in detached state.
300 return object_session(self)
302 def clone(self, session = None):
304 Clones the current object in a new session and returns the new clone. A
305 fresh session is created if the optional session parameter is not
306 provided. The function will fail if a session is provided and has
309 RATIONALE: SQLAlchemy's session is not thread safe. This method clones
310 an existing object to allow several threads to work with their own
311 instances of an ORMObject.
313 WARNING: Only persistent (committed) objects can be cloned. Changes
314 made to the original object that are not committed yet will get lost.
315 The session of the new object will always be rolled back to avoid
# A detached object has no session to flush, so cloning cannot work.
319 if self.session() is None:
320 raise RuntimeError( \
321 'Method clone() failed for detached object:\n%s' % self)
322 self.session().flush()
323 mapper = object_mapper(self)
324 primary_key = mapper.primary_key_from_instance(self)
325 object_class = self.__class__
327 session = DBConn().session()
# refuse to clone into a session that has pending changes
328 elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
329 raise RuntimeError( \
330 'Method clone() failed due to unflushed changes in session.')
331 new_object = session.query(object_class).get(primary_key)
333 if new_object is None:
334 raise RuntimeError( \
335 'Method clone() failed for non-persistent object:\n%s' % self)
338 __all__.append('ORMObject')
340 ################################################################################
# MapperExtension hooking instance.validate() into SQLAlchemy's
# before_insert/before_update events; a single module-level instance is
# shared by all mapper configurations.
# NOTE(review): the method bodies (presumably calling validate() and
# returning EXT_CONTINUE) are missing from this chunk.
342 class Validator(MapperExtension):
344 This class calls the validate() method for each instance for the
345 'before_update' and 'before_insert' events. A global object validator is
346 used for configuring the individual mappers.
349 def before_update(self, mapper, connection, instance):
353 def before_insert(self, mapper, connection, instance):
357 validator = Validator()
359 ################################################################################
class Architecture(ORMObject):
    """ORM class for a row of the 'architecture' table."""

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Allow comparing an Architecture directly against a plain string.
        if not isinstance(val, str):
            # This signals to use the normal comparison operator
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        if not isinstance(val, str):
            # This signals to use the normal comparison operator
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # 'arch_string' first: it is what repr() displays.
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']
384 __all__.append('Architecture')
# Look up a single Architecture row by its arch_string.
# NOTE(review): the try:/return q.one()/return None lines are missing from
# this chunk; only the query construction and except clause are visible.
387 def get_architecture(architecture, session=None):
389 Returns database id for given C{architecture}.
391 @type architecture: string
392 @param architecture: The name of the architecture
394 @type session: Session
395 @param session: Optional SQLA session object (a temporary one will be
396 generated if not supplied)
399 @return: Architecture object for the given arch (None if not present)
402 q = session.query(Architecture).filter_by(arch_string=architecture)
406 except NoResultFound:
409 __all__.append('get_architecture')
411 # TODO: should be removed because the implementation is too trivial
def get_architecture_suites(architecture, session=None):
    """
    Returns list of Suite objects for given C{architecture} name

    @type architecture: str
    @param architecture: Architecture name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Suite objects for the given name (may be empty)
    """
    # Delegate the lookup and hand back the relation's suite list.
    arch = get_architecture(architecture, session)
    return arch.suites
430 __all__.append('get_architecture_suites')
432 ################################################################################
# ORM class for a row of the 'archive' table.
# NOTE(review): the __init__ body and the __repr__ header are missing from
# this chunk; the repr line below belongs to the missing __repr__.
434 class Archive(object):
435 def __init__(self, *args, **kwargs):
439 return '<Archive %s>' % self.archive_name
441 __all__.append('Archive')
# Look up an Archive row by (lower-cased) name.
# NOTE(review): the try:/return q.one()/return None lines are missing from
# this chunk.
444 def get_archive(archive, session=None):
446 returns database id for given C{archive}.
448 @type archive: string
449 @param archive: the name of the archive
451 @type session: Session
452 @param session: Optional SQLA session object (a temporary one will be
453 generated if not supplied)
456 @return: Archive object for the given name (None if not present)
# archive names are stored lower-case in the database
459 archive = archive.lower()
461 q = session.query(Archive).filter_by(archive_name=archive)
465 except NoResultFound:
468 __all__.append('get_archive')
470 ################################################################################
# ORM class for one (file, binary) row of the binary-contents table.
# NOTE(review): the __init__ body (the attribute assignments) is missing
# from this chunk.
472 class BinContents(ORMObject):
473 def __init__(self, file = None, binary = None):
477 def properties(self):
478 return ['file', 'binary']
480 __all__.append('BinContents')
482 ################################################################################
# ORM class for a row of the 'binaries' table (a .deb/.udeb in the pool).
# NOTE(review): interior lines are missing from this chunk (at least the
# 'self.source = source' assignment, part of not_null_constraints, and the
# try:/yield of scan_contents).
484 class DBBinary(ORMObject):
485 def __init__(self, package = None, source = None, version = None, \
486 maintainer = None, architecture = None, poolfile = None, \
488 self.package = package
490 self.version = version
491 self.maintainer = maintainer
492 self.architecture = architecture
493 self.poolfile = poolfile
494 self.binarytype = binarytype
496 def properties(self):
497 return ['package', 'version', 'maintainer', 'source', 'architecture', \
498 'poolfile', 'binarytype', 'fingerprint', 'install_date', \
499 'suites_count', 'binary_id', 'contents_count']
501 def not_null_constraints(self):
502 return ['package', 'version', 'maintainer', 'source', 'poolfile', \
# Convenience accessor: component is reached via the pool file's location.
505 def get_component_name(self):
506 return self.poolfile.location.component.component_name
508 def scan_contents(self):
510 Yields the contents of the package. Only regular files are yielded and
511 the path names are normalized after converting them from either utf-8 or
# Stream the package's data.tar through dpkg-deb instead of unpacking it.
514 fullpath = self.poolfile.fullpath
515 dpkg = Popen(['dpkg-deb', '--fsys-tarfile', fullpath], stdout = PIPE)
516 tar = TarFile.open(fileobj = dpkg.stdout, mode = 'r|')
517 for member in tar.getmembers():
519 name = normpath(member.name)
# enforce proper utf-8 encoding
523 except UnicodeDecodeError:
# fall back: treat the name as latin-1 and re-encode as utf-8
524 name = name.decode('iso8859-1').encode('utf-8')
530 __all__.append('DBBinary')
def get_suites_binary_in(package, session=None):
    """
    Returns list of Suite objects which given C{package} name is in

    @type package: str
    @param package: DBBinary package name to search for

    @rtype: list
    @return: list of Suite objects for the given package
    """
    # Any suite containing at least one binary with this package name.
    has_package = Suite.binaries.any(DBBinary.package == package)
    return session.query(Suite).filter(has_package).all()
546 __all__.append('get_suites_binary_in')
def get_component_by_package_suite(package, suite_list, arch_list=None, session=None):
    """
    Returns the component name of the newest binary package in suite_list or
    None if no package is found. The result can be optionally filtered by a list
    of architecture names.

    @type package: str
    @param package: DBBinary package name to search for

    @type suite_list: list of str
    @param suite_list: list of suite_name items

    @type arch_list: list of str
    @param arch_list: optional list of arch_string items; None or an empty
    list means "do not filter by architecture"

    @rtype: str or NoneType
    @return: name of component or None
    """
    # Fix: the parameter previously defaulted to a mutable [] which is a
    # single shared object across all calls; None is the safe sentinel.
    if arch_list is None:
        arch_list = []
    q = session.query(DBBinary).filter_by(package = package). \
        join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
    if len(arch_list) > 0:
        q = q.join(DBBinary.architecture). \
            filter(Architecture.arch_string.in_(arch_list))
    # newest version wins
    binary = q.order_by(desc(DBBinary.version)).first()
    if binary is None:
        # no matching package in any of the given suites
        return None
    return binary.get_component_name()
579 __all__.append('get_component_by_package_suite')
581 ################################################################################
# ORM class for a row of the 'binary_acl' table.
# NOTE(review): the __init__ body and __repr__ header are missing from this chunk.
583 class BinaryACL(object):
584 def __init__(self, *args, **kwargs):
588 return '<BinaryACL %s>' % self.binary_acl_id
590 __all__.append('BinaryACL')
592 ################################################################################
# ORM class for a row of the 'binary_acl_map' table.
# NOTE(review): the __init__ body and __repr__ header are missing from this chunk.
594 class BinaryACLMap(object):
595 def __init__(self, *args, **kwargs):
599 return '<BinaryACLMap %s>' % self.binary_acl_map_id
601 __all__.append('BinaryACLMap')
603 ################################################################################
608 ArchiveDir "%(archivepath)s";
609 OverrideDir "%(overridedir)s";
610 CacheDir "%(cachedir)s";
615 Packages::Compress ". bzip2 gzip";
616 Sources::Compress ". bzip2 gzip";
621 bindirectory "incoming"
626 BinOverride "override.sid.all3";
627 BinCacheDB "packages-accepted.db";
629 FileList "%(filelist)s";
632 Packages::Extensions ".deb .udeb";
635 bindirectory "incoming/"
638 BinOverride "override.sid.all3";
639 SrcOverride "override.sid.all3.src";
640 FileList "%(filelist)s";
# ORM class for a build queue (e.g. buildd incoming): knows how to write
# apt-ftparchive metadata for itself, expire old files, and link/copy new
# pool files in.
# NOTE(review): this class is heavily truncated in this chunk — try/except
# scaffolding, several branches and the cleanup code are missing; comments
# below only describe the visible lines.
644 class BuildQueue(object):
645 def __init__(self, *args, **kwargs):
649 return '<BuildQueue %s>' % self.queue_name
# Regenerate Packages/Sources/Release metadata for this queue directory.
651 def write_metadata(self, starttime, force=False):
# Do we write out metafiles?
653 if not (force or self.generate_metadata):
656 session = DBConn().session().object_session(self)
# temp-file descriptors/names, initialised so cleanup can test them
658 fl_fd = fl_name = ac_fd = ac_name = None
# every known architecture except the pseudo-arch 'source'
660 arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
661 startdir = os.getcwd()
# Grab files we want to include
665 newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
# Write file list with newer files
667 (fl_fd, fl_name) = mkstemp()
669 os.write(fl_fd, '%s\n' % n.fullpath)
# Write minimal apt.conf
# TODO: Remove hardcoding from template
676 (ac_fd, ac_name) = mkstemp()
677 os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
679 'cachedir': cnf["Dir::Cache"],
680 'overridedir': cnf["Dir::Override"],
# Run apt-ftparchive generate
685 os.chdir(os.path.dirname(ac_name))
686 os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
# Run apt-ftparchive release
# TODO: Eww - fix this
690 bname = os.path.basename(self.path)
# We have to remove the Release file otherwise it'll be included in the
697 os.unlink(os.path.join(bname, 'Release'))
701 os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
# Crude hack with open and append, but this whole section is and should be redone.
704 if self.notautomatic:
705 release=open("Release", "a")
706 release.write("NotAutomatic: yes")
# Sign the Release file with the configured key(s).
711 keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
712 if cnf.has_key("Dinstall::SigningPubKeyring"):
713 keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
715 os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
# Move the files if we got this far
718 os.rename('Release', os.path.join(bname, 'Release'))
720 os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
# Clean up any left behind files
# Expire queue files past their stay of execution; also drops unused links.
749 def clean_and_update(self, starttime, Logger, dryrun=False):
750 """WARNING: This routine commits for you"""
751 session = DBConn().session().object_session(self)
753 if self.generate_metadata and not dryrun:
754 self.write_metadata(starttime)
# Grab files older than our execution time
757 older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
763 Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
765 Logger.log(["I: Removing %s from the queue" % o.fullpath])
766 os.unlink(o.fullpath)
# If it wasn't there, don't worry
770 if e.errno == ENOENT:
# TODO: Replace with proper logging call
774 Logger.log(["E: Could not remove %s" % o.fullpath])
# second pass: remove directory entries no longer referenced in the DB
781 for f in os.listdir(self.path):
782 if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
786 r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
787 except NoResultFound:
788 fp = os.path.join(self.path, f)
790 Logger.log(["I: Would remove unused link %s" % fp])
792 Logger.log(["I: Removing unused link %s" % fp])
796 Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
798 def add_file_from_pool(self, poolfile):
799 """Copies a file into the pool. Assumes that the PoolFile object is
800 attached to the same SQLAlchemy session as the Queue object is.
802 The caller is responsible for committing after calling this function."""
803 poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
# Check if we have a file of this name or this ID already
806 for f in self.queuefiles:
807 if f.fileid is not None and f.fileid == poolfile.file_id or \
808 f.poolfile.filename == poolfile_basename:
# In this case, update the BuildQueueFile entry so we
# don't remove it too early
811 f.lastused = datetime.now()
812 DBConn().session().object_session(poolfile).add(f)
# Prepare BuildQueueFile object
816 qf = BuildQueueFile()
817 qf.build_queue_id = self.queue_id
818 qf.lastused = datetime.now()
819 qf.filename = poolfile_basename
821 targetpath = poolfile.fullpath
822 queuepath = os.path.join(self.path, poolfile_basename)
# We need to copy instead of symlink
828 utils.copy(targetpath, queuepath)
# NULL in the fileid field implies a copy
832 os.symlink(targetpath, queuepath)
833 qf.fileid = poolfile.file_id
# Get the same session as the PoolFile is using and add the qf to it
838 DBConn().session().object_session(poolfile).add(qf)
843 __all__.append('BuildQueue')
# Look up a BuildQueue row by queue name.
# NOTE(review): the try:/return q.one()/return None lines are missing from
# this chunk.
846 def get_build_queue(queuename, session=None):
848 Returns BuildQueue object for given C{queue name}, creating it if it does not
851 @type queuename: string
852 @param queuename: The name of the queue
854 @type session: Session
855 @param session: Optional SQLA session object (a temporary one will be
856 generated if not supplied)
859 @return: BuildQueue object for the given queue
862 q = session.query(BuildQueue).filter_by(queue_name=queuename)
866 except NoResultFound:
869 __all__.append('get_build_queue')
871 ################################################################################
# ORM class for one file inside a build queue directory.
# NOTE(review): the __init__ body, the __repr__ header and the fullpath
# property header are missing from this chunk.
873 class BuildQueueFile(object):
874 def __init__(self, *args, **kwargs):
878 return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
# absolute path: queue directory + stored filename
882 return os.path.join(self.buildqueue.path, self.filename)
885 __all__.append('BuildQueueFile')
887 ################################################################################
# ORM class for a row of the 'changes_pending_binaries' table.
# NOTE(review): the __init__ body and __repr__ header are missing from this chunk.
889 class ChangePendingBinary(object):
890 def __init__(self, *args, **kwargs):
894 return '<ChangePendingBinary %s>' % self.change_pending_binary_id
896 __all__.append('ChangePendingBinary')
898 ################################################################################
# ORM class for a row of the 'changes_pending_files' table.
# NOTE(review): the __init__ body and __repr__ header are missing from this chunk.
900 class ChangePendingFile(object):
901 def __init__(self, *args, **kwargs):
905 return '<ChangePendingFile %s>' % self.change_pending_file_id
907 __all__.append('ChangePendingFile')
909 ################################################################################
# ORM class for a row of the 'changes_pending_source' table.
# NOTE(review): the __init__ body and __repr__ header are missing from this chunk.
911 class ChangePendingSource(object):
912 def __init__(self, *args, **kwargs):
916 return '<ChangePendingSource %s>' % self.change_pending_source_id
918 __all__.append('ChangePendingSource')
920 ################################################################################
class Component(ORMObject):
    """ORM class for an archive component (main, contrib, ...)."""

    def __init__(self, component_name = None):
        self.component_name = component_name

    def __eq__(self, val):
        # Allow comparing a Component directly against a plain string.
        if not isinstance(val, str):
            # This signals to use the normal comparison operator
            return NotImplemented
        return self.component_name == val

    def __ne__(self, val):
        if not isinstance(val, str):
            # This signals to use the normal comparison operator
            return NotImplemented
        return self.component_name != val

    def properties(self):
        # 'component_name' first: it is what repr() displays.
        return ['component_name', 'component_id', 'description', \
            'location_count', 'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        return ['component_name']
946 __all__.append('Component')
# Look up a Component row by (lower-cased) name.
# NOTE(review): the try:/return q.one()/return None lines are missing from
# this chunk.
949 def get_component(component, session=None):
951 Returns database id for given C{component}.
953 @type component: string
954 @param component: The name of the component
957 @return: the database id for the given component
# component names are stored lower-case in the database
960 component = component.lower()
962 q = session.query(Component).filter_by(component_name=component)
966 except NoResultFound:
969 __all__.append('get_component')
971 ################################################################################
# ORM class for a row of the 'config' table (key/value settings in the DB).
# NOTE(review): the __init__ body and __repr__ header are missing from this chunk.
973 class DBConfig(object):
974 def __init__(self, *args, **kwargs):
978 return '<DBConfig %s>' % self.name
980 __all__.append('DBConfig')
982 ################################################################################
# Get-or-create for content filenames: returns the existing row's id or
# inserts a new row and returns its id.
# NOTE(review): the try:/session.add(cf) lines are missing from this chunk.
985 def get_or_set_contents_file_id(filename, session=None):
987 Returns database id for given filename.
989 If no matching file is found, a row is inserted.
991 @type filename: string
992 @param filename: The filename
993 @type session: SQLAlchemy
994 @param session: Optional SQL session object (a temporary one will be
995 generated if not supplied). If not passed, a commit will be performed at
996 the end of the function, otherwise the caller is responsible for commiting.
999 @return: the database id for the given filename
1002 q = session.query(ContentFilename).filter_by(filename=filename)
1005 ret = q.one().cafilename_id
1006 except NoResultFound:
# no existing row: insert one and flush/commit so the id is assigned
1007 cf = ContentFilename()
1008 cf.filename = filename
1010 session.commit_or_flush()
1011 ret = cf.cafilename_id
1015 __all__.append('get_or_set_contents_file_id')
# Build and execute the raw SQL that produces a suite's Contents listing.
# NOTE(review): part of the SELECT column list is missing from this chunk.
1018 def get_contents(suite, overridetype, section=None, session=None):
1020 Returns contents for a suite / overridetype combination, limiting
1021 to a section if not None.
1024 @param suite: Suite object
1026 @type overridetype: OverrideType
1027 @param overridetype: OverrideType object
1029 @type section: Section
1030 @param section: Optional section object to limit results to
1032 @type session: SQLAlchemy
1033 @param session: Optional SQL session object (a temporary one will be
1034 generated if not supplied)
1036 @rtype: ResultsProxy
1037 @return: ResultsProxy object set up to return tuples of (filename, section,
# find me all of the contents for a given suite
1042 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
1046 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
1047 JOIN content_file_names n ON (c.filename=n.id)
1048 JOIN binaries b ON (b.id=c.binary_pkg)
1049 JOIN override o ON (o.package=b.package)
1050 JOIN section s ON (s.id=o.section)
1051 WHERE o.suite = :suiteid AND o.type = :overridetypeid
1052 AND b.type=:overridetypename"""
# bind parameters for the query above
1054 vals = {'suiteid': suite.suite_id,
1055 'overridetypeid': overridetype.overridetype_id,
1056 'overridetypename': overridetype.overridetype}
# optionally narrow to one section
1058 if section is not None:
1059 contents_q += " AND s.id = :sectionid"
1060 vals['sectionid'] = section.section_id
1062 contents_q += " ORDER BY fn"
1064 return session.execute(contents_q, vals)
1066 __all__.append('get_contents')
1068 ################################################################################
# ORM class for a row of the content file-paths table.
# NOTE(review): the __init__ body and __repr__ header are missing from this chunk.
1070 class ContentFilepath(object):
1071 def __init__(self, *args, **kwargs):
1075 return '<ContentFilepath %s>' % self.filepath
1077 __all__.append('ContentFilepath')
# Get-or-create for content file paths: returns the existing row's id or
# inserts a new row and returns its id.
# NOTE(review): the try:/session.add(cf) lines are missing from this chunk.
1080 def get_or_set_contents_path_id(filepath, session=None):
1082 Returns database id for given path.
1084 If no matching file is found, a row is inserted.
1086 @type filepath: string
1087 @param filepath: The filepath
1089 @type session: SQLAlchemy
1090 @param session: Optional SQL session object (a temporary one will be
1091 generated if not supplied). If not passed, a commit will be performed at
1092 the end of the function, otherwise the caller is responsible for commiting.
1095 @return: the database id for the given path
1098 q = session.query(ContentFilepath).filter_by(filepath=filepath)
1101 ret = q.one().cafilepath_id
1102 except NoResultFound:
# no existing row: insert one and flush/commit so the id is assigned
1103 cf = ContentFilepath()
1104 cf.filepath = filepath
1106 session.commit_or_flush()
1107 ret = cf.cafilepath_id
1111 __all__.append('get_or_set_contents_path_id')
1113 ################################################################################
# ORM class for a row of the content-associations table.
# NOTE(review): the __init__ body and __repr__ header are missing from this chunk.
1115 class ContentAssociation(object):
1116 def __init__(self, *args, **kwargs):
1120 return '<ContentAssociation %s>' % self.ca_id
1122 __all__.append('ContentAssociation')
# Bulk-insert the contents (path list) of one binary into bin_contents.
# NOTE(review): the try/except/commit/rollback scaffolding and the final
# return are missing from this chunk.
1124 def insert_content_paths(binary_id, fullpaths, session=None):
1126 Make sure given path is associated with given binary id
1128 @type binary_id: int
1129 @param binary_id: the id of the binary
1130 @type fullpaths: list
1131 @param fullpaths: the list of paths of the file being associated with the binary
1132 @type session: SQLAlchemy session
1133 @param session: Optional SQLAlchemy session. If this is passed, the caller
1134 is responsible for ensuring a transaction has begun and committing the
1135 results or rolling back based on the result code. If not passed, a commit
1136 will be performed at the end of the function, otherwise the caller is
1137 responsible for commiting.
1139 @return: True upon success
1142 privatetrans = False
1144 session = DBConn().session()
# generator producing one bind-parameter dict per path, with any leading
# './' stripped off
1149 def generate_path_dicts():
1150 for fullpath in fullpaths:
1151 if fullpath.startswith( './' ):
1152 fullpath = fullpath[2:]
1154 yield {'filename':fullpath, 'id': binary_id }
1156 for d in generate_path_dicts():
1157 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
1166 traceback.print_exc()
# Only rollback if we set up the session ourself
1175 __all__.append('insert_content_paths')
1177 ################################################################################
# ORM class for a row of the 'dsc_files' table.
# NOTE(review): the __init__ body and __repr__ header are missing from this chunk.
1179 class DSCFile(object):
1180 def __init__(self, *args, **kwargs):
1184 return '<DSCFile %s>' % self.dscfile_id
1186 __all__.append('DSCFile')
# Query DSCFile rows, optionally narrowed by any combination of ids.
# NOTE(review): the final 'return q.all()' line is missing from this chunk.
1189 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
1191 Returns a list of DSCFiles which may be empty
1193 @type dscfile_id: int (optional)
1194 @param dscfile_id: the dscfile_id of the DSCFiles to find
1196 @type source_id: int (optional)
1197 @param source_id: the source id related to the DSCFiles to find
1199 @type poolfile_id: int (optional)
1200 @param poolfile_id: the poolfile id related to the DSCFiles to find
1203 @return: Possibly empty list of DSCFiles
1206 q = session.query(DSCFile)
# each filter is applied only when the corresponding id was supplied
1208 if dscfile_id is not None:
1209 q = q.filter_by(dscfile_id=dscfile_id)
1211 if source_id is not None:
1212 q = q.filter_by(source_id=source_id)
1214 if poolfile_id is not None:
1215 q = q.filter_by(poolfile_id=poolfile_id)
1219 __all__.append('get_dscfiles')
1221 ################################################################################
# ORM class for a row of the 'files' table (a file stored in the pool).
# NOTE(review): interior lines are missing from this chunk (the md5sum
# parameter of __init__, part of its body, and the fullpath property header).
1223 class PoolFile(ORMObject):
1224 def __init__(self, filename = None, location = None, filesize = -1, \
1226 self.filename = filename
1227 self.location = location
1228 self.filesize = filesize
1229 self.md5sum = md5sum
# absolute path: location directory + stored filename
1233 return os.path.join(self.location.path, self.filename)
# True when both size and md5sum match the stored values
# ('long' is Python 2 only).
1235 def is_valid(self, filesize = -1, md5sum = None):
1236 return self.filesize == long(filesize) and self.md5sum == md5sum
1238 def properties(self):
1239 return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
1240 'sha256sum', 'location', 'source', 'binary', 'last_used']
1242 def not_null_constraints(self):
1243 return ['filename', 'md5sum', 'location']
1245 __all__.append('PoolFile')
# Check whether a pool file with the given name/size/md5sum exists at the
# given location.
# NOTE(review): the lines assigning the 'valid' flag are missing from this
# chunk; only the lookup, the validity test and the return are visible.
1248 def check_poolfile(filename, filesize, md5sum, location_id, session=None):
1251 (ValidFileFound [boolean], PoolFile object or None)
1253 @type filename: string
1254 @param filename: the filename of the file to check against the DB
1257 @param filesize: the size of the file to check against the DB
1259 @type md5sum: string
1260 @param md5sum: the md5sum of the file to check against the DB
1262 @type location_id: int
1263 @param location_id: the id of the location to look in
1266 @return: Tuple of length 2.
1267 - If valid pool file found: (C{True}, C{PoolFile object})
1268 - If valid pool file not found:
1269 - (C{False}, C{None}) if no file found
1270 - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
# look the file up through its location's files relation
1273 poolfile = session.query(Location).get(location_id). \
1274 files.filter_by(filename=filename).first()
1276 if poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum):
1279 return (valid, poolfile)
1281 __all__.append('check_poolfile')
# TODO: the implementation can trivially be inlined at the place where the
# function is called
def get_poolfile_by_id(file_id, session=None):
    """
    Returns a PoolFile objects or None for the given id

    @type file_id: int
    @param file_id: the id of the file to look for

    @rtype: PoolFile or None
    @return: either the PoolFile object or None
    """
    # Query.get() returns None for a missing primary key, so no exception
    # handling is needed here.
    return session.query(PoolFile).get(file_id)

__all__.append('get_poolfile_by_id')
def get_poolfile_like_name(filename, session=None):
    """
    Returns an array of PoolFile objects which are like the given name

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @rtype: array
    @return: array of PoolFile objects
    """
    # Matches any pool file whose path ends in "/<filename>".
    # TODO: There must be a way of properly using bind parameters with %FOO%
    q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))

    # NOTE(review): the final "return q.all()" is elided in this excerpt.

__all__.append('get_poolfile_like_name')
def add_poolfile(filename, datadict, location_id, session=None):
    """
    Add a new file to the pool

    @type filename: string
    @param filename: filename

    @type datadict: dict
    @param datadict: dict with needed data (size, md5sum, sha1sum, sha256sum)

    @type location_id: int
    @param location_id: database id of the location

    @rtype: PoolFile
    @return: the PoolFile object created
    """
    poolfile = PoolFile()
    poolfile.filename = filename
    poolfile.filesize = datadict["size"]
    poolfile.md5sum = datadict["md5sum"]
    poolfile.sha1sum = datadict["sha1sum"]
    poolfile.sha256sum = datadict["sha256sum"]
    poolfile.location_id = location_id

    session.add(poolfile)
    # Flush to get a file id (NB: This is not a commit)
    # NOTE(review): the flush and "return poolfile" lines are elided in this
    # excerpt.

__all__.append('add_poolfile')
1353 ################################################################################
class Fingerprint(ORMObject):
    """A GPG key fingerprint, optionally associated with a keyring and uid."""
    def __init__(self, fingerprint = None):
        self.fingerprint = fingerprint

    def properties(self):
        # NOTE(review): the list continuation line is elided in this excerpt.
        return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \

    def not_null_constraints(self):
        # Only the fingerprint itself is mandatory.
        return ['fingerprint']

__all__.append('Fingerprint')
def get_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr or None
    """
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    # NOTE(review): the "try: ret = q.one()" lines and the fallback
    # assignment are elided in this excerpt.
    except NoResultFound:

__all__.append('get_fingerprint')
def get_or_set_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    If no matching fpr is found, a row is inserted.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr
    """
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    # NOTE(review): the "try: ret = q.one()" lines are elided in this excerpt.
    except NoResultFound:
        # No such fingerprint yet: insert it.
        fingerprint = Fingerprint()
        fingerprint.fingerprint = fpr
        session.add(fingerprint)
        # Commit if we own the session, otherwise just flush.
        session.commit_or_flush()

__all__.append('get_or_set_fingerprint')
1430 ################################################################################
# Helper routine for Keyring class
def get_ldap_name(entry):
    """Build a display name from the cn/mn/sn attributes of an LDAP entry."""
    for k in ["cn", "mn", "sn"]:
        # Skip empty or placeholder ("-") name components.
        # NOTE(review): the attribute fetch and the name-accumulation lines
        # are elided in this excerpt; the join below really runs after the
        # loop in the full source.
        if ret and ret[0] != "" and ret[0] != "-":
            return " ".join(name)
1441 ################################################################################
class Keyring(object):
    """In-memory representation of a GPG keyring plus its database metadata."""
    # Command template: %s is the keyring path; the --with-colons output is
    # parsed by load_keys() below.
    gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
                     " --with-colons --fingerprint --fingerprint"

    def __init__(self, *args, **kwargs):
        # NOTE(review): __init__ body and the "def __repr__(self):" line are
        # elided in this excerpt; the return below belongs to __repr__.
        return '<Keyring %s>' % self.keyring_name
1456 def de_escape_gpg_str(self, txt):
1457 esclist = re.split(r'(\\x..)', txt)
1458 for x in range(1,len(esclist),2):
1459 esclist[x] = "%c" % (int(esclist[x][2:],16))
1460 return "".join(esclist)
    def parse_address(self, uid):
        """parses uid and returns a tuple of real name and email address"""
        # email.Utils handles RFC 2822 "Name <address>" forms.
        (name, address) = email.Utils.parseaddr(uid)
        # Strip any parenthesised comment, then undo gpg's escaping.
        name = re.sub(r"\s*[(].*[)]", "", name)
        name = self.de_escape_gpg_str(name)
        return (name, address)
    def load_keys(self, keyring):
        """Populate self.keys and self.fpr_lookup by parsing gpg's
        --with-colons output for the given keyring path."""
        if not self.keyring_id:
            raise Exception('Must be initialized with database information')

        # Stream gpg's colon-separated records.
        k = os.popen(self.gpg_invocation % keyring, "r")

        for line in k.xreadlines():
            field = line.split(":")
            if field[0] == "pub":
                # NOTE(review): the lines establishing 'key' and the per-key
                # dict are elided in this excerpt.
                (name, addr) = self.parse_address(field[9])
                self.keys[key]["email"] = addr
                self.keys[key]["name"] = name
                self.keys[key]["fingerprints"] = []
            elif key and field[0] == "sub" and len(field) >= 12:
                # Subkey record: remember whether it has the signing ("s")
                # capability so following fpr records are attributed.
                signingkey = ("s" in field[11])
            elif key and field[0] == "uid":
                (name, addr) = self.parse_address(field[9])
                # First plausible email address wins.
                if "email" not in self.keys[key] and "@" in addr:
                    self.keys[key]["email"] = addr
                    self.keys[key]["name"] = name
            elif signingkey and field[0] == "fpr":
                # Map the fingerprint back to its primary key id.
                self.keys[key]["fingerprints"].append(field[9])
                self.fpr_lookup[field[9]] = key
    def import_users_from_ldap(self, session):
        """Cross-reference the loaded keys against LDAP and record which uid
        each fingerprint belongs to.

        Returns (byname, byuid): dicts mapping uid -> (keyid, name) and
        keyid -> (uid, name)."""
        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]

        # Anonymous bind is sufficient for these attributes.
        l = ldap.open(LDAPServer)
        l.simple_bind_s("","")
        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
               ["uid", "keyfingerprint", "cn", "mn", "sn"])

        ldap_fin_uid_id = {}

        # NOTE(review): the result-dict initialisation and the
        # "for entry in ..." loop header are elided in this excerpt; the
        # following lines run once per LDAP entry.
        uid = entry["uid"][0]
        name = get_ldap_name(entry)
        fingerprints = entry["keyFingerPrint"]

        for f in fingerprints:
            # Only fingerprints present in this keyring are of interest.
            key = self.fpr_lookup.get(f, None)
            if key not in self.keys:
                # NOTE(review): a guard line ("continue") is elided here.
                self.keys[key]["uid"] = uid

            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, name)
            byname[uid] = (keyid, name)

        return (byname, byuid)
    def generate_users_from_keyring(self, format, session):
        """Create or fetch Uid rows for every key in the keyring.

        format is a template with one %s slot for the email address.
        Returns (byname, byuid) lookup dicts as in import_users_from_ldap."""
        # NOTE(review): the result-dict initialisation lines are elided in
        # this excerpt, as is the if/else structure of the loop body.
        for x in self.keys.keys():
            if "email" not in self.keys[x]:
                # No usable address: mark with the invalid-uid placeholder.
                self.keys[x]["uid"] = format % "invalid-uid"
            uid = format % self.keys[x]["email"]
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, self.keys[x]["name"])
            byname[uid] = (keyid, self.keys[x]["name"])
            self.keys[x]["uid"] = uid

        # Shared placeholder entry for all keys without a usable address.
        uid = format % "invalid-uid"
        keyid = get_or_set_uid(uid, session).uid_id
        byuid[keyid] = (uid, "ungeneratable user id")
        byname[uid] = (keyid, "ungeneratable user id")

        return (byname, byuid)

__all__.append('Keyring')
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @rtype: Keyring
    @return: the Keyring object for this keyring
    """
    q = session.query(Keyring).filter_by(keyring_name=keyring)

    # NOTE(review): the "try: ret = q.one()" lines and the fallback
    # assignment are elided in this excerpt.
    except NoResultFound:

__all__.append('get_keyring')
1587 ################################################################################
class KeyringACLMap(object):
    """Mapping between a keyring and the ACL granted to its keys."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): __init__ body and the "def __repr__(self):" line are
        # elided in this excerpt; the return below belongs to __repr__.
        return '<KeyringACLMap %s>' % self.keyring_acl_map_id

__all__.append('KeyringACLMap')
1598 ################################################################################
class DBChange(object):
    """A .changes upload recorded in the database."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): __init__ body and the "def __repr__(self):" line are
        # elided in this excerpt; the return below belongs to __repr__.
        return '<DBChange %s>' % self.changesname

    def clean_from_queue(self):
        """Detach this upload from its policy queue and drop the associated
        pending-file bookkeeping rows."""
        session = DBConn().session().object_session(self)

        # Remove changes_pool_files entries

        # Remove changes_pending_files references

        # Clear out of queue
        self.in_queue = None
        self.approved_for_id = None

__all__.append('DBChange')
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return: DBChange object for the given filename (C{None} if not present)
    """
    q = session.query(DBChange).filter_by(changesname=filename)

    # NOTE(review): the "try: ret = q.one()" lines and the fallback
    # assignment are elided in this excerpt.
    except NoResultFound:

__all__.append('get_dbchange')
1647 ################################################################################
class Location(ORMObject):
    """An on-disk archive location (path plus component and archive type)."""
    def __init__(self, path = None, component = None):
        # NOTE(review): the "self.path = path" assignment is elided in this
        # excerpt.
        self.component = component
        # the column 'type' should go away, see comment at mapper
        self.archive_type = 'pool'

    def properties(self):
        # NOTE(review): the list continuation line is elided in this excerpt.
        return ['path', 'location_id', 'archive_type', 'component', \

    def not_null_constraints(self):
        return ['path', 'archive_type']

__all__.append('Location')
def get_location(location, component=None, archive=None, session=None):
    """
    Returns Location object for the given combination of location, component
    and archive

    @type location: string
    @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}

    @type component: string
    @param component: the component name (if None, no restriction applied)

    @type archive: string
    @param archive: the archive name (if None, no restriction applied)

    @rtype: Location / None
    @return: Either a Location object or None if one can't be found
    """
    q = session.query(Location).filter_by(path=location)

    if archive is not None:
        q = q.join(Archive).filter_by(archive_name=archive)

    if component is not None:
        q = q.join(Component).filter_by(component_name=component)

    # NOTE(review): the "try: ret = q.one()" lines and the fallback
    # assignment are elided in this excerpt.
    except NoResultFound:

__all__.append('get_location')
1699 ################################################################################
class Maintainer(ORMObject):
    """A maintainer (or changed-by) name/email as stored in the database."""
    def __init__(self, name = None):
        """NOTE(review): the assignment body is elided in this excerpt."""

    def properties(self):
        return ['name', 'maintainer_id']

    def not_null_constraints(self):
        """NOTE(review): the return statement is elided in this excerpt."""

    def get_split_maintainer(self):
        """Split self.name via fix_maintainer(); empty strings when unset."""
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())

__all__.append('Maintainer')
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """
    q = session.query(Maintainer).filter_by(name=name)

    # NOTE(review): the "try: ret = q.one()" lines are elided in this excerpt.
    except NoResultFound:
        # No such maintainer yet: insert it.
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        # Commit if we own the session, otherwise just flush.
        session.commit_or_flush()

__all__.append('get_or_set_maintainer')
def get_maintainer(maintainer_id, session=None):
    """
    Return the name of the maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer
    @return: the Maintainer with this C{maintainer_id}
    """
    # Query.get() returns None for a missing primary key.
    return session.query(Maintainer).get(maintainer_id)

__all__.append('get_maintainer')
1770 ################################################################################
class NewComment(object):
    """An ftpmaster comment on a (package, version) sitting in the NEW queue."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): __init__ body and the "def __repr__(self):" line are
        # elided in this excerpt; the return below belongs to __repr__.
        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)

__all__.append('NewComment')
def has_new_comment(package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """
    q = session.query(NewComment)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    # The comparison already yields a bool; the previous bool(...) wrapper
    # was redundant.
    return q.count() > 0
__all__.append('has_new_comment')

def get_new_comments(package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """
    # Filters are ANDed; any parameter left as None is skipped.
    q = session.query(NewComment)
    if package is not None: q = q.filter_by(package=package)
    if version is not None: q = q.filter_by(version=version)
    if comment_id is not None: q = q.filter_by(comment_id=comment_id)

    # NOTE(review): the final "return q.all()" is elided in this excerpt.

__all__.append('get_new_comments')
1840 ################################################################################
class Override(ORMObject):
    """An override entry mapping a package to suite/component/type/section/priority."""
    def __init__(self, package = None, suite = None, component = None, overridetype = None, \
        section = None, priority = None):
        self.package = package
        # NOTE(review): the "self.suite = suite" assignment is elided in this
        # excerpt.
        self.component = component
        self.overridetype = overridetype
        self.section = section
        self.priority = priority

    def properties(self):
        # NOTE(review): the list continuation line is elided in this excerpt.
        return ['package', 'suite', 'component', 'overridetype', 'section', \

    def not_null_constraints(self):
        return ['package', 'suite', 'component', 'overridetype', 'section']

__all__.append('Override')
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to.  If
                  None, don't limit.  Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
                      limit to.  If None, don't limit.  Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
                         limit to.  If None, don't limit.  Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """
    q = session.query(Override)
    q = q.filter_by(package=package)

    # Each optional filter accepts a scalar or a list; normalise to a list
    # before using IN.
    if suite is not None:
        if not isinstance(suite, list): suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list): component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list): overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

    # NOTE(review): the final "return q.all()" is elided in this excerpt.

__all__.append('get_override')
1909 ################################################################################
class OverrideType(ORMObject):
    """One of the override types known to the archive (e.g. deb, udeb, dsc)."""
    def __init__(self, overridetype = None):
        self.overridetype = overridetype

    def properties(self):
        # Attributes exposed via the ORMObject machinery.
        return ['overridetype', 'overridetype_id', 'overrides_count']

    def not_null_constraints(self):
        # Columns that must never be NULL.
        return ['overridetype']

__all__.append('OverrideType')
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: OverrideType
    @return: the database id for the given override type
    """
    q = session.query(OverrideType).filter_by(overridetype=override_type)

    # NOTE(review): the "try: ret = q.one()" lines and the fallback
    # assignment are elided in this excerpt.
    except NoResultFound:

__all__.append('get_override_type')
1948 ################################################################################
class DebContents(object):
    """A (package, file) contents row for a .deb (deb_contents table)."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Fixed the 'DebConetnts' typo in the original repr string.
        return '<DebContents %s: %s>' % (self.package.package, self.file)
__all__.append('DebContents')
class UdebContents(object):
    """A (package, file) contents row for a .udeb (udeb_contents table)."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Fixed the 'UdebConetnts' typo in the original repr string.
        return '<UdebContents %s: %s>' % (self.package.package, self.file)
__all__.append('UdebContents')

class PendingBinContents(object):
    """Temporary contents rows for a binary awaiting acceptance."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): __init__ body and the "def __repr__(self):" line are
        # elided in this excerpt; the return below belongs to __repr__.
        return '<PendingBinContents %s>' % self.contents_id

__all__.append('PendingBinContents')
# NOTE(review): large parts of this function (the remaining parameters of the
# signature, the try block, the delete/add/commit calls and the return
# statements) are elided in this excerpt.
def insert_pending_content_paths(package,
    """
    Make sure given paths are temporarily associated with given
    package

    @type package: dict
    @param package: the package to associate with should have been read in from the binary control file
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session.  If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code.  If not passed, a commit
    will be performed at the end of the function

    @return: True upon success, False if there is a problem
    """
    # privatetrans tracks whether we created the session (and thus must
    # commit/rollback it) ourselves.
    privatetrans = False
        session = DBConn().session()

    arch = get_architecture(package['Architecture'], session)
    arch_id = arch.arch_id

    # Remove any already existing recorded files for this package
    q = session.query(PendingBinContents)
    q = q.filter_by(package=package['Package'])
    q = q.filter_by(version=package['Version'])
    q = q.filter_by(architecture=arch_id)

    for fullpath in fullpaths:

        # Normalise "./usr/..." to "usr/...".
        if fullpath.startswith( "./" ):
            fullpath = fullpath[2:]

        pca = PendingBinContents()
        pca.package = package['Package']
        pca.version = package['Version']
        pca.architecture = arch_id

        pca.type = 8 # gross
        pca.type = 7 # also gross

    # Only commit if we set up the session ourself
    except Exception, e:
        traceback.print_exc()

        # Only rollback if we set up the session ourself

__all__.append('insert_pending_content_paths')
2053 ################################################################################
class PolicyQueue(object):
    """A policy queue (e.g. NEW) that uploads can be held in."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): __init__ body and the "def __repr__(self):" line are
        # elided in this excerpt; the return below belongs to __repr__.
        return '<PolicyQueue %s>' % self.queue_name

__all__.append('PolicyQueue')
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(queue_name=queuename)

    # NOTE(review): the "try: ret = q.one()" lines and the fallback
    # assignment are elided in this excerpt.
    except NoResultFound:

__all__.append('get_policy_queue')
def get_policy_queue_from_path(pathname, session=None):
    """
    Returns PolicyQueue object for given C{path name}

    @type pathname: string
    @param pathname: The path

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(path=pathname)

    # NOTE(review): the "try: ret = q.one()" lines and the fallback
    # assignment are elided in this excerpt.
    except NoResultFound:

__all__.append('get_policy_queue_from_path')
2114 ################################################################################
class Priority(ORMObject):
    """A Debian package priority (required, important, ..., extra)."""
    def __init__(self, priority = None, level = None):
        self.priority = priority
        # NOTE(review): the "self.level = level" assignment is elided in this
        # excerpt.

    def properties(self):
        # Attributes exposed via the ORMObject machinery.
        return ['priority', 'priority_id', 'level', 'overrides_count']

    def not_null_constraints(self):
        # Columns that must never be NULL.
        return ['priority', 'level']
2127 def __eq__(self, val):
2128 if isinstance(val, str):
2129 return (self.priority == val)
2130 # This signals to use the normal comparison operator
2131 return NotImplemented
2133 def __ne__(self, val):
2134 if isinstance(val, str):
2135 return (self.priority != val)
2136 # This signals to use the normal comparison operator
2137 return NotImplemented
__all__.append('Priority')

def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority
    """
    q = session.query(Priority).filter_by(priority=priority)

    # NOTE(review): the "try: ret = q.one()" lines and the fallback
    # assignment are elided in this excerpt.
    except NoResultFound:

__all__.append('get_priority')
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """
    q = session.query(Priority)
    # NOTE(review): the dict initialisation, loop header and return are
    # elided in this excerpt; this line runs once per Priority row.
    ret[x.priority] = x.priority_id

__all__.append('get_priorities')
2188 ################################################################################
class Section(ORMObject):
    """An archive section (admin, devel, libs, ...)."""
    def __init__(self, section = None):
        self.section = section

    def properties(self):
        # Attributes exposed via the ORMObject machinery.
        return ['section', 'section_id', 'overrides_count']

    def not_null_constraints(self):
        """NOTE(review): the return statement is elided in this excerpt."""
2200 def __eq__(self, val):
2201 if isinstance(val, str):
2202 return (self.section == val)
2203 # This signals to use the normal comparison operator
2204 return NotImplemented
2206 def __ne__(self, val):
2207 if isinstance(val, str):
2208 return (self.section != val)
2209 # This signals to use the normal comparison operator
2210 return NotImplemented
__all__.append('Section')

def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name
    """
    q = session.query(Section).filter_by(section=section)

    # NOTE(review): the "try: ret = q.one()" lines and the fallback
    # assignment are elided in this excerpt.
    except NoResultFound:

__all__.append('get_section')
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """
    q = session.query(Section)
    # NOTE(review): the dict initialisation, loop header and return are
    # elided in this excerpt; this line runs once per Section row.
    ret[x.section] = x.section_id

__all__.append('get_sections')
2261 ################################################################################
class DBSource(ORMObject):
    """A source package version as stored in the database."""
    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date

    def properties(self):
        """Attributes exposed via the ORMObject machinery."""
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \
            'install_date', 'binaries_count']

    def not_null_constraints(self):
        """Columns that must never be NULL.

        The duplicate 'install_date' entry previously present in this list
        has been removed; the constrained column set is unchanged."""
        return ['source', 'version', 'install_date', 'maintainer', \
            'changedby', 'poolfile']

__all__.append('DBSource')
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
    1. exact match     => 1.0-3
    2. bin-only NMU    => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: list
    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: returns 1 if a source with expected version is found, otherwise 0
    """
    # Strip any +bN binary-NMU suffix so a rebuilt binary still matches its
    # source version.
    from daklib.regexes import re_bin_only_nmu
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))

        # source must exist in suite X, or in some other suite that's
        # mapped to X, recursively... silent-maps are counted too,
        # unreleased-maps aren't.
        maps = cnf.ValueList("SuiteMappings")[:]
        maps = [ m.split() for m in maps ]
        maps = [ (x[1], x[2]) for x in maps
                 if x[0] == "map" or x[0] == "silent-map" ]

        for (from_, to) in maps:
            # NOTE(review): the line extending 's' is elided in this excerpt.
            if from_ in s and to not in s:

        q = q.filter(DBSource.suites.any(Suite.suite_name.in_(s)))

    # No source found so return not ok
    # NOTE(review): the early-exit and return lines are elided in this
    # excerpt.

__all__.append('source_exists')
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """
    # any() generates an EXISTS subquery against the suite/source relation.
    return session.query(Suite).filter(Suite.sources.any(source=source)).all()

__all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @type source: str
    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect.  If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBSource objects for the given name (may be empty)
    """
    q = session.query(DBSource).filter_by(source=source)

    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)

    # NOTE(review): the final "return q.all()" is elided in this excerpt.

__all__.append('get_sources_from_name')
# FIXME: This function fails badly if it finds more than 1 source package and
# its implementation is trivial enough to be inlined.
def get_source_in_suite(source, suite, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite: string
    @param suite: the suite name

    @rtype: string
    @return: the version for I{source} in I{suite}
    """
    q = get_suite(suite, session).get_sources(source)

    # NOTE(review): the "try: return q.one()" lines and the fallback are
    # elided in this excerpt.
    except NoResultFound:

__all__.append('get_source_in_suite')
2425 ################################################################################
def add_dsc_to_db(u, filename, session=None):
    """Record a source upload (its .dsc and constituent files) in the
    database.

    Returns (source, dsc_component, dsc_location_id, pfs) where pfs is the
    list of PoolFile objects added along the way.

    NOTE(review): several lines (DBSource/DSCFile construction, loop bodies,
    flushes) are elided in this excerpt.
    """
    entry = u.pkg.files[filename]

    source.source = u.pkg.dsc["source"]
    source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
    source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
    source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    source.install_date = datetime.now().date()

    dsc_component = entry["component"]
    dsc_location_id = entry["location id"]

    source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")

    # Set up a new poolfile if necessary
    if not entry.has_key("files id") or not entry["files id"]:
        filename = entry["pool name"] + filename
        poolfile = add_poolfile(filename, entry, dsc_location_id, session)
        pfs.append(poolfile)
        entry["files id"] = poolfile.file_id

    source.poolfile_id = entry["files id"]

    suite_names = u.pkg.changes["distribution"].keys()
    source.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Add the source files to the DB (files and dsc_files)
    dscfile.source_id = source.source_id
    dscfile.poolfile_id = entry["files id"]
    session.add(dscfile)

    for dsc_file, dentry in u.pkg.dsc_files.items():
        df.source_id = source.source_id

        # If the .orig tarball is already in the pool, it's
        # files id is stored in dsc_files by check_dsc().
        files_id = dentry.get("files id", None)

        # Find the entry in the files hash
        # TODO: Bail out here properly
        for f, e in u.pkg.files.items():

        if files_id is None:
            filename = dfentry["pool name"] + dsc_file

            (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
            # FIXME: needs to check for -1/-2 and or handle exception
            if found and obj is not None:
                files_id = obj.file_id

        # If still not found, add it
        if files_id is None:
            # HACK: Force sha1sum etc into dentry
            dentry["sha1sum"] = dfentry["sha1sum"]
            dentry["sha256sum"] = dfentry["sha256sum"]
            poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
            pfs.append(poolfile)
            files_id = poolfile.file_id

        # Sanity check: the files id we ended up with must exist.
        poolfile = get_poolfile_by_id(files_id, session)
        if poolfile is None:
            utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
        pfs.append(poolfile)

        df.poolfile_id = files_id

    # Add the src_uploaders to the DB
    uploader_ids = [source.maintainer_id]
    if u.pkg.dsc.has_key("uploaders"):
        # Uploaders are comma-separated; the replace/split dance keeps
        # commas inside "Name <addr>" intact.
        for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
            uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)

    for up_id in uploader_ids:
        if added_ids.has_key(up_id):
            utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))

        su.maintainer_id = up_id
        su.source_id = source.source_id

    return source, dsc_component, dsc_location_id, pfs

__all__.append('add_dsc_to_db')
def add_deb_to_db(u, filename, session=None):
    """
    Contrary to what you might expect, this routine deals with both
    debs and udebs.  That info is in 'dbtype', whilst 'type' is
    'deb' for both of them
    """
    entry = u.pkg.files[filename]

    # NOTE(review): the DBBinary construction line is elided in this excerpt.
    bin.package = entry["package"]
    bin.version = entry["version"]
    bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
    bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    bin.arch_id = get_architecture(entry["architecture"], session).arch_id
    bin.binarytype = entry["dbtype"]

    # Work out the pool path and location for the file.
    filename = entry["pool name"] + filename
    fullpath = os.path.join(cnf["Dir::Pool"], filename)
    if not entry.get("location id", None):
        entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id

    # NOTE(review): the "else:" separating the two branches below is elided
    # in this excerpt.
    if entry.get("files id", None):
        poolfile = get_poolfile_by_id(bin.poolfile_id)
        bin.poolfile_id = entry["files id"]
        poolfile = add_poolfile(filename, entry, entry["location id"], session)
        bin.poolfile_id = entry["files id"] = poolfile.file_id

    # A binary must map to exactly one source package.
    bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
    if len(bin_sources) != 1:
        raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
                                  (bin.package, bin.version, entry["architecture"],
                                   filename, bin.binarytype, u.pkg.changes["fingerprint"])

    bin.source_id = bin_sources[0].source_id

    # Add and flush object so it has an ID
    suite_names = u.pkg.changes["distribution"].keys()
    bin.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Deal with contents - disabled for now
    #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
    #    print "REJECT\nCould not determine contents of package %s" % bin.package
    #    session.rollback()
    #    raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)

__all__.append('add_deb_to_db')
2595 ################################################################################
# Minimal ORM class mapped onto the source_acl table; attributes such as
# source_acl_id are supplied by the mapper (see DBConn.__setupmappers).
2597 class SourceACL(object):
2598 def __init__(self, *args, **kwargs):
# (lines elided: __init__ body and the __repr__ def line)
2602 return '<SourceACL %s>' % self.source_acl_id
2604 __all__.append('SourceACL')
2606 ################################################################################
# Minimal ORM class mapped onto the src_format table; format_name and
# src_format_id come from the mapper (see DBConn.__setupmappers).
2608 class SrcFormat(object):
2609 def __init__(self, *args, **kwargs):
# (lines elided: __init__ body and the __repr__ def line)
2613 return '<SrcFormat %s>' % (self.format_name)
2615 __all__.append('SrcFormat')
2617 ################################################################################
# Minimal ORM class mapped onto the src_uploaders table, linking a source
# package to an uploading maintainer (see DBConn.__setupmappers).
2619 class SrcUploader(object):
2620 def __init__(self, *args, **kwargs):
# (lines elided: __init__ body and the __repr__ def line)
2624 return '<SrcUploader %s>' % self.uploader_id
2626 __all__.append('SrcUploader')
2628 ################################################################################
# (display label, Suite attribute) pairs used to render a Suite as text --
# consumed by the "%s: %s" loop inside class Suite below.
# (One entry between 'Origin' and 'Description' is elided in this excerpt.)
2630 SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
2631 ('SuiteID', 'suite_id'),
2632 ('Version', 'version'),
2633 ('Origin', 'origin'),
2635 ('Description', 'description'),
2636 ('Untouchable', 'untouchable'),
2637 ('Announce', 'announce'),
2638 ('Codename', 'codename'),
2639 ('OverrideCodename', 'overridecodename'),
2640 ('ValidTime', 'validtime'),
2641 ('Priority', 'priority'),
2642 ('NotAutomatic', 'notautomatic'),
2643 ('CopyChanges', 'copychanges'),
2644 ('OverrideSuite', 'overridesuite')]
2646 # Why the heck don't we have any UNIQUE constraints in table suite?
2647 # TODO: Add UNIQUE constraints for appropriate columns.
# ORM class for one suite (e.g. 'unstable'); mapped onto the suite table in
# DBConn.__setupmappers. Inherits validation hooks from ORMObject.
2648 class Suite(ORMObject):
2649 def __init__(self, suite_name = None, version = None):
2650 self.suite_name = suite_name
2651 self.version = version
# ORMObject hooks: attributes shown by repr/str and the NOT NULL set
# (presumably enforced by the 'validator' mapper extension -- confirm).
2653 def properties(self):
2654 return ['suite_name', 'version', 'sources_count', 'binaries_count', \
2657 def not_null_constraints(self):
2658 return ['suite_name', 'version']
# Allow comparing a Suite directly against a plain suite-name string,
# e.g. suite == 'unstable'.
2660 def __eq__(self, val):
2661 if isinstance(val, str):
2662 return (self.suite_name == val)
2663 # This signals to use the normal comparison operator
2664 return NotImplemented
2666 def __ne__(self, val):
2667 if isinstance(val, str):
2668 return (self.suite_name != val)
2669 # This signals to use the normal comparison operator
2670 return NotImplemented
# Body of a textual-details method (its def line and the 'ret'/guard lines
# are elided): formats each SUITE_FIELDS entry as "Label: value".
2674 for disp, field in SUITE_FIELDS:
2675 val = getattr(self, field, None)
2677 ret.append("%s: %s" % (disp, val))
2679 return "\n".join(ret)
2681 def get_architectures(self, skipsrc=False, skipall=False):
2683 Returns list of Architecture objects
2685 @type skipsrc: boolean
2686 @param skipsrc: Whether to skip returning the 'source' architecture entry
2689 @type skipall: boolean
2690 @param skipall: Whether to skip returning the 'all' architecture entry
2694 @return: list of Architecture objects for the given name (may be empty)
# Architectures reachable from this suite, optionally filtering out the
# pseudo-architectures 'source' and 'all', sorted by name.
2697 q = object_session(self).query(Architecture).with_parent(self)
2699 q = q.filter(Architecture.arch_string != 'source')
2701 q = q.filter(Architecture.arch_string != 'all')
2702 return q.order_by(Architecture.arch_string).all()
2704 def get_sources(self, source):
2706 Returns a query object representing DBSource that is part of C{suite}.
2708 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2710 @type source: string
2711 @param source: source package name
2713 @rtype: sqlalchemy.orm.query.Query
2714 @return: a query of DBSource
# Returns a query (not a list) so callers can filter further.
2718 session = object_session(self)
2719 return session.query(DBSource).filter_by(source = source). \
2722 __all__.append('Suite')
# Look up a Suite row by name; the docstring promises None when absent
# (the try/one() call and the None-returning fallback are elided here).
2725 def get_suite(suite, session=None):
2727 Returns Suite object for given C{suite name}.
2730 @param suite: The name of the suite
2732 @type session: Session
2733 @param session: Optional SQLA session object (a temporary one will be
2734 generated if not supplied)
2737 @return: Suite object for the requested suite name (None if not present)
# NOTE(review): 'session' is used directly; the temporary session promised
# above is presumably supplied by an elided @session_wrapper decorator.
2740 q = session.query(Suite).filter_by(suite_name=suite)
2744 except NoResultFound:
2747 __all__.append('get_suite')
2749 ################################################################################
2751 # TODO: should be removed because the implementation is too trivial
# Thin wrapper: delegates straight to Suite.get_architectures (hence the
# removal TODO above).
2753 def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
2755 Returns list of Architecture objects for given C{suite} name
2758 @param suite: Suite name to search for
2760 @type skipsrc: boolean
2761 @param skipsrc: Whether to skip returning the 'source' architecture entry
2764 @type skipall: boolean
2765 @param skipall: Whether to skip returning the 'all' architecture entry
2768 @type session: Session
2769 @param session: Optional SQL session object (a temporary one will be
2770 generated if not supplied)
2773 @return: list of Architecture objects for the given name (may be empty)
# NOTE(review): will raise AttributeError if get_suite returns None for an
# unknown suite name -- confirm whether callers guarantee a valid name.
2776 return get_suite(suite, session).get_architectures(skipsrc, skipall)
2778 __all__.append('get_suite_architectures')
2780 ################################################################################
# Minimal ORM class for the suite_src_formats association table, linking a
# suite to an allowed source format (see DBConn.__setupmappers).
2782 class SuiteSrcFormat(object):
2783 def __init__(self, *args, **kwargs):
# (lines elided: __init__ body and the __repr__ def line)
2787 return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
2789 __all__.append('SuiteSrcFormat')
# List the source formats a suite accepts, via the join chain
# SrcFormat -> SuiteSrcFormat -> Suite, ordered by format name.
# (The final 'return q.all()' line is elided from this excerpt.)
2792 def get_suite_src_formats(suite, session=None):
2794 Returns list of allowed SrcFormat for C{suite}.
2797 @param suite: Suite name to search for
2799 @type session: Session
2800 @param session: Optional SQL session object (a temporary one will be
2801 generated if not supplied)
2804 @return: the list of allowed source formats for I{suite}
2807 q = session.query(SrcFormat)
2808 q = q.join(SuiteSrcFormat)
2809 q = q.join(Suite).filter_by(suite_name=suite)
2810 q = q.order_by('format_name')
2814 __all__.append('get_suite_src_formats')
2816 ################################################################################
# ORM class for one OpenPGP uid; mapped onto the uid table in
# DBConn.__setupmappers with a relation to Fingerprint.
2818 class Uid(ORMObject):
2819 def __init__(self, uid = None, name = None):
# (lines elided: attribute assignments in __init__)
# Allow comparing a Uid directly against a plain uid string.
2823 def __eq__(self, val):
2824 if isinstance(val, str):
2825 return (self.uid == val)
2826 # This signals to use the normal comparison operator
2827 return NotImplemented
2829 def __ne__(self, val):
2830 if isinstance(val, str):
2831 return (self.uid != val)
2832 # This signals to use the normal comparison operator
2833 return NotImplemented
# ORMObject hooks (see class Suite for the same pattern).
2835 def properties(self):
2836 return ['uid', 'name', 'fingerprint']
2838 def not_null_constraints(self):
2841 __all__.append('Uid')
# Fetch the Uid row for 'uidname', inserting a new row when none exists
# (the try/one(), Uid construction and session.add lines are elided here).
2844 def get_or_set_uid(uidname, session=None):
2846 Returns uid object for given uidname.
2848 If no matching uidname is found, a row is inserted.
2850 @type uidname: string
2851 @param uidname: The uid to add
2853 @type session: SQLAlchemy
2854 @param session: Optional SQL session object (a temporary one will be
2855 generated if not supplied). If not passed, a commit will be performed at
2856 the end of the function, otherwise the caller is responsible for committing.
2859 @return: the uid object for the given uidname
2862 q = session.query(Uid).filter_by(uid=uidname)
2866 except NoResultFound:
# commit_or_flush: commits when this function owns the session, otherwise
# only flushes and leaves the commit to the caller (per docstring above).
2870 session.commit_or_flush()
2875 __all__.append('get_or_set_uid')
# Look up the Uid owning fingerprint 'fpr' via the Fingerprint join;
# presumably returns None on NoResultFound (the return lines are elided).
2878 def get_uid_from_fingerprint(fpr, session=None):
2879 q = session.query(Uid)
2880 q = q.join(Fingerprint).filter_by(fingerprint=fpr)
2884 except NoResultFound:
2887 __all__.append('get_uid_from_fingerprint')
2889 ################################################################################
# Minimal ORM class mapped onto the upload_blocks table, with relations to
# Fingerprint and Uid (see DBConn.__setupmappers).
2891 class UploadBlock(object):
2892 def __init__(self, *args, **kwargs):
# (lines elided: __init__ body and the __repr__ def line)
2896 return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
2898 __all__.append('UploadBlock')
2900 ################################################################################
# Central database connection object: reflects every table/view from the
# live PostgreSQL schema and wires the ORM classes above onto them.
# Borg-style shared state: every instance aliases its __dict__ to the class
# level __shared_state dict (definition elided), so all instances behave as
# one connection and the reflection/mapping work runs only once.
2902 class DBConn(object):
2904 database module init.
2908 def __init__(self, *args, **kwargs):
2909 self.__dict__ = self.__shared_state
# One-time initialisation guard for the shared state.
2911 if not getattr(self, 'initialised', False):
2912 self.initialised = True
2913 self.debug = kwargs.has_key('debug')
# Reflect database tables into self.tbl_<name> / self.view_<name>.
2916 def __setuptables(self):
# Tables whose SERIAL 'id' column must be declared explicitly (see the
# SQLAlchemy 0.5 workaround comment below). Many entries elided.
2917 tables_with_primary = (
2925 'build_queue_files',
2930 'changes_pending_binaries',
2931 'changes_pending_files',
2932 'changes_pending_source',
2942 'pending_bin_contents',
# Association tables without a surrogate primary key. Entries elided.
2956 tables_no_primary = (
2957 'changes_pending_files_map',
2958 'changes_pending_source_files',
2959 'changes_pool_files',
2961 # TODO: the maintainer column in table override should be removed.
2963 'suite_architectures',
2964 'suite_src_formats',
2965 'suite_build_queue_copy',
# Names below belong to the database-views tuple (its opening line is
# elided); each becomes a view_<name> attribute in the loop further down.
2970 'almost_obsolete_all_associations',
2971 'almost_obsolete_src_associations',
2972 'any_associations_source',
2973 'bin_assoc_by_arch',
2974 'bin_associations_binaries',
2975 'binaries_suite_arch',
2976 'binfiles_suite_component_arch',
2979 'newest_all_associations',
2980 'newest_any_associations',
2982 'newest_src_association',
2983 'obsolete_all_associations',
2984 'obsolete_any_associations',
2985 'obsolete_any_by_all_associations',
2986 'obsolete_src_associations',
2988 'src_associations_bin',
2989 'src_associations_src',
2990 'suite_arch_by_name',
2993 # Sqlalchemy version 0.5 fails to reflect the SERIAL type
2994 # correctly and that is why we have to use a workaround. It can
2995 # be removed as soon as we switch to version 0.6.
2996 for table_name in tables_with_primary:
2997 table = Table(table_name, self.db_meta, \
2998 Column('id', Integer, primary_key = True), \
2999 autoload=True, useexisting=True)
3000 setattr(self, 'tbl_%s' % table_name, table)
3002 for table_name in tables_no_primary:
3003 table = Table(table_name, self.db_meta, autoload=True)
3004 setattr(self, 'tbl_%s' % table_name, table)
3006 # bin_contents needs special attention until update #41 has been
# bin_contents uses a composite (file, binary_id) primary key, so it cannot
# go through the generic 'id' workaround loop above.
3008 self.tbl_bin_contents = Table('bin_contents', self.db_meta, \
3009 Column('file', Text, primary_key = True),
3010 Column('binary_id', Integer, ForeignKey('binaries.id'), \
3011 primary_key = True),
3012 autoload=True, useexisting=True)
3014 for view_name in views:
3015 view = Table(view_name, self.db_meta, autoload=True)
3016 setattr(self, 'view_%s' % view_name, view)
# Map each ORM class onto its reflected table. 'extension = validator'
# attaches the module-level validator MapperExtension (defined elsewhere in
# this file, presumably enforcing each class's not_null_constraints).
3018 def __setupmappers(self):
3019 mapper(Architecture, self.tbl_architecture,
3020 properties = dict(arch_id = self.tbl_architecture.c.id,
3021 suites = relation(Suite, secondary=self.tbl_suite_architectures,
3022 order_by='suite_name',
3023 backref=backref('architectures', order_by='arch_string'))),
3024 extension = validator)
3026 mapper(Archive, self.tbl_archive,
3027 properties = dict(archive_id = self.tbl_archive.c.id,
3028 archive_name = self.tbl_archive.c.name))
3030 mapper(PendingBinContents, self.tbl_pending_bin_contents,
3031 properties = dict(contents_id =self.tbl_pending_bin_contents.c.id,
3032 filename = self.tbl_pending_bin_contents.c.filename,
3033 package = self.tbl_pending_bin_contents.c.package,
3034 version = self.tbl_pending_bin_contents.c.version,
3035 arch = self.tbl_pending_bin_contents.c.arch,
3036 otype = self.tbl_pending_bin_contents.c.type))
3038 mapper(DebContents, self.tbl_deb_contents,
3039 properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
3040 package=self.tbl_deb_contents.c.package,
3041 suite=self.tbl_deb_contents.c.suite,
3042 arch=self.tbl_deb_contents.c.arch,
3043 section=self.tbl_deb_contents.c.section,
3044 filename=self.tbl_deb_contents.c.filename))
3046 mapper(UdebContents, self.tbl_udeb_contents,
3047 properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
3048 package=self.tbl_udeb_contents.c.package,
3049 suite=self.tbl_udeb_contents.c.suite,
3050 arch=self.tbl_udeb_contents.c.arch,
3051 section=self.tbl_udeb_contents.c.section,
3052 filename=self.tbl_udeb_contents.c.filename))
3054 mapper(BuildQueue, self.tbl_build_queue,
3055 properties = dict(queue_id = self.tbl_build_queue.c.id))
3057 mapper(BuildQueueFile, self.tbl_build_queue_files,
3058 properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
3059 poolfile = relation(PoolFile, backref='buildqueueinstances')))
3061 mapper(DBBinary, self.tbl_binaries,
3062 properties = dict(binary_id = self.tbl_binaries.c.id,
3063 package = self.tbl_binaries.c.package,
3064 version = self.tbl_binaries.c.version,
3065 maintainer_id = self.tbl_binaries.c.maintainer,
3066 maintainer = relation(Maintainer),
3067 source_id = self.tbl_binaries.c.source,
3068 source = relation(DBSource, backref='binaries'),
3069 arch_id = self.tbl_binaries.c.architecture,
3070 architecture = relation(Architecture),
3071 poolfile_id = self.tbl_binaries.c.file,
3072 poolfile = relation(PoolFile, backref=backref('binary', uselist = False)),
3073 binarytype = self.tbl_binaries.c.type,
3074 fingerprint_id = self.tbl_binaries.c.sig_fpr,
3075 fingerprint = relation(Fingerprint),
3076 install_date = self.tbl_binaries.c.install_date,
3077 suites = relation(Suite, secondary=self.tbl_bin_associations,
3078 backref=backref('binaries', lazy='dynamic'))),
3079 extension = validator)
3081 mapper(BinaryACL, self.tbl_binary_acl,
3082 properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))
3084 mapper(BinaryACLMap, self.tbl_binary_acl_map,
3085 properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
3086 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
3087 architecture = relation(Architecture)))
3089 mapper(Component, self.tbl_component,
3090 properties = dict(component_id = self.tbl_component.c.id,
3091 component_name = self.tbl_component.c.name),
3092 extension = validator)
3094 mapper(DBConfig, self.tbl_config,
3095 properties = dict(config_id = self.tbl_config.c.id))
3097 mapper(DSCFile, self.tbl_dsc_files,
3098 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
3099 source_id = self.tbl_dsc_files.c.source,
3100 source = relation(DBSource),
3101 poolfile_id = self.tbl_dsc_files.c.file,
3102 poolfile = relation(PoolFile)))
3104 mapper(PoolFile, self.tbl_files,
3105 properties = dict(file_id = self.tbl_files.c.id,
3106 filesize = self.tbl_files.c.size,
3107 location_id = self.tbl_files.c.location,
3108 location = relation(Location,
3109 # using lazy='dynamic' in the back
3110 # reference because we have A LOT of
3111 # files in one location
3112 backref=backref('files', lazy='dynamic'))),
3113 extension = validator)
3115 mapper(Fingerprint, self.tbl_fingerprint,
3116 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
3117 uid_id = self.tbl_fingerprint.c.uid,
3118 uid = relation(Uid),
3119 keyring_id = self.tbl_fingerprint.c.keyring,
3120 keyring = relation(Keyring),
3121 source_acl = relation(SourceACL),
3122 binary_acl = relation(BinaryACL)),
3123 extension = validator)
3125 mapper(Keyring, self.tbl_keyrings,
3126 properties = dict(keyring_name = self.tbl_keyrings.c.name,
3127 keyring_id = self.tbl_keyrings.c.id))
3129 mapper(DBChange, self.tbl_changes,
3130 properties = dict(change_id = self.tbl_changes.c.id,
3131 poolfiles = relation(PoolFile,
3132 secondary=self.tbl_changes_pool_files,
3133 backref="changeslinks"),
3134 seen = self.tbl_changes.c.seen,
3135 source = self.tbl_changes.c.source,
3136 binaries = self.tbl_changes.c.binaries,
3137 architecture = self.tbl_changes.c.architecture,
3138 distribution = self.tbl_changes.c.distribution,
3139 urgency = self.tbl_changes.c.urgency,
3140 maintainer = self.tbl_changes.c.maintainer,
3141 changedby = self.tbl_changes.c.changedby,
3142 date = self.tbl_changes.c.date,
3143 version = self.tbl_changes.c.version,
3144 files = relation(ChangePendingFile,
3145 secondary=self.tbl_changes_pending_files_map,
3146 backref="changesfile"),
3147 in_queue_id = self.tbl_changes.c.in_queue,
3148 in_queue = relation(PolicyQueue,
3149 primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
3150 approved_for_id = self.tbl_changes.c.approved_for))
3152 mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
3153 properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))
3155 mapper(ChangePendingFile, self.tbl_changes_pending_files,
3156 properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
3157 filename = self.tbl_changes_pending_files.c.filename,
3158 size = self.tbl_changes_pending_files.c.size,
3159 md5sum = self.tbl_changes_pending_files.c.md5sum,
3160 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
3161 sha256sum = self.tbl_changes_pending_files.c.sha256sum))
3163 mapper(ChangePendingSource, self.tbl_changes_pending_source,
3164 properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
3165 change = relation(DBChange),
# Two relations to Maintainer from the same table need explicit
# primaryjoins to disambiguate maintainer vs changedby.
3166 maintainer = relation(Maintainer,
3167 primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
3168 changedby = relation(Maintainer,
3169 primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
3170 fingerprint = relation(Fingerprint),
3171 source_files = relation(ChangePendingFile,
3172 secondary=self.tbl_changes_pending_source_files,
3173 backref="pending_sources")))
3176 mapper(KeyringACLMap, self.tbl_keyring_acl_map,
3177 properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
3178 keyring = relation(Keyring, backref="keyring_acl_map"),
3179 architecture = relation(Architecture)))
3181 mapper(Location, self.tbl_location,
3182 properties = dict(location_id = self.tbl_location.c.id,
3183 component_id = self.tbl_location.c.component,
3184 component = relation(Component, backref='location'),
3185 archive_id = self.tbl_location.c.archive,
3186 archive = relation(Archive),
3187 # FIXME: the 'type' column is old cruft and
3188 # should be removed in the future.
3189 archive_type = self.tbl_location.c.type),
3190 extension = validator)
3192 mapper(Maintainer, self.tbl_maintainer,
3193 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
3194 maintains_sources = relation(DBSource, backref='maintainer',
3195 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
3196 changed_sources = relation(DBSource, backref='changedby',
3197 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
3198 extension = validator)
3200 mapper(NewComment, self.tbl_new_comments,
3201 properties = dict(comment_id = self.tbl_new_comments.c.id))
3203 mapper(Override, self.tbl_override,
3204 properties = dict(suite_id = self.tbl_override.c.suite,
3205 suite = relation(Suite, \
3206 backref=backref('overrides', lazy='dynamic')),
3207 package = self.tbl_override.c.package,
3208 component_id = self.tbl_override.c.component,
3209 component = relation(Component, \
3210 backref=backref('overrides', lazy='dynamic')),
3211 priority_id = self.tbl_override.c.priority,
3212 priority = relation(Priority, \
3213 backref=backref('overrides', lazy='dynamic')),
3214 section_id = self.tbl_override.c.section,
3215 section = relation(Section, \
3216 backref=backref('overrides', lazy='dynamic')),
3217 overridetype_id = self.tbl_override.c.type,
3218 overridetype = relation(OverrideType, \
3219 backref=backref('overrides', lazy='dynamic'))))
3221 mapper(OverrideType, self.tbl_override_type,
3222 properties = dict(overridetype = self.tbl_override_type.c.type,
3223 overridetype_id = self.tbl_override_type.c.id))
3225 mapper(PolicyQueue, self.tbl_policy_queue,
3226 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))
3228 mapper(Priority, self.tbl_priority,
3229 properties = dict(priority_id = self.tbl_priority.c.id))
3231 mapper(Section, self.tbl_section,
3232 properties = dict(section_id = self.tbl_section.c.id,
3233 section=self.tbl_section.c.section))
3235 mapper(DBSource, self.tbl_source,
3236 properties = dict(source_id = self.tbl_source.c.id,
3237 version = self.tbl_source.c.version,
3238 maintainer_id = self.tbl_source.c.maintainer,
3239 poolfile_id = self.tbl_source.c.file,
3240 poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
3241 fingerprint_id = self.tbl_source.c.sig_fpr,
3242 fingerprint = relation(Fingerprint),
3243 changedby_id = self.tbl_source.c.changedby,
3244 srcfiles = relation(DSCFile,
3245 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
3246 suites = relation(Suite, secondary=self.tbl_src_associations,
3247 backref=backref('sources', lazy='dynamic')),
3248 srcuploaders = relation(SrcUploader)),
3249 extension = validator)
3251 mapper(SourceACL, self.tbl_source_acl,
3252 properties = dict(source_acl_id = self.tbl_source_acl.c.id))
3254 mapper(SrcFormat, self.tbl_src_format,
3255 properties = dict(src_format_id = self.tbl_src_format.c.id,
3256 format_name = self.tbl_src_format.c.format_name))
3258 mapper(SrcUploader, self.tbl_src_uploaders,
3259 properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
3260 source_id = self.tbl_src_uploaders.c.source,
3261 source = relation(DBSource,
3262 primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
3263 maintainer_id = self.tbl_src_uploaders.c.maintainer,
3264 maintainer = relation(Maintainer,
3265 primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))
3267 mapper(Suite, self.tbl_suite,
3268 properties = dict(suite_id = self.tbl_suite.c.id,
3269 policy_queue = relation(PolicyQueue),
3270 copy_queues = relation(BuildQueue,
3271 secondary=self.tbl_suite_build_queue_copy)),
3272 extension = validator)
3274 mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
3275 properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
3276 suite = relation(Suite, backref='suitesrcformats'),
3277 src_format_id = self.tbl_suite_src_formats.c.src_format,
3278 src_format = relation(SrcFormat)))
3280 mapper(Uid, self.tbl_uid,
3281 properties = dict(uid_id = self.tbl_uid.c.id,
3282 fingerprint = relation(Fingerprint)),
3283 extension = validator)
3285 mapper(UploadBlock, self.tbl_upload_blocks,
3286 properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
3287 fingerprint = relation(Fingerprint, backref="uploadblocks"),
3288 uid = relation(Uid, backref="uploadblocks")))
3290 mapper(BinContents, self.tbl_bin_contents,
3292 binary = relation(DBBinary,
3293 backref=backref('contents', lazy='dynamic', cascade='all')),
3294 file = self.tbl_bin_contents.c.file))
3296 ## Connection functions
# Build the connection string, create the engine and session factory, then
# run table reflection and mapper setup.
3297 def __createconn(self):
3298 from config import Config
# TCP connection when DB::Host is configured (elided guard line above);
# port -1 means "use the default port".
3302 connstr = "postgres://%s" % cnf["DB::Host"]
3303 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3304 connstr += ":%s" % cnf["DB::Port"]
3305 connstr += "/%s" % cnf["DB::Name"]
# Otherwise connect over the local unix socket, with an optional port.
3308 connstr = "postgres:///%s" % cnf["DB::Name"]
3309 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3310 connstr += "?port=%s" % cnf["DB::Port"]
# Optional engine tuning from configuration; 'echo' dumps SQL when the
# DBConn was constructed with debug=...
3312 engine_args = { 'echo': self.debug }
3313 if cnf.has_key('DB::PoolSize'):
3314 engine_args['pool_size'] = int(cnf['DB::PoolSize'])
3315 if cnf.has_key('DB::MaxOverflow'):
3316 engine_args['max_overflow'] = int(cnf['DB::MaxOverflow'])
3317 if sa_major_version == '0.6' and cnf.has_key('DB::Unicode') and \
3318 cnf['DB::Unicode'] == 'false':
3319 engine_args['use_native_unicode'] = False
3321 self.db_pg = create_engine(connstr, **engine_args)
3322 self.db_meta = MetaData()
3323 self.db_meta.bind = self.db_pg
3324 self.db_smaker = sessionmaker(bind=self.db_pg,
3328 self.__setuptables()
3329 self.__setupmappers()
# Body of the public session() method (its def line is elided): returns a
# fresh session from the factory created above.
3332 return self.db_smaker()
3334 __all__.append('DBConn')