5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
37 from os.path import normpath
48 import simplejson as json
50 from datetime import datetime, timedelta
51 from errno import ENOENT
52 from tempfile import mkstemp, mkdtemp
53 from subprocess import Popen, PIPE
54 from tarfile import TarFile
56 from inspect import getargspec
59 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
61 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
62 backref, MapperExtension, EXT_CONTINUE, object_mapper
63 from sqlalchemy import types as sqltypes
65 # Don't remove this, we re-export the exceptions to scripts which import us
66 from sqlalchemy.exc import *
67 from sqlalchemy.orm.exc import NoResultFound
69 # Only import Config until Queue stuff is changed to store its config
71 from config import Config
72 from textutils import fix_maintainer
73 from dak_exceptions import DBUpdateError, NoSourceFieldError
75 # suppress some deprecation warnings in squeeze related to sqlalchemy
77 warnings.filterwarnings('ignore', \
78 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
80 # TODO: sqlalchemy needs some extra configuration to correctly reflect
81 # the ind_deb_contents_* indexes - we ignore the warnings at the moment
82 warnings.filterwarnings("ignore", 'Predicate of partial index', SAWarning)
85 ################################################################################
87 # Patch in support for the debversion field type so that it works during
91 # that is for sqlalchemy 0.6
92 UserDefinedType = sqltypes.UserDefinedType
94 # this one for sqlalchemy 0.5
95 UserDefinedType = sqltypes.TypeEngine
97 class DebVersion(UserDefinedType):
98 def get_col_spec(self):
101 def bind_processor(self, dialect):
104 # ' = None' is needed for sqlalchemy 0.5:
105 def result_processor(self, dialect, coltype = None):
# Select the debversion reflection hook based on the installed SQLAlchemy.
# NOTE(review): this listing is corrupted -- each line carries a stray
# original line number, and the line between the assignment below and the
# raise (presumably an 'else:') has been elided, so as shown the raise
# would run unconditionally. TODO: confirm against the pristine file.
108 sa_major_version = sqlalchemy.__version__[0:3]
109 if sa_major_version in ["0.5", "0.6"]:
110 from sqlalchemy.databases import postgres
# Teach the postgres dialect to map the custom 'debversion' column type.
111 postgres.ischema_names['debversion'] = DebVersion
# Unsupported SQLAlchemy release: fail loudly at import time.
113 raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
115 ################################################################################
117 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
119 ################################################################################
# Decorator: supply a private DB session when the caller did not pass one,
# and expose session.commit_or_flush so wrapped code commits only when it
# owns the session.
# NOTE(review): this listing is corrupted -- stray line-number prefixes and
# elided lines (docstring delimiters, the branch/cleanup statements). The
# review notes below are hedged accordingly.
121 def session_wrapper(fn):
123 Wrapper around common ".., session=None):" handling. If the wrapped
124 function is called without passing 'session', we create a local one
125 and destroy it when the function ends.
127 Also attaches a commit_or_flush method to the session; if we created a
128 local session, this is a synonym for session.commit(), otherwise it is a
129 synonym for session.flush().
132 def wrapped(*args, **kwargs):
133 private_transaction = False
135 # Find the session object
136 session = kwargs.get('session')
# getargspec(fn)[0] is the positional-parameter name list; fewer args than
# that means 'session' was not given positionally. (getargspec is the
# Python 2 era API.)
139 if len(args) <= len(getargspec(fn)[0]) - 1:
140 # No session specified as last argument or in kwargs
141 private_transaction = True
142 session = kwargs['session'] = DBConn().session()
144 # Session is last argument in args
# NOTE(review): 'args' is a tuple here, so 'args[-1] = ...' would raise
# TypeError -- presumably the elided lines converted args to a list
# first. TODO: confirm against the pristine file.
148 session = args[-1] = DBConn().session()
149 private_transaction = True
# Own session -> commit for real; borrowed session -> flush only and let
# the caller decide when to commit.
151 if private_transaction:
152 session.commit_or_flush = session.commit
154 session.commit_or_flush = session.flush
157 return fn(*args, **kwargs)
159 if private_transaction:
160 # We created a session; close it.
# Manual metadata copy; functools.wraps would do this (pre-dates its use).
163 wrapped.__doc__ = fn.__doc__
164 wrapped.func_name = fn.func_name
168 __all__.append('session_wrapper')
170 ################################################################################
172 class ORMObject(object):
174 ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
175 derived classes must implement the properties() method.
178 def properties(self):
180 This method should be implemented by all derived classes and returns a
181 list of the important properties. The properties 'created' and
182 'modified' will be added automatically. A suffix '_count' should be
183 added to properties that are lists or query objects. The most important
184 property name should be returned as the first element in the list
185 because it is used by repr().
191 Returns a JSON representation of the object based on the properties
192 returned from the properties() method.
195 # add created and modified
196 all_properties = self.properties() + ['created', 'modified']
197 for property in all_properties:
198 # check for list or query
199 if property[-6:] == '_count':
200 real_property = property[:-6]
201 if not hasattr(self, real_property):
203 value = getattr(self, real_property)
204 if hasattr(value, '__len__'):
207 elif hasattr(value, 'count'):
209 value = value.count()
211 raise KeyError('Do not understand property %s.' % property)
213 if not hasattr(self, property):
216 value = getattr(self, property)
220 elif isinstance(value, ORMObject):
221 # use repr() for ORMObject types
224 # we want a string for all other types because json cannot
227 data[property] = value
228 return json.dumps(data)
232 Returns the name of the class.
234 return type(self).__name__
238 Returns a short string representation of the object using the first
239 element from the properties() method.
241 primary_property = self.properties()[0]
242 value = getattr(self, primary_property)
243 return '<%s %s>' % (self.classname(), str(value))
247 Returns a human readable form of the object using the properties()
250 return '<%s %s>' % (self.classname(), self.json())
252 def not_null_constraints(self):
254 Returns a list of properties that must be not NULL. Derived classes
255 should override this method if needed.
259 validation_message = \
260 "Validation failed because property '%s' must not be empty in object\n%s"
264 This function validates the not NULL constraints as returned by
265 not_null_constraints(). It raises the DBUpdateError exception if
268 for property in self.not_null_constraints():
269 # TODO: It is a bit awkward that the mapper configuration allow
270 # directly setting the numeric _id columns. We should get rid of it
272 if hasattr(self, property + '_id') and \
273 getattr(self, property + '_id') is not None:
275 if not hasattr(self, property) or getattr(self, property) is None:
276 raise DBUpdateError(self.validation_message % \
277 (property, str(self)))
281 def get(cls, primary_key, session = None):
283 This is a support function that allows getting an object by its primary
286 Architecture.get(3[, session])
288 instead of the more verbose
290 session.query(Architecture).get(3)
292 return session.query(cls).get(primary_key)
294 def session(self, replace = False):
296 Returns the current session that is associated with the object. May
297 return None is object is in detached state.
300 return object_session(self)
302 def clone(self, session = None):
304 Clones the current object in a new session and returns the new clone. A
305 fresh session is created if the optional session parameter is not
306 provided. The function will fail if a session is provided and has
309 RATIONALE: SQLAlchemy's session is not thread safe. This method clones
310 an existing object to allow several threads to work with their own
311 instances of an ORMObject.
313 WARNING: Only persistent (committed) objects can be cloned. Changes
314 made to the original object that are not committed yet will get lost.
315 The session of the new object will always be rolled back to avoid
319 if self.session() is None:
320 raise RuntimeError( \
321 'Method clone() failed for detached object:\n%s' % self)
322 self.session().flush()
323 mapper = object_mapper(self)
324 primary_key = mapper.primary_key_from_instance(self)
325 object_class = self.__class__
327 session = DBConn().session()
328 elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
329 raise RuntimeError( \
330 'Method clone() failed due to unflushed changes in session.')
331 new_object = session.query(object_class).get(primary_key)
333 if new_object is None:
334 raise RuntimeError( \
335 'Method clone() failed for non-persistent object:\n%s' % self)
338 __all__.append('ORMObject')
340 ################################################################################
342 class Validator(MapperExtension):
344 This class calls the validate() method for each instance for the
345 'before_update' and 'before_insert' events. A global object validator is
346 used for configuring the individual mappers.
349 def before_update(self, mapper, connection, instance):
353 def before_insert(self, mapper, connection, instance):
357 validator = Validator()
359 ################################################################################
class Architecture(ORMObject):
    """ORM class for the 'architecture' table.

    Instances compare equal (and unequal) to plain strings holding the
    same architecture name, e.g. ``arch == 'amd64'``; any other operand
    type falls back to the default comparison.
    """

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Allow direct comparison against a plain architecture name.
        if isinstance(val, str):
            return (self.arch_string == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.arch_string != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        # 'arch_string' first: ORMObject.__repr__() uses the first entry.
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']
384 __all__.append('Architecture')
387 def get_architecture(architecture, session=None):
389 Returns database id for given C{architecture}.
391 @type architecture: string
392 @param architecture: The name of the architecture
394 @type session: Session
395 @param session: Optional SQLA session object (a temporary one will be
396 generated if not supplied)
399 @return: Architecture object for the given arch (None if not present)
402 q = session.query(Architecture).filter_by(arch_string=architecture)
406 except NoResultFound:
409 __all__.append('get_architecture')
411 # TODO: should be removed because the implementation is too trivial
413 def get_architecture_suites(architecture, session=None):
415 Returns list of Suite objects for given C{architecture} name
417 @type architecture: str
418 @param architecture: Architecture name to search for
420 @type session: Session
421 @param session: Optional SQL session object (a temporary one will be
422 generated if not supplied)
425 @return: list of Suite objects for the given name (may be empty)
428 return get_architecture(architecture, session).suites
430 __all__.append('get_architecture_suites')
432 ################################################################################
434 class Archive(object):
435 def __init__(self, *args, **kwargs):
439 return '<Archive %s>' % self.archive_name
441 __all__.append('Archive')
444 def get_archive(archive, session=None):
446 returns database id for given C{archive}.
448 @type archive: string
449 @param archive: the name of the arhive
451 @type session: Session
452 @param session: Optional SQLA session object (a temporary one will be
453 generated if not supplied)
456 @return: Archive object for the given name (None if not present)
459 archive = archive.lower()
461 q = session.query(Archive).filter_by(archive_name=archive)
465 except NoResultFound:
468 __all__.append('get_archive')
470 ################################################################################
472 class BinContents(ORMObject):
473 def __init__(self, file = None, binary = None):
477 def properties(self):
478 return ['file', 'binary']
480 __all__.append('BinContents')
482 ################################################################################
# ORM class for a binary package row ('binaries' table).
# NOTE(review): this listing is corrupted -- stray line-number prefixes and
# elided lines. In particular no 'self.source = source' assignment is
# visible in __init__ although 'source' is a parameter and a declared
# not-null property; presumably it was elided. TODO: confirm.
484 class DBBinary(ORMObject):
485     def __init__(self, package = None, source = None, version = None, \
486         maintainer = None, architecture = None, poolfile = None, \
488         self.package = package
490         self.version = version
491         self.maintainer = maintainer
492         self.architecture = architecture
493         self.poolfile = poolfile
494         self.binarytype = binarytype
496     def properties(self):
497         return ['package', 'version', 'maintainer', 'source', 'architecture', \
498             'poolfile', 'binarytype', 'fingerprint', 'install_date', \
499             'suites_count', 'binary_id', 'contents_count']
501     def not_null_constraints(self):
502         return ['package', 'version', 'maintainer', 'source', 'poolfile', \
# Component is reached via the pool file's location row.
505     def get_component_name(self):
506         return self.poolfile.location.component.component_name
508     def scan_contents(self):
510         Yields the contents of the package. Only regular files are yielded and
511         the path names are normalized after converting them from either utf-8 or
# Stream the .deb's file list via dpkg-deb without extracting to disk.
514         fullpath = self.poolfile.fullpath
515         debdata = Popen(['dpkg-deb', '--fsys-tarfile', fullpath],
516             stdout = PIPE).stdout
# mode 'r|' = non-seekable stream mode, required for a pipe source.
517         tar = TarFile.open(fileobj = debdata, mode = 'r|')
518         for member in tar.getmembers():
521                 name = member.name.decode('utf-8')
# BUG(review): 'catch' is not a Python keyword -- this must read
# 'except UnicodeDecodeError:'; as written the module cannot compile.
# (The enclosing 'try:' line also appears to have been elided.)
522             catch UnicodeDecodeError:
# Fall back to latin-1 for member names that are not valid UTF-8.
523                 name = member.name.decode('iso8859-1')
528 __all__.append('DBBinary')
531 def get_suites_binary_in(package, session=None):
533 Returns list of Suite objects which given C{package} name is in
536 @param package: DBBinary package name to search for
539 @return: list of Suite objects for the given package
542 return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()
544 __all__.append('get_suites_binary_in')
547 def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
549 Returns the component name of the newest binary package in suite_list or
550 None if no package is found. The result can be optionally filtered by a list
551 of architecture names.
554 @param package: DBBinary package name to search for
556 @type suite_list: list of str
557 @param suite_list: list of suite_name items
559 @type arch_list: list of str
560 @param arch_list: optional list of arch_string items that defaults to []
562 @rtype: str or NoneType
563 @return: name of component or None
566 q = session.query(DBBinary).filter_by(package = package). \
567 join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
568 if len(arch_list) > 0:
569 q = q.join(DBBinary.architecture). \
570 filter(Architecture.arch_string.in_(arch_list))
571 binary = q.order_by(desc(DBBinary.version)).first()
575 return binary.get_component_name()
577 __all__.append('get_component_by_package_suite')
579 ################################################################################
581 class BinaryACL(object):
582 def __init__(self, *args, **kwargs):
586 return '<BinaryACL %s>' % self.binary_acl_id
588 __all__.append('BinaryACL')
590 ################################################################################
592 class BinaryACLMap(object):
593 def __init__(self, *args, **kwargs):
597 return '<BinaryACLMap %s>' % self.binary_acl_map_id
599 __all__.append('BinaryACLMap')
601 ################################################################################
606 ArchiveDir "%(archivepath)s";
607 OverrideDir "%(overridedir)s";
608 CacheDir "%(cachedir)s";
613 Packages::Compress ". bzip2 gzip";
614 Sources::Compress ". bzip2 gzip";
619 bindirectory "incoming"
624 BinOverride "override.sid.all3";
625 BinCacheDB "packages-accepted.db";
627 FileList "%(filelist)s";
630 Packages::Extensions ".deb .udeb";
633 bindirectory "incoming/"
636 BinOverride "override.sid.all3";
637 SrcOverride "override.sid.all3.src";
638 FileList "%(filelist)s";
642 class BuildQueue(object):
643 def __init__(self, *args, **kwargs):
647 return '<BuildQueue %s>' % self.queue_name
649 def write_metadata(self, starttime, force=False):
650 # Do we write out metafiles?
651 if not (force or self.generate_metadata):
654 session = DBConn().session().object_session(self)
656 fl_fd = fl_name = ac_fd = ac_name = None
658 arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
659 startdir = os.getcwd()
662 # Grab files we want to include
663 newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
664 # Write file list with newer files
665 (fl_fd, fl_name) = mkstemp()
667 os.write(fl_fd, '%s\n' % n.fullpath)
672 # Write minimal apt.conf
673 # TODO: Remove hardcoding from template
674 (ac_fd, ac_name) = mkstemp()
675 os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
677 'cachedir': cnf["Dir::Cache"],
678 'overridedir': cnf["Dir::Override"],
682 # Run apt-ftparchive generate
683 os.chdir(os.path.dirname(ac_name))
684 os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
686 # Run apt-ftparchive release
687 # TODO: Eww - fix this
688 bname = os.path.basename(self.path)
692 # We have to remove the Release file otherwise it'll be included in the
695 os.unlink(os.path.join(bname, 'Release'))
699 os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
701 # Crude hack with open and append, but this whole section is and should be redone.
702 if self.notautomatic:
703 release=open("Release", "a")
704 release.write("NotAutomatic: yes")
709 keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
710 if cnf.has_key("Dinstall::SigningPubKeyring"):
711 keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
713 os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
715 # Move the files if we got this far
716 os.rename('Release', os.path.join(bname, 'Release'))
718 os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
720 # Clean up any left behind files
747 def clean_and_update(self, starttime, Logger, dryrun=False):
748 """WARNING: This routine commits for you"""
749 session = DBConn().session().object_session(self)
751 if self.generate_metadata and not dryrun:
752 self.write_metadata(starttime)
754 # Grab files older than our execution time
755 older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
761 Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
763 Logger.log(["I: Removing %s from the queue" % o.fullpath])
764 os.unlink(o.fullpath)
767 # If it wasn't there, don't worry
768 if e.errno == ENOENT:
771 # TODO: Replace with proper logging call
772 Logger.log(["E: Could not remove %s" % o.fullpath])
779 for f in os.listdir(self.path):
780 if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
784 r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
785 except NoResultFound:
786 fp = os.path.join(self.path, f)
788 Logger.log(["I: Would remove unused link %s" % fp])
790 Logger.log(["I: Removing unused link %s" % fp])
794 Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
796 def add_file_from_pool(self, poolfile):
797 """Copies a file into the pool. Assumes that the PoolFile object is
798 attached to the same SQLAlchemy session as the Queue object is.
800 The caller is responsible for committing after calling this function."""
801 poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
803 # Check if we have a file of this name or this ID already
804 for f in self.queuefiles:
805 if f.fileid is not None and f.fileid == poolfile.file_id or \
806 f.poolfile.filename == poolfile_basename:
807 # In this case, update the BuildQueueFile entry so we
808 # don't remove it too early
809 f.lastused = datetime.now()
810 DBConn().session().object_session(poolfile).add(f)
813 # Prepare BuildQueueFile object
814 qf = BuildQueueFile()
815 qf.build_queue_id = self.queue_id
816 qf.lastused = datetime.now()
817 qf.filename = poolfile_basename
819 targetpath = poolfile.fullpath
820 queuepath = os.path.join(self.path, poolfile_basename)
824 # We need to copy instead of symlink
826 utils.copy(targetpath, queuepath)
827 # NULL in the fileid field implies a copy
830 os.symlink(targetpath, queuepath)
831 qf.fileid = poolfile.file_id
835 # Get the same session as the PoolFile is using and add the qf to it
836 DBConn().session().object_session(poolfile).add(qf)
841 __all__.append('BuildQueue')
844 def get_build_queue(queuename, session=None):
846 Returns BuildQueue object for given C{queue name}, creating it if it does not
849 @type queuename: string
850 @param queuename: The name of the queue
852 @type session: Session
853 @param session: Optional SQLA session object (a temporary one will be
854 generated if not supplied)
857 @return: BuildQueue object for the given queue
860 q = session.query(BuildQueue).filter_by(queue_name=queuename)
864 except NoResultFound:
867 __all__.append('get_build_queue')
869 ################################################################################
871 class BuildQueueFile(object):
872 def __init__(self, *args, **kwargs):
876 return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
880 return os.path.join(self.buildqueue.path, self.filename)
883 __all__.append('BuildQueueFile')
885 ################################################################################
887 class ChangePendingBinary(object):
888 def __init__(self, *args, **kwargs):
892 return '<ChangePendingBinary %s>' % self.change_pending_binary_id
894 __all__.append('ChangePendingBinary')
896 ################################################################################
898 class ChangePendingFile(object):
899 def __init__(self, *args, **kwargs):
903 return '<ChangePendingFile %s>' % self.change_pending_file_id
905 __all__.append('ChangePendingFile')
907 ################################################################################
909 class ChangePendingSource(object):
910 def __init__(self, *args, **kwargs):
914 return '<ChangePendingSource %s>' % self.change_pending_source_id
916 __all__.append('ChangePendingSource')
918 ################################################################################
class Component(ORMObject):
    """ORM class for the 'component' table (main, contrib, non-free, ...).

    Instances compare equal (and unequal) to plain strings holding the
    same component name; any other operand type falls back to the
    default comparison.
    """

    def __init__(self, component_name = None):
        self.component_name = component_name

    def __eq__(self, val):
        # Allow direct comparison against a plain component name.
        if isinstance(val, str):
            return (self.component_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.component_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        # 'component_name' first: ORMObject.__repr__() uses the first entry.
        return ['component_name', 'component_id', 'description', \
            'location_count', 'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        return ['component_name']
944 __all__.append('Component')
947 def get_component(component, session=None):
949 Returns database id for given C{component}.
951 @type component: string
952 @param component: The name of the override type
955 @return: the database id for the given component
958 component = component.lower()
960 q = session.query(Component).filter_by(component_name=component)
964 except NoResultFound:
967 __all__.append('get_component')
969 ################################################################################
971 class DBConfig(object):
972 def __init__(self, *args, **kwargs):
976 return '<DBConfig %s>' % self.name
978 __all__.append('DBConfig')
980 ################################################################################
983 def get_or_set_contents_file_id(filename, session=None):
985 Returns database id for given filename.
987 If no matching file is found, a row is inserted.
989 @type filename: string
990 @param filename: The filename
991 @type session: SQLAlchemy
992 @param session: Optional SQL session object (a temporary one will be
993 generated if not supplied). If not passed, a commit will be performed at
994 the end of the function, otherwise the caller is responsible for commiting.
997 @return: the database id for the given component
1000 q = session.query(ContentFilename).filter_by(filename=filename)
1003 ret = q.one().cafilename_id
1004 except NoResultFound:
1005 cf = ContentFilename()
1006 cf.filename = filename
1008 session.commit_or_flush()
1009 ret = cf.cafilename_id
1013 __all__.append('get_or_set_contents_file_id')
1016 def get_contents(suite, overridetype, section=None, session=None):
1018 Returns contents for a suite / overridetype combination, limiting
1019 to a section if not None.
1022 @param suite: Suite object
1024 @type overridetype: OverrideType
1025 @param overridetype: OverrideType object
1027 @type section: Section
1028 @param section: Optional section object to limit results to
1030 @type session: SQLAlchemy
1031 @param session: Optional SQL session object (a temporary one will be
1032 generated if not supplied)
1034 @rtype: ResultsProxy
1035 @return: ResultsProxy object set up to return tuples of (filename, section,
1039 # find me all of the contents for a given suite
1040 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
1044 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
1045 JOIN content_file_names n ON (c.filename=n.id)
1046 JOIN binaries b ON (b.id=c.binary_pkg)
1047 JOIN override o ON (o.package=b.package)
1048 JOIN section s ON (s.id=o.section)
1049 WHERE o.suite = :suiteid AND o.type = :overridetypeid
1050 AND b.type=:overridetypename"""
1052 vals = {'suiteid': suite.suite_id,
1053 'overridetypeid': overridetype.overridetype_id,
1054 'overridetypename': overridetype.overridetype}
1056 if section is not None:
1057 contents_q += " AND s.id = :sectionid"
1058 vals['sectionid'] = section.section_id
1060 contents_q += " ORDER BY fn"
1062 return session.execute(contents_q, vals)
1064 __all__.append('get_contents')
1066 ################################################################################
1068 class ContentFilepath(object):
1069 def __init__(self, *args, **kwargs):
1073 return '<ContentFilepath %s>' % self.filepath
1075 __all__.append('ContentFilepath')
1078 def get_or_set_contents_path_id(filepath, session=None):
1080 Returns database id for given path.
1082 If no matching file is found, a row is inserted.
1084 @type filepath: string
1085 @param filepath: The filepath
1087 @type session: SQLAlchemy
1088 @param session: Optional SQL session object (a temporary one will be
1089 generated if not supplied). If not passed, a commit will be performed at
1090 the end of the function, otherwise the caller is responsible for commiting.
1093 @return: the database id for the given path
1096 q = session.query(ContentFilepath).filter_by(filepath=filepath)
1099 ret = q.one().cafilepath_id
1100 except NoResultFound:
1101 cf = ContentFilepath()
1102 cf.filepath = filepath
1104 session.commit_or_flush()
1105 ret = cf.cafilepath_id
1109 __all__.append('get_or_set_contents_path_id')
1111 ################################################################################
1113 class ContentAssociation(object):
1114 def __init__(self, *args, **kwargs):
1118 return '<ContentAssociation %s>' % self.ca_id
1120 __all__.append('ContentAssociation')
1122 def insert_content_paths(binary_id, fullpaths, session=None):
1124 Make sure given path is associated with given binary id
1126 @type binary_id: int
1127 @param binary_id: the id of the binary
1128 @type fullpaths: list
1129 @param fullpaths: the list of paths of the file being associated with the binary
1130 @type session: SQLAlchemy session
1131 @param session: Optional SQLAlchemy session. If this is passed, the caller
1132 is responsible for ensuring a transaction has begun and committing the
1133 results or rolling back based on the result code. If not passed, a commit
1134 will be performed at the end of the function, otherwise the caller is
1135 responsible for commiting.
1137 @return: True upon success
1140 privatetrans = False
1142 session = DBConn().session()
1147 def generate_path_dicts():
1148 for fullpath in fullpaths:
1149 if fullpath.startswith( './' ):
1150 fullpath = fullpath[2:]
1152 yield {'filename':fullpath, 'id': binary_id }
1154 for d in generate_path_dicts():
1155 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
1164 traceback.print_exc()
1166 # Only rollback if we set up the session ourself
1173 __all__.append('insert_content_paths')
1175 ################################################################################
1177 class DSCFile(object):
1178 def __init__(self, *args, **kwargs):
1182 return '<DSCFile %s>' % self.dscfile_id
1184 __all__.append('DSCFile')
1187 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
1189 Returns a list of DSCFiles which may be empty
1191 @type dscfile_id: int (optional)
1192 @param dscfile_id: the dscfile_id of the DSCFiles to find
1194 @type source_id: int (optional)
1195 @param source_id: the source id related to the DSCFiles to find
1197 @type poolfile_id: int (optional)
1198 @param poolfile_id: the poolfile id related to the DSCFiles to find
1201 @return: Possibly empty list of DSCFiles
1204 q = session.query(DSCFile)
1206 if dscfile_id is not None:
1207 q = q.filter_by(dscfile_id=dscfile_id)
1209 if source_id is not None:
1210 q = q.filter_by(source_id=source_id)
1212 if poolfile_id is not None:
1213 q = q.filter_by(poolfile_id=poolfile_id)
1217 __all__.append('get_dscfiles')
1219 ################################################################################
1221 class PoolFile(ORMObject):
1222 def __init__(self, filename = None, location = None, filesize = -1, \
1224 self.filename = filename
1225 self.location = location
1226 self.filesize = filesize
1227 self.md5sum = md5sum
1231 return os.path.join(self.location.path, self.filename)
1233 def is_valid(self, filesize = -1, md5sum = None):
1234 return self.filesize == long(filesize) and self.md5sum == md5sum
1236 def properties(self):
1237 return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
1238 'sha256sum', 'location', 'source', 'binary', 'last_used']
1240 def not_null_constraints(self):
1241 return ['filename', 'md5sum', 'location']
1243 __all__.append('PoolFile')
def check_poolfile(filename, filesize, md5sum, location_id, session=None):
    """
    Returns a tuple:
    (ValidFileFound [boolean], PoolFile object or None)

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @param filesize: the size of the file to check against the DB

    @type md5sum: string
    @param md5sum: the md5sum of the file to check against the DB

    @type location_id: int
    @param location_id: the id of the location to look in

    @return: Tuple of length 2.
             - If valid pool file found: (C{True}, C{PoolFile object})
             - If valid pool file not found:
               - (C{False}, C{None}) if no file found
               - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
    """

    # Look the file up within the given location only.
    poolfile = session.query(Location).get(location_id). \
        files.filter_by(filename=filename).first()
    if poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum):

    return (valid, poolfile)

__all__.append('check_poolfile')
# TODO: the implementation can trivially be inlined at the place where the
# function is called

def get_poolfile_by_id(file_id, session=None):
    """
    Returns a PoolFile object or None for the given id

    @param file_id: the id of the file to look for

    @rtype: PoolFile or None
    @return: either the PoolFile object or None
    """

    return session.query(PoolFile).get(file_id)

__all__.append('get_poolfile_by_id')
def get_poolfile_like_name(filename, session=None):
    """
    Returns an array of PoolFile objects which are like the given name

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @return: array of PoolFile objects
    """

    # TODO: There must be a way of properly using bind parameters with %FOO%
    # Matches any pool file whose path ends in "/<filename>".
    q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))

__all__.append('get_poolfile_like_name')
def add_poolfile(filename, datadict, location_id, session=None):
    """
    Add a new file to the pool

    @type filename: string
    @param filename: filename

    @type datadict: dict
    @param datadict: dict with needed data (size, md5sum, sha1sum, sha256sum)

    @type location_id: int
    @param location_id: database id of the location

    @return: the PoolFile object created
    """
    poolfile = PoolFile()
    poolfile.filename = filename
    poolfile.filesize = datadict["size"]
    poolfile.md5sum = datadict["md5sum"]
    poolfile.sha1sum = datadict["sha1sum"]
    poolfile.sha256sum = datadict["sha256sum"]
    poolfile.location_id = location_id

    session.add(poolfile)
    # Flush to get a file id (NB: This is not a commit)

__all__.append('add_poolfile')
1351 ################################################################################
class Fingerprint(ORMObject):
    # ORM class for a row of the fingerprint table (a GPG key fingerprint
    # optionally linked to a keyring and a uid).
    def __init__(self, fingerprint = None):
        self.fingerprint = fingerprint

    def properties(self):
        return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \

    def not_null_constraints(self):
        return ['fingerprint']

__all__.append('Fingerprint')
def get_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).

    @return: the Fingerprint object for the given fpr or None
    """

    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    except NoResultFound:

__all__.append('get_fingerprint')
def get_or_set_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    If no matching fpr is found, a row is inserted.

    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.
    A flush will be performed either way.

    @return: the Fingerprint object for the given fpr
    """

    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    except NoResultFound:
        # Not in the database yet: insert a new row.
        fingerprint = Fingerprint()
        fingerprint.fingerprint = fpr
        session.add(fingerprint)
        session.commit_or_flush()

__all__.append('get_or_set_fingerprint')
1428 ################################################################################
# Helper routine for Keyring class
def get_ldap_name(entry):
    # Build a display name from the cn/mn/sn LDAP attributes, skipping
    # values that are empty or the "-" placeholder.
    for k in ["cn", "mn", "sn"]:
        if ret and ret[0] != "" and ret[0] != "-":
    return " ".join(name)
1439 ################################################################################
class Keyring(object):
    # Command line used to list keys with fingerprints; the %s placeholder
    # is the path of the keyring file.
    gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
                     " --with-colons --fingerprint --fingerprint"

    def __init__(self, *args, **kwargs):
        return '<Keyring %s>' % self.keyring_name
1454 def de_escape_gpg_str(self, txt):
1455 esclist = re.split(r'(\\x..)', txt)
1456 for x in range(1,len(esclist),2):
1457 esclist[x] = "%c" % (int(esclist[x][2:],16))
1458 return "".join(esclist)
    def parse_address(self, uid):
        """parses uid and returns a tuple of real name and email address"""

        (name, address) = email.Utils.parseaddr(uid)
        # Drop any parenthesised comment from the name, then undo GPG's
        # \xNN escaping.
        name = re.sub(r"\s*[(].*[)]", "", name)
        name = self.de_escape_gpg_str(name)

        return (name, address)
    def load_keys(self, keyring):
        """Parse 'gpg --with-colons' output for the given keyring file,
        populating self.keys and the fingerprint lookup table."""
        if not self.keyring_id:
            raise Exception('Must be initialized with database information')

        k = os.popen(self.gpg_invocation % keyring, "r")

        for line in k.xreadlines():
            field = line.split(":")
            if field[0] == "pub":
                # Primary key record: capture name / email from the uid field.
                (name, addr) = self.parse_address(field[9])
                self.keys[key]["email"] = addr
                self.keys[key]["name"] = name
                self.keys[key]["fingerprints"] = []
            elif key and field[0] == "sub" and len(field) >= 12:
                # Subkey record: note whether it carries the signing capability.
                signingkey = ("s" in field[11])
            elif key and field[0] == "uid":
                (name, addr) = self.parse_address(field[9])
                # Only take the first usable email address for the key.
                if "email" not in self.keys[key] and "@" in addr:
                    self.keys[key]["email"] = addr
                    self.keys[key]["name"] = name
            elif signingkey and field[0] == "fpr":
                # Remember the fingerprint and map it back to its key.
                self.keys[key]["fingerprints"].append(field[9])
                self.fpr_lookup[field[9]] = key
    def import_users_from_ldap(self, session):
        # Query LDAP for accounts that have key fingerprints and attach
        # their uid/name to the keys previously loaded from the keyring.
        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]

        l = ldap.open(LDAPServer)
        l.simple_bind_s("","")          # anonymous bind
        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
               ["uid", "keyfingerprint", "cn", "mn", "sn"])

        ldap_fin_uid_id = {}

        uid = entry["uid"][0]
        name = get_ldap_name(entry)
        fingerprints = entry["keyFingerPrint"]

        for f in fingerprints:
            # Match each LDAP fingerprint against the loaded keyring keys.
            key = self.fpr_lookup.get(f, None)
            if key not in self.keys:
                self.keys[key]["uid"] = uid

        keyid = get_or_set_uid(uid, session).uid_id
        byuid[keyid] = (uid, name)
        byname[uid] = (keyid, name)

        return (byname, byuid)
    def generate_users_from_keyring(self, format, session):
        # Create or look up Uid rows for every key in the keyring; the uid
        # string is derived from *format* and the key's email address.
        for x in self.keys.keys():
            if "email" not in self.keys[x]:
                # Key without a usable email address gets a placeholder uid.
                self.keys[x]["uid"] = format % "invalid-uid"
                uid = format % self.keys[x]["email"]
                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, self.keys[x]["name"])
                byname[uid] = (keyid, self.keys[x]["name"])
                self.keys[x]["uid"] = uid

        uid = format % "invalid-uid"
        keyid = get_or_set_uid(uid, session).uid_id
        byuid[keyid] = (uid, "ungeneratable user id")
        byname[uid] = (keyid, "ungeneratable user id")

        return (byname, byuid)

__all__.append('Keyring')
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @return: the Keyring object for this keyring
    """

    q = session.query(Keyring).filter_by(keyring_name=keyring)

    except NoResultFound:

__all__.append('get_keyring')
1585 ################################################################################
class KeyringACLMap(object):
    # ORM class mapping a keyring to an ACL entry.
    def __init__(self, *args, **kwargs):
        return '<KeyringACLMap %s>' % self.keyring_acl_map_id

__all__.append('KeyringACLMap')
1596 ################################################################################
class DBChange(object):
    # ORM class for a row of the changes table (an uploaded .changes file).
    def __init__(self, *args, **kwargs):
        return '<DBChange %s>' % self.changesname

    def clean_from_queue(self):
        # Detach this upload from its policy queue and drop the file
        # associations that tied it there.
        session = DBConn().session().object_session(self)

        # Remove changes_pool_files entries

        # Remove changes_pending_files references

        # Clear out of queue
        self.in_queue = None
        self.approved_for_id = None

__all__.append('DBChange')
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: DBChange object for the given filename (C{None} if not present)
    """
    q = session.query(DBChange).filter_by(changesname=filename)

    except NoResultFound:

__all__.append('get_dbchange')
1645 ################################################################################
class Location(ORMObject):
    # ORM class for a row of the location table (a directory of the
    # archive, e.g. the pool, tied to a component).
    def __init__(self, path = None, component = None):
        self.component = component
        # the column 'type' should go away, see comment at mapper
        self.archive_type = 'pool'

    def properties(self):
        return ['path', 'location_id', 'archive_type', 'component', \

    def not_null_constraints(self):
        return ['path', 'archive_type']

__all__.append('Location')
def get_location(location, component=None, archive=None, session=None):
    """
    Returns Location object for the given combination of location, component
    and archive.

    @type location: string
    @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}

    @type component: string
    @param component: the component name (if None, no restriction applied)

    @type archive: string
    @param archive: the archive name (if None, no restriction applied)

    @rtype: Location / None
    @return: Either a Location object or None if one can't be found
    """

    q = session.query(Location).filter_by(path=location)

    if archive is not None:
        q = q.join(Archive).filter_by(archive_name=archive)

    if component is not None:
        q = q.join(Component).filter_by(component_name=component)

    except NoResultFound:

__all__.append('get_location')
1697 ################################################################################
class Maintainer(ORMObject):
    # ORM class for a row of the maintainer table.
    def __init__(self, name = None):

    def properties(self):
        return ['name', 'maintainer_id']

    def not_null_constraints(self):

    def get_split_maintainer(self):
        # Split the "Name <email>" string via fix_maintainer(); an unset
        # name yields four empty strings.
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())

__all__.append('Maintainer')
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.
    A flush will be performed either way.

    @return: the Maintainer object for the given maintainer
    """

    q = session.query(Maintainer).filter_by(name=name)

    except NoResultFound:
        # Not in the database yet: insert a new row.
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()

__all__.append('get_or_set_maintainer')
def get_maintainer(maintainer_id, session=None):
    """
    Return the name of the maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @return: the Maintainer with this C{maintainer_id}
    """

    return session.query(Maintainer).get(maintainer_id)

__all__.append('get_maintainer')
1768 ################################################################################
class NewComment(object):
    # ORM class for a row of the new_comments table (a review comment on a
    # package version in the NEW queue).
    def __init__(self, *args, **kwargs):
        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)

__all__.append('NewComment')
def has_new_comment(package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)
    """

    q = session.query(NewComment)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    return bool(q.count() > 0)

__all__.append('has_new_comment')
def get_new_comments(package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: A (possibly empty) list of NewComment objects will be returned
    """

    q = session.query(NewComment)
    # Apply each filter only if the caller supplied that parameter.
    if package is not None: q = q.filter_by(package=package)
    if version is not None: q = q.filter_by(version=version)
    if comment_id is not None: q = q.filter_by(comment_id=comment_id)

__all__.append('get_new_comments')
1838 ################################################################################
class Override(ORMObject):
    # ORM class for a row of the override table: where a package is filed
    # (suite, component, section, priority) and under which override type.
    def __init__(self, package = None, suite = None, component = None, overridetype = None, \
        section = None, priority = None):
        self.package = package
        self.component = component
        self.overridetype = overridetype
        self.section = section
        self.priority = priority

    def properties(self):
        return ['package', 'suite', 'component', 'overridetype', 'section', \

    def not_null_constraints(self):
        return ['package', 'suite', 'component', 'overridetype', 'section']

__all__.append('Override')
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to. If
                  None, don't limit. Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
                      limit to. If None, don't limit. Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
                         limit to. If None, don't limit. Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: A (possibly empty) list of Override objects will be returned
    """

    q = session.query(Override)
    q = q.filter_by(package=package)

    # Scalar arguments are normalised to single-element lists so the
    # same in_() query shape works for both.
    if suite is not None:
        if not isinstance(suite, list): suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list): component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list): overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

__all__.append('get_override')
1907 ################################################################################
class OverrideType(ORMObject):
    # ORM class for a row of the override_type table (e.g. deb, udeb, dsc).
    def __init__(self, overridetype = None):
        self.overridetype = overridetype

    def properties(self):
        return ['overridetype', 'overridetype_id', 'overrides_count']

    def not_null_constraints(self):
        return ['overridetype']

__all__.append('OverrideType')
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: the database id for the given override type
    """

    q = session.query(OverrideType).filter_by(overridetype=override_type)

    except NoResultFound:

__all__.append('get_override_type')
1946 ################################################################################
class DebContents(object):
    """Maps one file path shipped by a binary (.deb) package."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Fixed typo in the repr string (was 'DebConetnts').
        return '<DebContents %s: %s>' % (self.package.package, self.file)
1955 __all__.append('DebContents')
class UdebContents(object):
    """Maps one file path shipped by an installer (.udeb) package."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Fixed typo in the repr string (was 'UdebConetnts').
        return '<UdebContents %s: %s>' % (self.package.package, self.file)
1965 __all__.append('UdebContents')
class PendingBinContents(object):
    # ORM class for contents recorded for a binary that is not yet accepted.
    def __init__(self, *args, **kwargs):
        return '<PendingBinContents %s>' % self.contents_id

__all__.append('PendingBinContents')
def insert_pending_content_paths(package,
    """
    Make sure given paths are temporarily associated with given
    package.

    @param package: the package to associate with should have been read in from the binary control file
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session. If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code. If not passed, a commit
    will be performed at the end of the function

    @return: True upon success, False if there is a problem
    """

    privatetrans = False
        # No session passed in: open our own private one.
        session = DBConn().session()

        arch = get_architecture(package['Architecture'], session)
        arch_id = arch.arch_id

        # Remove any already existing recorded files for this package
        q = session.query(PendingBinContents)
        q = q.filter_by(package=package['Package'])
        q = q.filter_by(version=package['Version'])
        q = q.filter_by(architecture=arch_id)

        for fullpath in fullpaths:

            # Normalise away a leading "./" path prefix.
            if fullpath.startswith( "./" ):
                fullpath = fullpath[2:]

            pca = PendingBinContents()
            pca.package = package['Package']
            pca.version = package['Version']
            pca.architecture = arch_id

                pca.type = 8 # gross
                pca.type = 7 # also gross

        # Only commit if we set up the session ourself

    except Exception, e:
        traceback.print_exc()

        # Only rollback if we set up the session ourself

__all__.append('insert_pending_content_paths')
2051 ################################################################################
class PolicyQueue(object):
    # ORM class for a row of the policy_queue table (e.g. NEW, byhand).
    def __init__(self, *args, **kwargs):
        return '<PolicyQueue %s>' % self.queue_name

__all__.append('PolicyQueue')
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: PolicyQueue object for the given queue
    """

    q = session.query(PolicyQueue).filter_by(queue_name=queuename)

    except NoResultFound:

__all__.append('get_policy_queue')
def get_policy_queue_from_path(pathname, session=None):
    """
    Returns PolicyQueue object for given C{path name}

    @type pathname: string
    @param pathname: The path

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: PolicyQueue object for the given queue
    """

    q = session.query(PolicyQueue).filter_by(path=pathname)

    except NoResultFound:

__all__.append('get_policy_queue_from_path')
2112 ################################################################################
class Priority(ORMObject):
    # ORM class for a row of the priority table (required/important/...).
    def __init__(self, priority = None, level = None):
        self.priority = priority

    def properties(self):
        return ['priority', 'priority_id', 'level', 'overrides_count']

    def not_null_constraints(self):
        return ['priority', 'level']
2125 def __eq__(self, val):
2126 if isinstance(val, str):
2127 return (self.priority == val)
2128 # This signals to use the normal comparison operator
2129 return NotImplemented
2131 def __ne__(self, val):
2132 if isinstance(val, str):
2133 return (self.priority != val)
2134 # This signals to use the normal comparison operator
2135 return NotImplemented
2137 __all__.append('Priority')
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Priority object for the given priority
    """

    q = session.query(Priority).filter_by(priority=priority)

    except NoResultFound:

__all__.append('get_priority')
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: dictionary of priority names -> id mappings
    """

    q = session.query(Priority)
        ret[x.priority] = x.priority_id

__all__.append('get_priorities')
2186 ################################################################################
class Section(ORMObject):
    # ORM class for a row of the section table (admin, devel, libs, ...).
    def __init__(self, section = None):
        self.section = section

    def properties(self):
        return ['section', 'section_id', 'overrides_count']

    def not_null_constraints(self):
2198 def __eq__(self, val):
2199 if isinstance(val, str):
2200 return (self.section == val)
2201 # This signals to use the normal comparison operator
2202 return NotImplemented
2204 def __ne__(self, val):
2205 if isinstance(val, str):
2206 return (self.section != val)
2207 # This signals to use the normal comparison operator
2208 return NotImplemented
2210 __all__.append('Section')
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Section object for the given section name
    """

    q = session.query(Section).filter_by(section=section)

    except NoResultFound:

__all__.append('get_section')
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: dictionary of section names -> id mappings
    """

    q = session.query(Section)
        ret[x.section] = x.section_id

__all__.append('get_sections')
2259 ################################################################################
class DBSource(ORMObject):
    # ORM class for a row of the source table (a source package version
    # in the archive).
    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date
    def properties(self):
        # Attributes exposed through the generic ORMObject interface.
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \
            'install_date', 'binaries_count']
2276 def not_null_constraints(self):
2277 return ['source', 'version', 'install_date', 'maintainer', \
2278 'changedby', 'poolfile', 'install_date']
2280 __all__.append('DBSource')
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
      1. exact match => 1.0-3
      2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: returns 1 if a source with expected version is found, otherwise 0
    """
    # NOTE(review): 'suites' has a mutable default list; appears read-only
    # here, but confirm it is never mutated before relying on that.

    from daklib.regexes import re_bin_only_nmu
    # Strip a binNMU suffix (e.g. "+b1") to get the plain source version.
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))

        # source must exist in suite X, or in some other suite that's
        # mapped to X, recursively... silent-maps are counted too,
        # unreleased-maps aren't.
        maps = cnf.ValueList("SuiteMappings")[:]
        maps = [ m.split() for m in maps ]
        maps = [ (x[1], x[2]) for x in maps
                 if x[0] == "map" or x[0] == "silent-map" ]
        for (from_, to) in maps:
            if from_ in s and to not in s:

        q = q.filter(DBSource.suites.any(Suite.suite_name.in_(s)))

    # No source found so return not ok

__all__.append('source_exists')
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @param source: DBSource package name to search for

    @return: list of Suite objects for the given source
    """

    return session.query(Suite).filter(Suite.sources.any(source=source)).all()

__all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect. If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: list of DBSource objects for the given name (may be empty)
    """

    q = session.query(DBSource).filter_by(source=source)

    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)

__all__.append('get_sources_from_name')
# FIXME: This function fails badly if it finds more than 1 source package and
# its implementation is trivial enough to be inlined.

def get_source_in_suite(source, suite, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @param suite: the suite name

    @return: the version for I{source} in I{suite}
    """

    q = get_suite(suite, session).get_sources(source)

    except NoResultFound:

__all__.append('get_source_in_suite')
2423 ################################################################################
def add_dsc_to_db(u, filename, session=None):
    # Record an uploaded .dsc in the database: the source row, its pool
    # files, dsc_files entries and src_uploaders.  Returns
    # (source, dsc_component, dsc_location_id, pfs).
    entry = u.pkg.files[filename]

    source.source = u.pkg.dsc["source"]
    source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
    source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
    source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    source.install_date = datetime.now().date()

    dsc_component = entry["component"]
    dsc_location_id = entry["location id"]

    source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")

    # Set up a new poolfile if necessary
    if not entry.has_key("files id") or not entry["files id"]:
        filename = entry["pool name"] + filename
        poolfile = add_poolfile(filename, entry, dsc_location_id, session)
        pfs.append(poolfile)
        entry["files id"] = poolfile.file_id

    source.poolfile_id = entry["files id"]

    suite_names = u.pkg.changes["distribution"].keys()
    source.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Add the source files to the DB (files and dsc_files)
    dscfile.source_id = source.source_id
    dscfile.poolfile_id = entry["files id"]
    session.add(dscfile)

    for dsc_file, dentry in u.pkg.dsc_files.items():
        df.source_id = source.source_id

        # If the .orig tarball is already in the pool, its
        # files id is stored in dsc_files by check_dsc().
        files_id = dentry.get("files id", None)

        # Find the entry in the files hash
        # TODO: Bail out here properly
        for f, e in u.pkg.files.items():

        if files_id is None:
            filename = dfentry["pool name"] + dsc_file

            (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
            # FIXME: needs to check for -1/-2 and or handle exception
            if found and obj is not None:
                files_id = obj.file_id

            # If still not found, add it
            if files_id is None:
                # HACK: Force sha1sum etc into dentry
                dentry["sha1sum"] = dfentry["sha1sum"]
                dentry["sha256sum"] = dfentry["sha256sum"]
                poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
                pfs.append(poolfile)
                files_id = poolfile.file_id

        poolfile = get_poolfile_by_id(files_id, session)
        if poolfile is None:
            utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
        pfs.append(poolfile)

        df.poolfile_id = files_id

    # Add the src_uploaders to the DB
    uploader_ids = [source.maintainer_id]
    if u.pkg.dsc.has_key("uploaders"):
        # Uploaders are comma-separated; split on the ">, " boundary.
        for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
            uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)

    for up_id in uploader_ids:
        # Deduplicate: a maintainer may also be listed as an uploader.
        if added_ids.has_key(up_id):
            utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))

        su.maintainer_id = up_id
        su.source_id = source.source_id

    return source, dsc_component, dsc_location_id, pfs

__all__.append('add_dsc_to_db')
def add_deb_to_db(u, filename, session=None):
    """
    Contrary to what you might expect, this routine deals with both
    debs and udebs. That info is in 'dbtype', whilst 'type' is
    'deb' for both of them
    """
    entry = u.pkg.files[filename]

    bin.package = entry["package"]
    bin.version = entry["version"]
    bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
    bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    bin.arch_id = get_architecture(entry["architecture"], session).arch_id
    bin.binarytype = entry["dbtype"]

    # Resolve the pool file for this binary, creating it if necessary.
    filename = entry["pool name"] + filename
    fullpath = os.path.join(cnf["Dir::Pool"], filename)
    if not entry.get("location id", None):
        entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id

    if entry.get("files id", None):
        poolfile = get_poolfile_by_id(bin.poolfile_id)
        bin.poolfile_id = entry["files id"]

    poolfile = add_poolfile(filename, entry, entry["location id"], session)
    bin.poolfile_id = entry["files id"] = poolfile.file_id

    # A binary must map to exactly one source package.
    bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
    if len(bin_sources) != 1:
        raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
                                  (bin.package, bin.version, entry["architecture"],
                                   filename, bin.binarytype, u.pkg.changes["fingerprint"])

    bin.source_id = bin_sources[0].source_id

    # Add and flush object so it has an ID

    suite_names = u.pkg.changes["distribution"].keys()
    bin.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Deal with contents - disabled for now
    #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
    #    print "REJECT\nCould not determine contents of package %s" % bin.package
    #    session.rollback()
    #    raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)

__all__.append('add_deb_to_db')
2593 ################################################################################
class SourceACL(object):
    """Row of the source_acl table (see the mapper in DBConn.__setupmappers)."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __init__ body and the 'def __repr__(self):'
        # line are elided from this view; the return below belongs to
        # __repr__.
        return '<SourceACL %s>' % self.source_acl_id
2602 __all__.append('SourceACL')
2604 ################################################################################
class SrcFormat(object):
    """Row of the src_format table (a named source package format)."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __init__ body and the 'def __repr__(self):'
        # line are elided from this view; the return below belongs to
        # __repr__.
        return '<SrcFormat %s>' % (self.format_name)
2613 __all__.append('SrcFormat')
2615 ################################################################################
class SrcUploader(object):
    """Row of the src_uploaders table linking a source to a Maintainer."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __init__ body and the 'def __repr__(self):'
        # line are elided from this view; the return below belongs to
        # __repr__.
        return '<SrcUploader %s>' % self.uploader_id
2624 __all__.append('SrcUploader')
2626 ################################################################################
# (Display name, Suite attribute) pairs used below inside class Suite to
# render a suite as "Name: value" lines.
# NOTE(review): one list entry appears to be elided from this view of
# the file.
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
2644 # Why the heck don't we have any UNIQUE constraints in table suite?
2645 # TODO: Add UNIQUE constraints for appropriate columns.
class Suite(ORMObject):
    """An archive suite, mapped onto table 'suite' in DBConn."""

    def __init__(self, suite_name = None, version = None):
        self.suite_name = suite_name
        self.version = version

    def properties(self):
        # Attribute names exposed via ORMObject introspection.
        # NOTE(review): the continuation line(s) of this list are elided
        # from this view of the file.
        return ['suite_name', 'version', 'sources_count', 'binaries_count', \

    def not_null_constraints(self):
        # Columns that must be populated for a valid Suite row.
        return ['suite_name', 'version']

    def __eq__(self, val):
        # Allow comparing a Suite directly against a suite-name string.
        if isinstance(val, str):
            return (self.suite_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.suite_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    # NOTE(review): the 'def' line of the method containing the loop
    # below is elided from this view (presumably a details()-style
    # formatter, with 'ret = []' and a None-guard also elided); it
    # renders "Display: value" lines from SUITE_FIELDS.
    for disp, field in SUITE_FIELDS:
        val = getattr(self, field, None)
        ret.append("%s: %s" % (disp, val))
    return "\n".join(ret)

    def get_architectures(self, skipsrc=False, skipall=False):
        """
        Returns list of Architecture objects

        @type skipsrc: boolean
        @param skipsrc: Whether to skip returning the 'source' architecture entry

        @type skipall: boolean
        @param skipall: Whether to skip returning the 'all' architecture entry

        @rtype: list
        @return: list of Architecture objects for the given name (may be empty)
        """
        q = object_session(self).query(Architecture).with_parent(self)
        # NOTE(review): the 'if skipsrc:' / 'if skipall:' guard lines are
        # elided from this view; each filter presumably applies only when
        # its flag is set -- confirm against the full file.
        q = q.filter(Architecture.arch_string != 'source')
        q = q.filter(Architecture.arch_string != 'all')
        return q.order_by(Architecture.arch_string).all()

    def get_sources(self, source):
        """
        Returns a query object representing DBSource that is part of C{suite}.

        - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}

        @type source: string
        @param source: source package name

        @rtype: sqlalchemy.orm.query.Query
        @return: a query of DBSource
        """
        session = object_session(self)
        # NOTE(review): the continuation of this statement (presumably a
        # .with_parent(self) restricting the query to this suite) is
        # elided from this view.
        return session.query(DBSource).filter_by(source = source). \
2720 __all__.append('Suite')
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Suite
    @return: Suite object for the requested suite name (None if not present)
    """
    q = session.query(Suite).filter_by(suite_name=suite)

    # NOTE(review): the 'try: return q.one()' and 'return None' lines
    # around this handler are elided from this view -- presumably a
    # missing suite yields None rather than an exception; confirm.
    except NoResultFound:
2745 __all__.append('get_suite')
2747 ################################################################################
2749 # TODO: should be removed because the implementation is too trivial
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Return the list of Architecture objects for the suite named C{suite}.

    Thin convenience wrapper: resolves the suite name and delegates to
    L{Suite.get_architectures}.

    @type suite: string
    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Architecture objects for the given name (may be empty)
    """
    the_suite = get_suite(suite, session)
    return the_suite.get_architectures(skipsrc, skipall)
2776 __all__.append('get_suite_architectures')
2778 ################################################################################
class SuiteSrcFormat(object):
    """Association row linking a suite to an allowed source format."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __init__ body and the 'def __repr__(self):'
        # line are elided from this view; the return below belongs to
        # __repr__.
        return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
2787 __all__.append('SuiteSrcFormat')
def get_suite_src_formats(suite, session=None):
    """
    Returns list of allowed SrcFormat for C{suite}.

    @type suite: string
    @param suite: Suite name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: the list of allowed source formats for I{suite}
    """
    # Join src_format -> suite_src_formats -> suite, ordered by format
    # name for stable output.
    q = session.query(SrcFormat)
    q = q.join(SuiteSrcFormat)
    q = q.join(Suite).filter_by(suite_name=suite)
    q = q.order_by('format_name')

    # NOTE(review): the final return (presumably 'return q.all()') is
    # elided from this view of the file.
2812 __all__.append('get_suite_src_formats')
2814 ################################################################################
class Uid(ORMObject):
    """A key uid row (table 'uid'), optionally linked to Fingerprint rows."""

    def __init__(self, uid = None, name = None):
        # NOTE(review): the assignments of self.uid / self.name are
        # elided from this view of the file.

    def __eq__(self, val):
        # Allow comparing a Uid directly against its uid string.
        if isinstance(val, str):
            return (self.uid == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.uid != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        # Attribute names exposed via ORMObject introspection.
        return ['uid', 'name', 'fingerprint']

    def not_null_constraints(self):
        # NOTE(review): the returned list is elided from this view.
2839 __all__.append('Uid')
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: Uid
    @return: the uid object for the given uidname
    """
    q = session.query(Uid).filter_by(uid=uidname)

    # NOTE(review): the 'try: ... q.one()' line, the creation of the new
    # Uid row inside this handler, and the final return are all elided
    # from this view; commit_or_flush presumably persists the new row.
    except NoResultFound:
        session.commit_or_flush()
2873 __all__.append('get_or_set_uid')
def get_uid_from_fingerprint(fpr, session=None):
    # Look up the Uid attached to the given key fingerprint string.
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)

    # NOTE(review): the 'try: return q.one()' and 'return None' lines
    # around this handler are elided from this view.
    except NoResultFound:
2885 __all__.append('get_uid_from_fingerprint')
2887 ################################################################################
class UploadBlock(object):
    """Row of the upload_blocks table (a block on uploads of a source)."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __init__ body and the 'def __repr__(self):'
        # line are elided from this view; the return below belongs to
        # __repr__.
        return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
2896 __all__.append('UploadBlock')
2898 ################################################################################
class DBConn(object):
    """
    database module init.
    """
    # NOTE(review): the class-level shared-state dict referenced as
    # self.__shared_state in __init__ is elided from this view.

    def __init__(self, *args, **kwargs):
        # Borg-style shared state: every DBConn instance shares one
        # __dict__, so the engine/mapper setup below happens only once
        # per process.
        self.__dict__ = self.__shared_state

        if not getattr(self, 'initialised', False):
            self.initialised = True
            # Enable SQLAlchemy echo output when a 'debug' kwarg is
            # passed (Python 2 has_key idiom).
            self.debug = kwargs.has_key('debug')
            # NOTE(review): the call that performs the actual setup
            # (presumably self.__createconn()) is elided here.
    def __setuptables(self):
        # Reflect the database schema into SQLAlchemy Table objects,
        # exposed as self.tbl_<name> (and self.view_<name> for views).
        # NOTE(review): this view of the file is elided -- many entries
        # in the name sequences below, the closing parentheses, and the
        # 'views = (' opener are not shown.

        # Tables whose SERIAL 'id' column must be declared explicitly
        # (see the sqlalchemy 0.5 workaround comment below).
        tables_with_primary = (
            'build_queue_files',
            'changes_pending_binaries',
            'changes_pending_files',
            'changes_pending_source',
            'pending_bin_contents',

        # Tables reflected without an explicitly declared primary key.
        tables_no_primary = (
            'changes_pending_files_map',
            'changes_pending_source_files',
            'changes_pool_files',
            # TODO: the maintainer column in table override should be removed.
            'suite_architectures',
            'suite_src_formats',
            'suite_build_queue_copy',

        # Database views, reflected read-only as self.view_<name>.
            'almost_obsolete_all_associations',
            'almost_obsolete_src_associations',
            'any_associations_source',
            'bin_assoc_by_arch',
            'bin_associations_binaries',
            'binaries_suite_arch',
            'binfiles_suite_component_arch',
            'newest_all_associations',
            'newest_any_associations',
            'newest_src_association',
            'obsolete_all_associations',
            'obsolete_any_associations',
            'obsolete_any_by_all_associations',
            'obsolete_src_associations',
            'src_associations_bin',
            'src_associations_src',
            'suite_arch_by_name',

        # Sqlalchemy version 0.5 fails to reflect the SERIAL type
        # correctly and that is why we have to use a workaround. It can
        # be removed as soon as we switch to version 0.6.
        for table_name in tables_with_primary:
            table = Table(table_name, self.db_meta, \
                Column('id', Integer, primary_key = True), \
                autoload=True, useexisting=True)
            setattr(self, 'tbl_%s' % table_name, table)

        for table_name in tables_no_primary:
            table = Table(table_name, self.db_meta, autoload=True)
            setattr(self, 'tbl_%s' % table_name, table)

        # bin_contents needs special attention until update #41 has been
        # applied: its composite (file, binary_id) primary key is
        # declared by hand here.
        self.tbl_bin_contents = Table('bin_contents', self.db_meta, \
            Column('file', Text, primary_key = True),
            Column('binary_id', Integer, ForeignKey('binaries.id'), \
                primary_key = True),
            autoload=True, useexisting=True)

        for view_name in views:
            view = Table(view_name, self.db_meta, autoload=True)
            setattr(self, 'view_%s' % view_name, view)
    def __setupmappers(self):
        # Map every ORM class onto its reflected table.  The column
        # aliases (e.g. arch_id for architecture.id) preserve dak's
        # historical attribute names; 'extension = validator' attaches
        # the not-null validation MapperExtension where used.
        # NOTE(review): blank lines between the mapper() calls (and one
        # 'properties = dict(' line near the end, see below) are elided
        # from this view of the file.
        mapper(Architecture, self.tbl_architecture,
            properties = dict(arch_id = self.tbl_architecture.c.id,
                suites = relation(Suite, secondary=self.tbl_suite_architectures,
                    order_by='suite_name',
                    backref=backref('architectures', order_by='arch_string'))),
            extension = validator)

        mapper(Archive, self.tbl_archive,
            properties = dict(archive_id = self.tbl_archive.c.id,
                archive_name = self.tbl_archive.c.name))

        mapper(PendingBinContents, self.tbl_pending_bin_contents,
            properties = dict(contents_id =self.tbl_pending_bin_contents.c.id,
                filename = self.tbl_pending_bin_contents.c.filename,
                package = self.tbl_pending_bin_contents.c.package,
                version = self.tbl_pending_bin_contents.c.version,
                arch = self.tbl_pending_bin_contents.c.arch,
                otype = self.tbl_pending_bin_contents.c.type))

        mapper(DebContents, self.tbl_deb_contents,
            properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
                package=self.tbl_deb_contents.c.package,
                suite=self.tbl_deb_contents.c.suite,
                arch=self.tbl_deb_contents.c.arch,
                section=self.tbl_deb_contents.c.section,
                filename=self.tbl_deb_contents.c.filename))

        mapper(UdebContents, self.tbl_udeb_contents,
            properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
                package=self.tbl_udeb_contents.c.package,
                suite=self.tbl_udeb_contents.c.suite,
                arch=self.tbl_udeb_contents.c.arch,
                section=self.tbl_udeb_contents.c.section,
                filename=self.tbl_udeb_contents.c.filename))

        mapper(BuildQueue, self.tbl_build_queue,
            properties = dict(queue_id = self.tbl_build_queue.c.id))

        mapper(BuildQueueFile, self.tbl_build_queue_files,
            properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
                poolfile = relation(PoolFile, backref='buildqueueinstances')))

        mapper(DBBinary, self.tbl_binaries,
            properties = dict(binary_id = self.tbl_binaries.c.id,
                package = self.tbl_binaries.c.package,
                version = self.tbl_binaries.c.version,
                maintainer_id = self.tbl_binaries.c.maintainer,
                maintainer = relation(Maintainer),
                source_id = self.tbl_binaries.c.source,
                source = relation(DBSource, backref='binaries'),
                arch_id = self.tbl_binaries.c.architecture,
                architecture = relation(Architecture),
                poolfile_id = self.tbl_binaries.c.file,
                poolfile = relation(PoolFile, backref=backref('binary', uselist = False)),
                binarytype = self.tbl_binaries.c.type,
                fingerprint_id = self.tbl_binaries.c.sig_fpr,
                fingerprint = relation(Fingerprint),
                install_date = self.tbl_binaries.c.install_date,
                suites = relation(Suite, secondary=self.tbl_bin_associations,
                    backref=backref('binaries', lazy='dynamic'))),
            extension = validator)

        mapper(BinaryACL, self.tbl_binary_acl,
            properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))

        mapper(BinaryACLMap, self.tbl_binary_acl_map,
            properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
                fingerprint = relation(Fingerprint, backref="binary_acl_map"),
                architecture = relation(Architecture)))

        mapper(Component, self.tbl_component,
            properties = dict(component_id = self.tbl_component.c.id,
                component_name = self.tbl_component.c.name),
            extension = validator)

        mapper(DBConfig, self.tbl_config,
            properties = dict(config_id = self.tbl_config.c.id))

        mapper(DSCFile, self.tbl_dsc_files,
            properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
                source_id = self.tbl_dsc_files.c.source,
                source = relation(DBSource),
                poolfile_id = self.tbl_dsc_files.c.file,
                poolfile = relation(PoolFile)))

        mapper(PoolFile, self.tbl_files,
            properties = dict(file_id = self.tbl_files.c.id,
                filesize = self.tbl_files.c.size,
                location_id = self.tbl_files.c.location,
                location = relation(Location,
                    # using lazy='dynamic' in the back
                    # reference because we have A LOT of
                    # files in one location
                    backref=backref('files', lazy='dynamic'))),
            extension = validator)

        mapper(Fingerprint, self.tbl_fingerprint,
            properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
                uid_id = self.tbl_fingerprint.c.uid,
                uid = relation(Uid),
                keyring_id = self.tbl_fingerprint.c.keyring,
                keyring = relation(Keyring),
                source_acl = relation(SourceACL),
                binary_acl = relation(BinaryACL)),
            extension = validator)

        mapper(Keyring, self.tbl_keyrings,
            properties = dict(keyring_name = self.tbl_keyrings.c.name,
                keyring_id = self.tbl_keyrings.c.id))

        mapper(DBChange, self.tbl_changes,
            properties = dict(change_id = self.tbl_changes.c.id,
                poolfiles = relation(PoolFile,
                    secondary=self.tbl_changes_pool_files,
                    backref="changeslinks"),
                seen = self.tbl_changes.c.seen,
                source = self.tbl_changes.c.source,
                binaries = self.tbl_changes.c.binaries,
                architecture = self.tbl_changes.c.architecture,
                distribution = self.tbl_changes.c.distribution,
                urgency = self.tbl_changes.c.urgency,
                maintainer = self.tbl_changes.c.maintainer,
                changedby = self.tbl_changes.c.changedby,
                date = self.tbl_changes.c.date,
                version = self.tbl_changes.c.version,
                files = relation(ChangePendingFile,
                    secondary=self.tbl_changes_pending_files_map,
                    backref="changesfile"),
                in_queue_id = self.tbl_changes.c.in_queue,
                in_queue = relation(PolicyQueue,
                    primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
                approved_for_id = self.tbl_changes.c.approved_for))

        mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
            properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))

        mapper(ChangePendingFile, self.tbl_changes_pending_files,
            properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
                filename = self.tbl_changes_pending_files.c.filename,
                size = self.tbl_changes_pending_files.c.size,
                md5sum = self.tbl_changes_pending_files.c.md5sum,
                sha1sum = self.tbl_changes_pending_files.c.sha1sum,
                sha256sum = self.tbl_changes_pending_files.c.sha256sum))

        mapper(ChangePendingSource, self.tbl_changes_pending_source,
            properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
                change = relation(DBChange),
                # maintainer and changedby both point at tbl_maintainer,
                # hence the explicit primaryjoins.
                maintainer = relation(Maintainer,
                    primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
                changedby = relation(Maintainer,
                    primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
                fingerprint = relation(Fingerprint),
                source_files = relation(ChangePendingFile,
                    secondary=self.tbl_changes_pending_source_files,
                    backref="pending_sources")))

        mapper(KeyringACLMap, self.tbl_keyring_acl_map,
            properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
                keyring = relation(Keyring, backref="keyring_acl_map"),
                architecture = relation(Architecture)))

        mapper(Location, self.tbl_location,
            properties = dict(location_id = self.tbl_location.c.id,
                component_id = self.tbl_location.c.component,
                component = relation(Component, backref='location'),
                archive_id = self.tbl_location.c.archive,
                archive = relation(Archive),
                # FIXME: the 'type' column is old cruft and
                # should be removed in the future.
                archive_type = self.tbl_location.c.type),
            extension = validator)

        mapper(Maintainer, self.tbl_maintainer,
            properties = dict(maintainer_id = self.tbl_maintainer.c.id,
                # A maintainer row backs both the Maintainer and the
                # Changed-By fields of source packages.
                maintains_sources = relation(DBSource, backref='maintainer',
                    primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
                changed_sources = relation(DBSource, backref='changedby',
                    primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
            extension = validator)

        mapper(NewComment, self.tbl_new_comments,
            properties = dict(comment_id = self.tbl_new_comments.c.id))

        mapper(Override, self.tbl_override,
            properties = dict(suite_id = self.tbl_override.c.suite,
                suite = relation(Suite, \
                    backref=backref('overrides', lazy='dynamic')),
                package = self.tbl_override.c.package,
                component_id = self.tbl_override.c.component,
                component = relation(Component, \
                    backref=backref('overrides', lazy='dynamic')),
                priority_id = self.tbl_override.c.priority,
                priority = relation(Priority, \
                    backref=backref('overrides', lazy='dynamic')),
                section_id = self.tbl_override.c.section,
                section = relation(Section, \
                    backref=backref('overrides', lazy='dynamic')),
                overridetype_id = self.tbl_override.c.type,
                overridetype = relation(OverrideType, \
                    backref=backref('overrides', lazy='dynamic'))))

        mapper(OverrideType, self.tbl_override_type,
            properties = dict(overridetype = self.tbl_override_type.c.type,
                overridetype_id = self.tbl_override_type.c.id))

        mapper(PolicyQueue, self.tbl_policy_queue,
            properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))

        mapper(Priority, self.tbl_priority,
            properties = dict(priority_id = self.tbl_priority.c.id))

        mapper(Section, self.tbl_section,
            properties = dict(section_id = self.tbl_section.c.id,
                section=self.tbl_section.c.section))

        mapper(DBSource, self.tbl_source,
            properties = dict(source_id = self.tbl_source.c.id,
                version = self.tbl_source.c.version,
                maintainer_id = self.tbl_source.c.maintainer,
                poolfile_id = self.tbl_source.c.file,
                poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
                fingerprint_id = self.tbl_source.c.sig_fpr,
                fingerprint = relation(Fingerprint),
                changedby_id = self.tbl_source.c.changedby,
                srcfiles = relation(DSCFile,
                    primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
                suites = relation(Suite, secondary=self.tbl_src_associations,
                    backref=backref('sources', lazy='dynamic')),
                srcuploaders = relation(SrcUploader)),
            extension = validator)

        mapper(SourceACL, self.tbl_source_acl,
            properties = dict(source_acl_id = self.tbl_source_acl.c.id))

        mapper(SrcFormat, self.tbl_src_format,
            properties = dict(src_format_id = self.tbl_src_format.c.id,
                format_name = self.tbl_src_format.c.format_name))

        mapper(SrcUploader, self.tbl_src_uploaders,
            properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
                source_id = self.tbl_src_uploaders.c.source,
                source = relation(DBSource,
                    primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
                maintainer_id = self.tbl_src_uploaders.c.maintainer,
                maintainer = relation(Maintainer,
                    primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))

        mapper(Suite, self.tbl_suite,
            properties = dict(suite_id = self.tbl_suite.c.id,
                policy_queue = relation(PolicyQueue),
                copy_queues = relation(BuildQueue,
                    secondary=self.tbl_suite_build_queue_copy)),
            extension = validator)

        mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
            properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
                suite = relation(Suite, backref='suitesrcformats'),
                src_format_id = self.tbl_suite_src_formats.c.src_format,
                src_format = relation(SrcFormat)))

        mapper(Uid, self.tbl_uid,
            properties = dict(uid_id = self.tbl_uid.c.id,
                fingerprint = relation(Fingerprint)),
            extension = validator)

        mapper(UploadBlock, self.tbl_upload_blocks,
            properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
                fingerprint = relation(Fingerprint, backref="uploadblocks"),
                uid = relation(Uid, backref="uploadblocks")))

        # NOTE(review): the 'properties = dict(' line of this final
        # mapper call is elided from this view of the file.
        mapper(BinContents, self.tbl_bin_contents,
                binary = relation(DBBinary,
                    backref=backref('contents', lazy='dynamic')),
                file = self.tbl_bin_contents.c.file))
    ## Connection functions
    def __createconn(self):
        # Build the PostgreSQL connection string from dak's config, then
        # create the engine, reflect tables and install the ORM mappers.
        from config import Config
        # NOTE(review): the lines fetching the config ('cnf') and the
        # 'if cnf["DB::Host"]:' guard are elided from this view.
        # TCP variant: host[:port]/dbname.
        connstr = "postgres://%s" % cnf["DB::Host"]
        if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
            connstr += ":%s" % cnf["DB::Port"]
        connstr += "/%s" % cnf["DB::Name"]
        # NOTE(review): the 'else:' introducing this Unix-socket variant
        # is elided -- it presumably runs only when DB::Host is unset.
        connstr = "postgres:///%s" % cnf["DB::Name"]
        if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
            connstr += "?port=%s" % cnf["DB::Port"]

        self.db_pg = create_engine(connstr, echo=self.debug)
        self.db_meta = MetaData()
        self.db_meta.bind = self.db_pg
        # NOTE(review): the keyword-argument continuation of this
        # sessionmaker(...) call is elided from this view.
        self.db_smaker = sessionmaker(bind=self.db_pg,

        self.__setuptables()
        self.__setupmappers()

    # NOTE(review): the 'def session(self):' line is elided from this
    # view; the return below is that method's body (a fresh session
    # from the factory created above).
        return self.db_smaker()
3323 __all__.append('DBConn')