5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
37 from os.path import normpath
48 import simplejson as json
50 from datetime import datetime, timedelta
51 from errno import ENOENT
52 from tempfile import mkstemp, mkdtemp
53 from subprocess import Popen, PIPE
54 from tarfile import TarFile
56 from inspect import getargspec
59 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
61 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
62 backref, MapperExtension, EXT_CONTINUE, object_mapper
63 from sqlalchemy import types as sqltypes
65 # Don't remove this, we re-export the exceptions to scripts which import us
66 from sqlalchemy.exc import *
67 from sqlalchemy.orm.exc import NoResultFound
69 # Only import Config until Queue stuff is changed to store its config
71 from config import Config
72 from textutils import fix_maintainer
73 from dak_exceptions import DBUpdateError, NoSourceFieldError
75 # suppress some deprecation warnings in squeeze related to sqlalchemy
77 warnings.filterwarnings('ignore', \
78 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
80 # TODO: sqlalchemy needs some extra configuration to correctly reflect
81 # the ind_deb_contents_* indexes - we ignore the warnings at the moment
82 warnings.filterwarnings("ignore", 'Predicate of partial index', SAWarning)
85 ################################################################################
87 # Patch in support for the debversion field type so that it works during
91 # that is for sqlalchemy 0.6
92 UserDefinedType = sqltypes.UserDefinedType
94 # this one for sqlalchemy 0.5
95 UserDefinedType = sqltypes.TypeEngine
97 class DebVersion(UserDefinedType):
98 def get_col_spec(self):
101 def bind_processor(self, dialect):
104 # ' = None' is needed for sqlalchemy 0.5:
105 def result_processor(self, dialect, coltype = None):
108 sa_major_version = sqlalchemy.__version__[0:3]
109 if sa_major_version in ["0.5", "0.6"]:
110 from sqlalchemy.databases import postgres
111 postgres.ischema_names['debversion'] = DebVersion
113 raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
115 ################################################################################
117 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
119 ################################################################################
def session_wrapper(fn):
    """
    Wrapper around common ".., session=None):" handling. If the wrapped
    function is called without passing 'session', we create a local one
    and destroy it when the function ends.

    Also attaches a commit_or_flush method to the session; if we created a
    local session, this is a synonym for session.commit(), otherwise it is a
    synonym for session.flush().
    """

    def wrapped(*args, **kwargs):
        private_transaction = False

        # Find the session object
        session = kwargs.get('session')

        if session is None:
            if len(args) <= len(getargspec(fn)[0]) - 1:
                # No session specified as last argument or in kwargs
                private_transaction = True
                session = kwargs['session'] = DBConn().session()
            else:
                # Session is last argument in args
                session = args[-1]
                if session is None:
                    # Positional slot present but empty: fill it with a
                    # fresh private session (args must become mutable).
                    args = list(args)
                    session = args[-1] = DBConn().session()
                    private_transaction = True

        if private_transaction:
            session.commit_or_flush = session.commit
        else:
            session.commit_or_flush = session.flush

        try:
            return fn(*args, **kwargs)
        finally:
            if private_transaction:
                # We created a session; close it.
                session.close()

    # Preserve the wrapped function's metadata (Python 2: func_name).
    wrapped.__doc__ = fn.__doc__
    wrapped.func_name = fn.func_name

    return wrapped
168 __all__.append('session_wrapper')
170 ################################################################################
172 class ORMObject(object):
174 ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
175 derived classes must implement the properties() method.
178 def properties(self):
180 This method should be implemented by all derived classes and returns a
181 list of the important properties. The properties 'created' and
182 'modified' will be added automatically. A suffix '_count' should be
183 added to properties that are lists or query objects. The most important
184 property name should be returned as the first element in the list
185 because it is used by repr().
191 Returns a JSON representation of the object based on the properties
192 returned from the properties() method.
195 # add created and modified
196 all_properties = self.properties() + ['created', 'modified']
197 for property in all_properties:
198 # check for list or query
199 if property[-6:] == '_count':
200 real_property = property[:-6]
201 if not hasattr(self, real_property):
203 value = getattr(self, real_property)
204 if hasattr(value, '__len__'):
207 elif hasattr(value, 'count'):
209 value = value.count()
211 raise KeyError('Do not understand property %s.' % property)
213 if not hasattr(self, property):
216 value = getattr(self, property)
220 elif isinstance(value, ORMObject):
221 # use repr() for ORMObject types
224 # we want a string for all other types because json cannot
227 data[property] = value
228 return json.dumps(data)
232 Returns the name of the class.
234 return type(self).__name__
238 Returns a short string representation of the object using the first
239 element from the properties() method.
241 primary_property = self.properties()[0]
242 value = getattr(self, primary_property)
243 return '<%s %s>' % (self.classname(), str(value))
247 Returns a human readable form of the object using the properties()
250 return '<%s %s>' % (self.classname(), self.json())
252 def not_null_constraints(self):
254 Returns a list of properties that must be not NULL. Derived classes
255 should override this method if needed.
259 validation_message = \
260 "Validation failed because property '%s' must not be empty in object\n%s"
264 This function validates the not NULL constraints as returned by
265 not_null_constraints(). It raises the DBUpdateError exception if
268 for property in self.not_null_constraints():
269 # TODO: It is a bit awkward that the mapper configuration allow
270 # directly setting the numeric _id columns. We should get rid of it
272 if hasattr(self, property + '_id') and \
273 getattr(self, property + '_id') is not None:
275 if not hasattr(self, property) or getattr(self, property) is None:
276 raise DBUpdateError(self.validation_message % \
277 (property, str(self)))
281 def get(cls, primary_key, session = None):
283 This is a support function that allows getting an object by its primary
286 Architecture.get(3[, session])
288 instead of the more verbose
290 session.query(Architecture).get(3)
292 return session.query(cls).get(primary_key)
294 def session(self, replace = False):
296 Returns the current session that is associated with the object. May
297 return None is object is in detached state.
300 return object_session(self)
302 def clone(self, session = None):
304 Clones the current object in a new session and returns the new clone. A
305 fresh session is created if the optional session parameter is not
306 provided. The function will fail if a session is provided and has
309 RATIONALE: SQLAlchemy's session is not thread safe. This method clones
310 an existing object to allow several threads to work with their own
311 instances of an ORMObject.
313 WARNING: Only persistent (committed) objects can be cloned. Changes
314 made to the original object that are not committed yet will get lost.
315 The session of the new object will always be rolled back to avoid
319 if self.session() is None:
320 raise RuntimeError( \
321 'Method clone() failed for detached object:\n%s' % self)
322 self.session().flush()
323 mapper = object_mapper(self)
324 primary_key = mapper.primary_key_from_instance(self)
325 object_class = self.__class__
327 session = DBConn().session()
328 elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
329 raise RuntimeError( \
330 'Method clone() failed due to unflushed changes in session.')
331 new_object = session.query(object_class).get(primary_key)
333 if new_object is None:
334 raise RuntimeError( \
335 'Method clone() failed for non-persistent object:\n%s' % self)
338 __all__.append('ORMObject')
340 ################################################################################
class Validator(MapperExtension):
    """
    This class calls the validate() method for each instance for the
    'before_update' and 'before_insert' events. A global object validator is
    used for configuring the individual mappers.
    """

    def before_update(self, mapper, connection, instance):
        # Run the ORMObject's not-NULL validation before an UPDATE is issued.
        instance.validate()
        return EXT_CONTINUE

    def before_insert(self, mapper, connection, instance):
        # Run the ORMObject's not-NULL validation before an INSERT is issued.
        instance.validate()
        return EXT_CONTINUE
357 validator = Validator()
359 ################################################################################
class Architecture(ORMObject):
    """ORM class for a row of the 'architecture' table."""

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Support comparing directly against a plain architecture name.
        if not isinstance(val, str):
            # Signal Python to fall back to the normal comparison operator.
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        # Mirror of __eq__ for the inequality operator.
        if not isinstance(val, str):
            # Signal Python to fall back to the normal comparison operator.
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # First entry is the primary property used by ORMObject.__repr__().
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']
384 __all__.append('Architecture')
@session_wrapper
def get_architecture(architecture, session=None):
    """
    Returns database id for given C{architecture}.

    @type architecture: string
    @param architecture: The name of the architecture

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Architecture
    @return: Architecture object for the given arch (None if not present)
    """

    q = session.query(Architecture).filter_by(arch_string=architecture)

    try:
        return q.one()
    except NoResultFound:
        return None
409 __all__.append('get_architecture')
411 # TODO: should be removed because the implementation is too trivial
@session_wrapper
def get_architecture_suites(architecture, session=None):
    """
    Returns list of Suite objects for given C{architecture} name

    @type architecture: str
    @param architecture: Architecture name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Suite objects for the given name (may be empty)
    """

    # NOTE(review): raises AttributeError if the architecture is unknown
    # (get_architecture() returns None in that case).
    return get_architecture(architecture, session).suites
430 __all__.append('get_architecture_suites')
432 ################################################################################
434 class Archive(object):
435 def __init__(self, *args, **kwargs):
439 return '<Archive %s>' % self.archive_name
441 __all__.append('Archive')
@session_wrapper
def get_archive(archive, session=None):
    """
    returns database id for given C{archive}.

    @type archive: string
    @param archive: the name of the archive

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Archive
    @return: Archive object for the given name (None if not present)
    """

    # Archive names are stored lower-case in the database.
    archive = archive.lower()

    q = session.query(Archive).filter_by(archive_name=archive)

    try:
        return q.one()
    except NoResultFound:
        return None
468 __all__.append('get_archive')
470 ################################################################################
472 class BinContents(ORMObject):
473 def __init__(self, file = None, binary = None):
477 def properties(self):
478 return ['file', 'binary']
480 __all__.append('BinContents')
482 ################################################################################
484 class DBBinary(ORMObject):
485 def __init__(self, package = None, source = None, version = None, \
486 maintainer = None, architecture = None, poolfile = None, \
488 self.package = package
490 self.version = version
491 self.maintainer = maintainer
492 self.architecture = architecture
493 self.poolfile = poolfile
494 self.binarytype = binarytype
496 def properties(self):
497 return ['package', 'version', 'maintainer', 'source', 'architecture', \
498 'poolfile', 'binarytype', 'fingerprint', 'install_date', \
499 'suites_count', 'binary_id', 'contents_count']
501 def not_null_constraints(self):
502 return ['package', 'version', 'maintainer', 'source', 'poolfile', \
505 def get_component_name(self):
506 return self.poolfile.location.component.component_name
508 def scan_contents(self):
510 Yields the contents of the package. Only regular files are yielded and
511 the path names are normalized.
513 fullpath = self.poolfile.fullpath
514 debdata = Popen(['dpkg-deb', '--fsys-tarfile', fullpath],
515 stdout = PIPE).stdout
516 tar = TarFile.open(fileobj = debdata, mode = 'r|')
517 for member in tar.getmembers():
519 yield normpath(member.name)
523 __all__.append('DBBinary')
@session_wrapper
def get_suites_binary_in(package, session=None):
    """
    Returns list of Suite objects which given C{package} name is in

    @type package: str
    @param package: DBBinary package name to search for

    @rtype: list
    @return: list of Suite objects for the given package
    """

    return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()
539 __all__.append('get_suites_binary_in')
@session_wrapper
def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
    """
    Returns the component name of the newest binary package in suite_list or
    None if no package is found. The result can be optionally filtered by a list
    of architecture names.

    @type package: str
    @param package: DBBinary package name to search for

    @type suite_list: list of str
    @param suite_list: list of suite_name items

    @type arch_list: list of str
    @param arch_list: optional list of arch_string items that defaults to []
        (NOTE: mutable default is safe here because the list is only read)

    @rtype: str or NoneType
    @return: name of component or None
    """

    q = session.query(DBBinary).filter_by(package = package). \
        join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
    if len(arch_list) > 0:
        q = q.join(DBBinary.architecture). \
            filter(Architecture.arch_string.in_(arch_list))
    # Newest version first; first() gives None when nothing matches.
    binary = q.order_by(desc(DBBinary.version)).first()
    if binary is None:
        return None
    else:
        return binary.get_component_name()
572 __all__.append('get_component_by_package_suite')
574 ################################################################################
576 class BinaryACL(object):
577 def __init__(self, *args, **kwargs):
581 return '<BinaryACL %s>' % self.binary_acl_id
583 __all__.append('BinaryACL')
585 ################################################################################
587 class BinaryACLMap(object):
588 def __init__(self, *args, **kwargs):
592 return '<BinaryACLMap %s>' % self.binary_acl_map_id
594 __all__.append('BinaryACLMap')
596 ################################################################################
601 ArchiveDir "%(archivepath)s";
602 OverrideDir "%(overridedir)s";
603 CacheDir "%(cachedir)s";
608 Packages::Compress ". bzip2 gzip";
609 Sources::Compress ". bzip2 gzip";
614 bindirectory "incoming"
619 BinOverride "override.sid.all3";
620 BinCacheDB "packages-accepted.db";
622 FileList "%(filelist)s";
625 Packages::Extensions ".deb .udeb";
628 bindirectory "incoming/"
631 BinOverride "override.sid.all3";
632 SrcOverride "override.sid.all3.src";
633 FileList "%(filelist)s";
637 class BuildQueue(object):
638 def __init__(self, *args, **kwargs):
642 return '<BuildQueue %s>' % self.queue_name
644 def write_metadata(self, starttime, force=False):
645 # Do we write out metafiles?
646 if not (force or self.generate_metadata):
649 session = DBConn().session().object_session(self)
651 fl_fd = fl_name = ac_fd = ac_name = None
653 arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
654 startdir = os.getcwd()
657 # Grab files we want to include
658 newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
659 # Write file list with newer files
660 (fl_fd, fl_name) = mkstemp()
662 os.write(fl_fd, '%s\n' % n.fullpath)
667 # Write minimal apt.conf
668 # TODO: Remove hardcoding from template
669 (ac_fd, ac_name) = mkstemp()
670 os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
672 'cachedir': cnf["Dir::Cache"],
673 'overridedir': cnf["Dir::Override"],
677 # Run apt-ftparchive generate
678 os.chdir(os.path.dirname(ac_name))
679 os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
681 # Run apt-ftparchive release
682 # TODO: Eww - fix this
683 bname = os.path.basename(self.path)
687 # We have to remove the Release file otherwise it'll be included in the
690 os.unlink(os.path.join(bname, 'Release'))
694 os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
696 # Crude hack with open and append, but this whole section is and should be redone.
697 if self.notautomatic:
698 release=open("Release", "a")
699 release.write("NotAutomatic: yes")
704 keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
705 if cnf.has_key("Dinstall::SigningPubKeyring"):
706 keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
708 os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
710 # Move the files if we got this far
711 os.rename('Release', os.path.join(bname, 'Release'))
713 os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
715 # Clean up any left behind files
742 def clean_and_update(self, starttime, Logger, dryrun=False):
743 """WARNING: This routine commits for you"""
744 session = DBConn().session().object_session(self)
746 if self.generate_metadata and not dryrun:
747 self.write_metadata(starttime)
749 # Grab files older than our execution time
750 older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
756 Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
758 Logger.log(["I: Removing %s from the queue" % o.fullpath])
759 os.unlink(o.fullpath)
762 # If it wasn't there, don't worry
763 if e.errno == ENOENT:
766 # TODO: Replace with proper logging call
767 Logger.log(["E: Could not remove %s" % o.fullpath])
774 for f in os.listdir(self.path):
775 if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
779 r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
780 except NoResultFound:
781 fp = os.path.join(self.path, f)
783 Logger.log(["I: Would remove unused link %s" % fp])
785 Logger.log(["I: Removing unused link %s" % fp])
789 Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
791 def add_file_from_pool(self, poolfile):
792 """Copies a file into the pool. Assumes that the PoolFile object is
793 attached to the same SQLAlchemy session as the Queue object is.
795 The caller is responsible for committing after calling this function."""
796 poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
798 # Check if we have a file of this name or this ID already
799 for f in self.queuefiles:
800 if f.fileid is not None and f.fileid == poolfile.file_id or \
801 f.poolfile.filename == poolfile_basename:
802 # In this case, update the BuildQueueFile entry so we
803 # don't remove it too early
804 f.lastused = datetime.now()
805 DBConn().session().object_session(poolfile).add(f)
808 # Prepare BuildQueueFile object
809 qf = BuildQueueFile()
810 qf.build_queue_id = self.queue_id
811 qf.lastused = datetime.now()
812 qf.filename = poolfile_basename
814 targetpath = poolfile.fullpath
815 queuepath = os.path.join(self.path, poolfile_basename)
819 # We need to copy instead of symlink
821 utils.copy(targetpath, queuepath)
822 # NULL in the fileid field implies a copy
825 os.symlink(targetpath, queuepath)
826 qf.fileid = poolfile.file_id
830 # Get the same session as the PoolFile is using and add the qf to it
831 DBConn().session().object_session(poolfile).add(qf)
836 __all__.append('BuildQueue')
@session_wrapper
def get_build_queue(queuename, session=None):
    """
    Returns BuildQueue object for given C{queue name}.

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: BuildQueue
    @return: BuildQueue object for the given queue (None if not present)
    """

    q = session.query(BuildQueue).filter_by(queue_name=queuename)

    try:
        return q.one()
    except NoResultFound:
        return None
862 __all__.append('get_build_queue')
864 ################################################################################
866 class BuildQueueFile(object):
867 def __init__(self, *args, **kwargs):
871 return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
875 return os.path.join(self.buildqueue.path, self.filename)
878 __all__.append('BuildQueueFile')
880 ################################################################################
882 class ChangePendingBinary(object):
883 def __init__(self, *args, **kwargs):
887 return '<ChangePendingBinary %s>' % self.change_pending_binary_id
889 __all__.append('ChangePendingBinary')
891 ################################################################################
893 class ChangePendingFile(object):
894 def __init__(self, *args, **kwargs):
898 return '<ChangePendingFile %s>' % self.change_pending_file_id
900 __all__.append('ChangePendingFile')
902 ################################################################################
904 class ChangePendingSource(object):
905 def __init__(self, *args, **kwargs):
909 return '<ChangePendingSource %s>' % self.change_pending_source_id
911 __all__.append('ChangePendingSource')
913 ################################################################################
class Component(ORMObject):
    """ORM class for a row of the 'component' table."""

    def __init__(self, component_name = None):
        self.component_name = component_name

    def __eq__(self, val):
        # Support comparing directly against a plain component name.
        if not isinstance(val, str):
            # Signal Python to fall back to the normal comparison operator.
            return NotImplemented
        return self.component_name == val

    def __ne__(self, val):
        # Mirror of __eq__ for the inequality operator.
        if not isinstance(val, str):
            # Signal Python to fall back to the normal comparison operator.
            return NotImplemented
        return self.component_name != val

    def properties(self):
        # First entry is the primary property used by ORMObject.__repr__().
        return ['component_name', 'component_id', 'description', \
            'location_count', 'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        return ['component_name']
939 __all__.append('Component')
@session_wrapper
def get_component(component, session=None):
    """
    Returns database id for given C{component}.

    @type component: string
    @param component: The name of the component

    @rtype: Component
    @return: the Component object for the given component (None if not present)
    """

    # Component names are stored lower-case in the database.
    component = component.lower()

    q = session.query(Component).filter_by(component_name=component)

    try:
        return q.one()
    except NoResultFound:
        return None
962 __all__.append('get_component')
964 ################################################################################
966 class DBConfig(object):
967 def __init__(self, *args, **kwargs):
971 return '<DBConfig %s>' % self.name
973 __all__.append('DBConfig')
975 ################################################################################
@session_wrapper
def get_or_set_contents_file_id(filename, session=None):
    """
    Returns database id for given filename.

    If no matching file is found, a row is inserted.

    @type filename: string
    @param filename: The filename
    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.

    @rtype: int
    @return: the database id for the given filename
    """

    q = session.query(ContentFilename).filter_by(filename=filename)

    try:
        ret = q.one().cafilename_id
    except NoResultFound:
        # Not known yet: insert a new row.
        cf = ContentFilename()
        cf.filename = filename
        session.add(cf)
        # commit (private session) or flush (caller's session) so the
        # new row gets its id assigned.
        session.commit_or_flush()
        ret = cf.cafilename_id

    return ret
1008 __all__.append('get_or_set_contents_file_id')
1011 def get_contents(suite, overridetype, section=None, session=None):
1013 Returns contents for a suite / overridetype combination, limiting
1014 to a section if not None.
1017 @param suite: Suite object
1019 @type overridetype: OverrideType
1020 @param overridetype: OverrideType object
1022 @type section: Section
1023 @param section: Optional section object to limit results to
1025 @type session: SQLAlchemy
1026 @param session: Optional SQL session object (a temporary one will be
1027 generated if not supplied)
1029 @rtype: ResultsProxy
1030 @return: ResultsProxy object set up to return tuples of (filename, section,
1034 # find me all of the contents for a given suite
1035 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
1039 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
1040 JOIN content_file_names n ON (c.filename=n.id)
1041 JOIN binaries b ON (b.id=c.binary_pkg)
1042 JOIN override o ON (o.package=b.package)
1043 JOIN section s ON (s.id=o.section)
1044 WHERE o.suite = :suiteid AND o.type = :overridetypeid
1045 AND b.type=:overridetypename"""
1047 vals = {'suiteid': suite.suite_id,
1048 'overridetypeid': overridetype.overridetype_id,
1049 'overridetypename': overridetype.overridetype}
1051 if section is not None:
1052 contents_q += " AND s.id = :sectionid"
1053 vals['sectionid'] = section.section_id
1055 contents_q += " ORDER BY fn"
1057 return session.execute(contents_q, vals)
1059 __all__.append('get_contents')
1061 ################################################################################
1063 class ContentFilepath(object):
1064 def __init__(self, *args, **kwargs):
1068 return '<ContentFilepath %s>' % self.filepath
1070 __all__.append('ContentFilepath')
@session_wrapper
def get_or_set_contents_path_id(filepath, session=None):
    """
    Returns database id for given path.

    If no matching file is found, a row is inserted.

    @type filepath: string
    @param filepath: The filepath

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.

    @rtype: int
    @return: the database id for the given path
    """

    q = session.query(ContentFilepath).filter_by(filepath=filepath)

    try:
        ret = q.one().cafilepath_id
    except NoResultFound:
        # Not known yet: insert a new row.
        cf = ContentFilepath()
        cf.filepath = filepath
        session.add(cf)
        # commit (private session) or flush (caller's session) so the
        # new row gets its id assigned.
        session.commit_or_flush()
        ret = cf.cafilepath_id

    return ret
1104 __all__.append('get_or_set_contents_path_id')
1106 ################################################################################
1108 class ContentAssociation(object):
1109 def __init__(self, *args, **kwargs):
1113 return '<ContentAssociation %s>' % self.ca_id
1115 __all__.append('ContentAssociation')
1117 def insert_content_paths(binary_id, fullpaths, session=None):
1119 Make sure given path is associated with given binary id
1121 @type binary_id: int
1122 @param binary_id: the id of the binary
1123 @type fullpaths: list
1124 @param fullpaths: the list of paths of the file being associated with the binary
1125 @type session: SQLAlchemy session
1126 @param session: Optional SQLAlchemy session. If this is passed, the caller
1127 is responsible for ensuring a transaction has begun and committing the
1128 results or rolling back based on the result code. If not passed, a commit
1129 will be performed at the end of the function, otherwise the caller is
1130 responsible for commiting.
1132 @return: True upon success
1135 privatetrans = False
1137 session = DBConn().session()
1142 def generate_path_dicts():
1143 for fullpath in fullpaths:
1144 if fullpath.startswith( './' ):
1145 fullpath = fullpath[2:]
1147 yield {'filename':fullpath, 'id': binary_id }
1149 for d in generate_path_dicts():
1150 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
1159 traceback.print_exc()
1161 # Only rollback if we set up the session ourself
1168 __all__.append('insert_content_paths')
1170 ################################################################################
1172 class DSCFile(object):
1173 def __init__(self, *args, **kwargs):
1177 return '<DSCFile %s>' % self.dscfile_id
1179 __all__.append('DSCFile')
@session_wrapper
def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
    """
    Returns a list of DSCFiles which may be empty

    @type dscfile_id: int (optional)
    @param dscfile_id: the dscfile_id of the DSCFiles to find

    @type source_id: int (optional)
    @param source_id: the source id related to the DSCFiles to find

    @type poolfile_id: int (optional)
    @param poolfile_id: the poolfile id related to the DSCFiles to find

    @rtype: list
    @return: Possibly empty list of DSCFiles
    """

    q = session.query(DSCFile)

    # Each filter is optional; with no arguments all DSCFiles are returned.
    if dscfile_id is not None:
        q = q.filter_by(dscfile_id=dscfile_id)

    if source_id is not None:
        q = q.filter_by(source_id=source_id)

    if poolfile_id is not None:
        q = q.filter_by(poolfile_id=poolfile_id)

    return q.all()
1212 __all__.append('get_dscfiles')
1214 ################################################################################
class PoolFile(ORMObject):
    """ORM class for a row of the 'files' table (a file in the pool)."""

    def __init__(self, filename = None, location = None, filesize = -1, \
        md5sum = None):
        self.filename = filename
        self.location = location
        self.filesize = filesize
        self.md5sum = md5sum

    @property
    def fullpath(self):
        # Absolute path: location's base path joined with the
        # pool-relative filename.
        return os.path.join(self.location.path, self.filename)

    def is_valid(self, filesize = -1, md5sum = None):
        # 'long' is Python 2; this module is Python 2 code throughout.
        return self.filesize == long(filesize) and self.md5sum == md5sum

    def properties(self):
        # First entry is the primary property used by ORMObject.__repr__().
        return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
            'sha256sum', 'location', 'source', 'binary', 'last_used']

    def not_null_constraints(self):
        return ['filename', 'md5sum', 'location']
1238 __all__.append('PoolFile')
@session_wrapper
def check_poolfile(filename, filesize, md5sum, location_id, session=None):
    """
    Returns a tuple:
    (ValidFileFound [boolean], PoolFile object or None)

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @type filesize: int
    @param filesize: the size of the file to check against the DB

    @type md5sum: string
    @param md5sum: the md5sum of the file to check against the DB

    @type location_id: int
    @param location_id: the id of the location to look in

    @rtype: tuple
    @return: Tuple of length 2.
        - If valid pool file found: (C{True}, C{PoolFile object})
        - If valid pool file not found:
            - (C{False}, C{None}) if no file found
            - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
    """

    # Look the file up within the given location only.
    poolfile = session.query(Location).get(location_id). \
        files.filter_by(filename=filename).first()
    valid = False
    if poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum):
        valid = True

    return (valid, poolfile)
1274 __all__.append('check_poolfile')
# TODO: the implementation can trivially be inlined at the place where the
# function is called

def get_poolfile_by_id(file_id, session=None):
    """
    Look up the PoolFile with primary key C{file_id}.

    @param file_id: the id of the file to look for

    @rtype: PoolFile or None
    @return: either the PoolFile object or None
    """
    # Query.get() hands back None when no row carries this primary key.
    poolfiles = session.query(PoolFile)
    return poolfiles.get(file_id)

__all__.append('get_poolfile_by_id')
def get_poolfile_like_name(filename, session=None):
    """
    Returns an array of PoolFile objects which are like the given name

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @return: array of PoolFile objects
    """
    # TODO: There must be a way of properly using bind parameters with %FOO%
    # Matches any pool entry whose path ends in "/<filename>".
    q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
    # NOTE(review): the final "return q.all()" line is outside this excerpt.

__all__.append('get_poolfile_like_name')
def add_poolfile(filename, datadict, location_id, session=None):
    """
    Add a new file to the pool

    @type filename: string
    @param filename: filename

    @type datadict: dict
    @param datadict: dict with needed data (must contain at least the
    "size", "md5sum", "sha1sum" and "sha256sum" keys, as read below)

    @type location_id: int
    @param location_id: database id of the location

    @return: the PoolFile object created
    """
    poolfile = PoolFile()
    poolfile.filename = filename
    poolfile.filesize = datadict["size"]
    poolfile.md5sum = datadict["md5sum"]
    poolfile.sha1sum = datadict["sha1sum"]
    poolfile.sha256sum = datadict["sha256sum"]
    poolfile.location_id = location_id

    session.add(poolfile)
    # Flush to get a file id (NB: This is not a commit)

__all__.append('add_poolfile')
1346 ################################################################################
class Fingerprint(ORMObject):
    """An OpenPGP key fingerprint known to the archive."""

    def __init__(self, fingerprint = None):
        self.fingerprint = fingerprint

    def properties(self):
        # NOTE(review): the continuation of this list is outside this excerpt.
        return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \

    def not_null_constraints(self):
        return ['fingerprint']

__all__.append('Fingerprint')
def get_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).

    @return: the Fingerprint object for the given fpr or None
    """
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    # NOTE(review): the try/one() lookup and the not-found return value
    # are not visible in this excerpt.
    except NoResultFound:

__all__.append('get_fingerprint')
def get_or_set_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    If no matching fpr is found, a row is inserted.

    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @return: the Fingerprint object for the given fpr
    """
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    # On a miss, insert a new row so the caller always gets an object back.
    except NoResultFound:
        fingerprint = Fingerprint()
        fingerprint.fingerprint = fpr
        session.add(fingerprint)
        # commit_or_flush: commit for a private session, flush otherwise.
        session.commit_or_flush()

__all__.append('get_or_set_fingerprint')
1423 ################################################################################
# Helper routine for Keyring class
def get_ldap_name(entry):
    """Assemble a display name from an LDAP entry's cn/mn/sn attributes,
    skipping parts that are empty or a bare "-"."""
    for k in ["cn", "mn", "sn"]:
        # NOTE(review): the line populating "ret" from the entry is not
        # visible in this excerpt.
        if ret and ret[0] != "" and ret[0] != "-":
    return " ".join(name)
1434 ################################################################################
class Keyring(object):
    """A GPG keyring known to the archive, with helpers to parse its keys
    and synchronise user information with the database / LDAP."""

    # gpg command template; %s is replaced with the keyring path.
    gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
                     " --with-colons --fingerprint --fingerprint"

    def __init__(self, *args, **kwargs):

        return '<Keyring %s>' % self.keyring_name

    def de_escape_gpg_str(self, txt):
        """Replace gpg's \\xNN escape sequences with the characters they encode."""
        esclist = re.split(r'(\\x..)', txt)
        # Odd indices of the split hold the \xNN escapes themselves.
        for x in range(1,len(esclist),2):
            esclist[x] = "%c" % (int(esclist[x][2:],16))
        return "".join(esclist)

    def parse_address(self, uid):
        """parses uid and returns a tuple of real name and email address"""
        (name, address) = email.Utils.parseaddr(uid)
        # Strip any parenthesised comment from the real-name part.
        name = re.sub(r"\s*[(].*[)]", "", name)
        name = self.de_escape_gpg_str(name)
        return (name, address)

    def load_keys(self, keyring):
        """Parse gpg --with-colons output for C{keyring}, populating
        self.keys (per-key name/email/fingerprints) and self.fpr_lookup."""
        if not self.keyring_id:
            raise Exception('Must be initialized with database information')
        # Python 2 style; os.popen/xreadlines are long-deprecated.
        k = os.popen(self.gpg_invocation % keyring, "r")
        for line in k.xreadlines():
            field = line.split(":")
            if field[0] == "pub":
                # New primary key: field 9 carries the uid string.
                (name, addr) = self.parse_address(field[9])
                self.keys[key]["email"] = addr
                self.keys[key]["name"] = name
                self.keys[key]["fingerprints"] = []
            elif key and field[0] == "sub" and len(field) >= 12:
                # Subkey: remember whether it has the signing capability.
                signingkey = ("s" in field[11])
            elif key and field[0] == "uid":
                (name, addr) = self.parse_address(field[9])
                # Prefer the first uid that actually contains an address.
                if "email" not in self.keys[key] and "@" in addr:
                    self.keys[key]["email"] = addr
                    self.keys[key]["name"] = name
            elif signingkey and field[0] == "fpr":
                self.keys[key]["fingerprints"].append(field[9])
                self.fpr_lookup[field[9]] = key

    def import_users_from_ldap(self, session):
        """Match keyring fingerprints against LDAP and record uids in the
        database.  Returns (byname, byuid) lookup dictionaries."""
        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]

        # Anonymous bind is sufficient for this read-only search.
        l = ldap.open(LDAPServer)
        l.simple_bind_s("","")
        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
               ["uid", "keyfingerprint", "cn", "mn", "sn"])

        ldap_fin_uid_id = {}

            uid = entry["uid"][0]
            name = get_ldap_name(entry)
            fingerprints = entry["keyFingerPrint"]

            for f in fingerprints:
                key = self.fpr_lookup.get(f, None)
                # Skip fingerprints not present in this keyring.
                if key not in self.keys:
                self.keys[key]["uid"] = uid

            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, name)
            byname[uid] = (keyid, name)

        return (byname, byuid)

    def generate_users_from_keyring(self, format, session):
        """Derive uids from key email addresses using C{format} (a %-style
        template).  Returns (byname, byuid) lookup dictionaries."""
        for x in self.keys.keys():
            if "email" not in self.keys[x]:
                # Key had no usable uid with an email address.
                self.keys[x]["uid"] = format % "invalid-uid"
                uid = format % self.keys[x]["email"]
                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, self.keys[x]["name"])
                byname[uid] = (keyid, self.keys[x]["name"])
                self.keys[x]["uid"] = uid

            # Record a single catch-all entry for keys lacking an email.
            uid = format % "invalid-uid"
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, "ungeneratable user id")
            byname[uid] = (keyid, "ungeneratable user id")

        return (byname, byuid)

__all__.append('Keyring')
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @return: the Keyring object for this keyring
    """
    q = session.query(Keyring).filter_by(keyring_name=keyring)

    # NOTE(review): the try/one() lookup and the not-found return value
    # are not visible in this excerpt.
    except NoResultFound:

__all__.append('get_keyring')
1580 ################################################################################
class KeyringACLMap(object):
    """Association between a keyring and an ACL; attributes are populated
    by the SQLAlchemy mapper configured elsewhere in this file."""

    def __init__(self, *args, **kwargs):

        # NOTE(review): the enclosing __repr__ def line is outside this excerpt.
        return '<KeyringACLMap %s>' % self.keyring_acl_map_id

__all__.append('KeyringACLMap')
1591 ################################################################################
class DBChange(object):
    """A .changes upload recorded in the database."""

    def __init__(self, *args, **kwargs):

        return '<DBChange %s>' % self.changesname

    def clean_from_queue(self):
        """Detach this upload from its policy queue and drop its queue-side
        file references."""
        session = DBConn().session().object_session(self)

        # Remove changes_pool_files entries

        # Remove changes_pending_files references

        # Clear out of queue
        self.in_queue = None
        self.approved_for_id = None

__all__.append('DBChange')
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: DBChange object for the given filename (C{None} if not present)
    """
    q = session.query(DBChange).filter_by(changesname=filename)

    # NOTE(review): the try/one() lookup and the not-found return value
    # are not visible in this excerpt.
    except NoResultFound:

__all__.append('get_dbchange')
1640 ################################################################################
class Location(ORMObject):
    """A filesystem location holding pool files for an archive/component."""

    def __init__(self, path = None, component = None):
        self.component = component
        # the column 'type' should go away, see comment at mapper
        self.archive_type = 'pool'

    def properties(self):
        # NOTE(review): the continuation of this list is outside this excerpt.
        return ['path', 'location_id', 'archive_type', 'component', \

    def not_null_constraints(self):
        return ['path', 'archive_type']

__all__.append('Location')
def get_location(location, component=None, archive=None, session=None):
    """
    Returns Location object for the given combination of location, component
    and archive.

    @type location: string
    @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}

    @type component: string
    @param component: the component name (if None, no restriction applied)

    @type archive: string
    @param archive: the archive name (if None, no restriction applied)

    @rtype: Location / None
    @return: Either a Location object or None if one can't be found
    """
    q = session.query(Location).filter_by(path=location)

    # Restrict by archive and/or component only when requested.
    if archive is not None:
        q = q.join(Archive).filter_by(archive_name=archive)

    if component is not None:
        q = q.join(Component).filter_by(component_name=component)

    # NOTE(review): the try/one() lookup is not visible in this excerpt.
    except NoResultFound:

__all__.append('get_location')
1692 ################################################################################
class Maintainer(ORMObject):
    """A package maintainer as stored in the maintainer table."""

    def __init__(self, name = None):

    def properties(self):
        return ['name', 'maintainer_id']

    def not_null_constraints(self):

    def get_split_maintainer(self):
        # Defensive: rows created without a name yield empty components.
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())

__all__.append('Maintainer')
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @return: the Maintainer object for the given maintainer
    """
    q = session.query(Maintainer).filter_by(name=name)

    # On a miss, insert a new row so the caller always gets an object back.
    except NoResultFound:
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()

__all__.append('get_or_set_maintainer')
def get_maintainer(maintainer_id, session=None):
    """
    Look up the Maintainer row behind C{maintainer_id}; gives None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @return: the Maintainer with this C{maintainer_id}
    """
    # Query.get() already returns None for an unknown primary key.
    maintainers = session.query(Maintainer)
    return maintainers.get(maintainer_id)

__all__.append('get_maintainer')
1763 ################################################################################
class NewComment(object):
    """An ftpmaster comment attached to a (package, version) in the NEW queue;
    attributes are populated by the SQLAlchemy mapper configured elsewhere."""

    def __init__(self, *args, **kwargs):

        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)

__all__.append('NewComment')
def has_new_comment(package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """
    q = session.query(NewComment)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    # ">" already yields a bool; the previous bool(...) wrapper was redundant.
    return q.count() > 0

__all__.append('has_new_comment')
def get_new_comments(package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: A (possibly empty) list of NewComment objects will be returned
    """
    # Each filter is optional; unset parameters leave the query unrestricted.
    q = session.query(NewComment)
    if package is not None: q = q.filter_by(package=package)
    if version is not None: q = q.filter_by(version=version)
    if comment_id is not None: q = q.filter_by(comment_id=comment_id)
    # NOTE(review): the final "return q.all()" line is outside this excerpt.

__all__.append('get_new_comments')
1833 ################################################################################
class Override(ORMObject):
    """An override entry: where a package sits (section/priority) within a
    suite/component for a given override type."""

    def __init__(self, package = None, suite = None, component = None, overridetype = None, \
        section = None, priority = None):
        self.package = package
        self.component = component
        self.overridetype = overridetype
        self.section = section
        self.priority = priority

    def properties(self):
        # NOTE(review): the continuation of this list is outside this excerpt.
        return ['package', 'suite', 'component', 'overridetype', 'section', \

    def not_null_constraints(self):
        return ['package', 'suite', 'component', 'overridetype', 'section']

__all__.append('Override')
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to. If
    None, don't limit. Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
    limit to. If None, don't limit. Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
    limit to. If None, don't limit. Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: A (possibly empty) list of Override objects will be returned
    """
    q = session.query(Override)
    q = q.filter_by(package=package)

    # Scalars are promoted to single-element lists so in_() can be used.
    if suite is not None:
        if not isinstance(suite, list): suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list): component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list): overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))
    # NOTE(review): the final "return q.all()" line is outside this excerpt.

__all__.append('get_override')
1902 ################################################################################
class OverrideType(ORMObject):
    """A type of override entry (the override_type table)."""

    def __init__(self, overridetype = None):
        self.overridetype = overridetype

    def properties(self):
        return ['overridetype', 'overridetype_id', 'overrides_count']

    def not_null_constraints(self):
        return ['overridetype']

__all__.append('OverrideType')
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: the database id for the given override type
    """
    q = session.query(OverrideType).filter_by(overridetype=override_type)

    # NOTE(review): the try/one() lookup and the not-found return value
    # are not visible in this excerpt.
    except NoResultFound:

__all__.append('get_override_type')
1941 ################################################################################
class DebContents(object):
    """A file path shipped by a .deb binary package (contents listing);
    attributes are populated by the SQLAlchemy mapper configured elsewhere."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Fixed typo: the class name was misspelled "DebConetnts".
        return '<DebContents %s: %s>' % (self.package.package, self.file)
1950 __all__.append('DebContents')
class UdebContents(object):
    """A file path shipped by a .udeb package (contents listing);
    attributes are populated by the SQLAlchemy mapper configured elsewhere."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Fixed typo: the class name was misspelled "UdebConetnts".
        return '<UdebContents %s: %s>' % (self.package.package, self.file)
1960 __all__.append('UdebContents')
class PendingBinContents(object):
    """Contents entry for a binary package not yet accepted into the pool;
    attributes are populated by the SQLAlchemy mapper configured elsewhere."""

    def __init__(self, *args, **kwargs):

        return '<PendingBinContents %s>' % self.contents_id

__all__.append('PendingBinContents')
def insert_pending_content_paths(package,
    """
    Make sure given paths are temporarily associated with given
    package

    @param package: the package to associate with should have been read in from the binary control file
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session. If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code. If not passed, a commit
    will be performed at the end of the function

    @return: True upon success, False if there is a problem
    """
    # Track whether we created the session ourselves ("private transaction").
    privatetrans = False
        session = DBConn().session()

    arch = get_architecture(package['Architecture'], session)
    arch_id = arch.arch_id

    # Remove any already existing recorded files for this package
    q = session.query(PendingBinContents)
    q = q.filter_by(package=package['Package'])
    q = q.filter_by(version=package['Version'])
    q = q.filter_by(architecture=arch_id)

    for fullpath in fullpaths:
        # Normalise leading "./" produced by tar-style listings.
        if fullpath.startswith( "./" ):
            fullpath = fullpath[2:]

        pca = PendingBinContents()
        pca.package = package['Package']
        pca.version = package['Version']
        pca.architecture = arch_id
        # NOTE(review): the branch choosing between the two type codes is
        # not visible in this excerpt.
        pca.type = 8 # gross
        pca.type = 7 # also gross

    # Only commit if we set up the session ourself
    except Exception, e:
        traceback.print_exc()

        # Only rollback if we set up the session ourself

__all__.append('insert_pending_content_paths')
2046 ################################################################################
class PolicyQueue(object):
    """A policy queue (e.g. for uploads awaiting review); attributes are
    populated by the SQLAlchemy mapper configured elsewhere."""

    def __init__(self, *args, **kwargs):

        return '<PolicyQueue %s>' % self.queue_name

__all__.append('PolicyQueue')
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(queue_name=queuename)

    # NOTE(review): the try/one() lookup and the not-found return value
    # are not visible in this excerpt.
    except NoResultFound:

__all__.append('get_policy_queue')
def get_policy_queue_from_path(pathname, session=None):
    """
    Returns PolicyQueue object for given C{path name}

    @type pathname: string
    @param pathname: The path

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(path=pathname)

    # NOTE(review): the try/one() lookup and the not-found return value
    # are not visible in this excerpt.
    except NoResultFound:

__all__.append('get_policy_queue_from_path')
2107 ################################################################################
class Priority(ORMObject):
    """A package priority value with its ordering level."""

    def __init__(self, priority = None, level = None):
        self.priority = priority
        # NOTE(review): the assignment of self.level is outside this excerpt.

    def properties(self):
        return ['priority', 'priority_id', 'level', 'overrides_count']

    def not_null_constraints(self):
        return ['priority', 'level']

    def __eq__(self, val):
        # Allow comparing a Priority directly against a plain name string.
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented

__all__.append('Priority')
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Priority object for the given priority
    """
    q = session.query(Priority).filter_by(priority=priority)

    # NOTE(review): the try/one() lookup and the not-found return value
    # are not visible in this excerpt.
    except NoResultFound:

__all__.append('get_priority')
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: dictionary of priority names -> id mappings
    """
    q = session.query(Priority)
        # NOTE(review): the enclosing loop over the query results is not
        # visible in this excerpt.
        ret[x.priority] = x.priority_id

__all__.append('get_priorities')
2181 ################################################################################
class Section(ORMObject):
    """An archive section (the section table)."""

    def __init__(self, section = None):
        self.section = section

    def properties(self):
        return ['section', 'section_id', 'overrides_count']

    def not_null_constraints(self):
        # NOTE(review): the return statement is outside this excerpt.

    def __eq__(self, val):
        # Allow comparing a Section directly against a plain name string.
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented

__all__.append('Section')
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Section object for the given section name
    """
    q = session.query(Section).filter_by(section=section)

    # NOTE(review): the try/one() lookup and the not-found return value
    # are not visible in this excerpt.
    except NoResultFound:

__all__.append('get_section')
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: dictionary of section names -> id mappings
    """
    q = session.query(Section)
        # NOTE(review): the enclosing loop over the query results is not
        # visible in this excerpt.
        ret[x.section] = x.section_id

__all__.append('get_sections')
2254 ################################################################################
class DBSource(ORMObject):
    """A source package version stored in the archive (the source table)."""

    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date

    def properties(self):
        # Attribute names exposed via the ORMObject machinery.
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \
            'install_date', 'binaries_count']

    def not_null_constraints(self):
        # Fixed: 'install_date' was listed twice in the original.
        return ['source', 'version', 'install_date', 'maintainer', \
            'changedby', 'poolfile']

__all__.append('DBSource')
# NOTE(review): "suites" uses a mutable default argument; safe only as
# long as no caller (or this body) ever mutates it.
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
    1. exact match => 1.0-3
    2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: returns 1 if a source with expected version is found, otherwise 0
    """
    from daklib.regexes import re_bin_only_nmu
    # Strip a binNMU suffix (e.g. "+b1") to recover the source's own version.
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))

        # source must exist in suite X, or in some other suite that's
        # mapped to X, recursively... silent-maps are counted too,
        # unreleased-maps aren't.
        maps = cnf.ValueList("SuiteMappings")[:]
        maps = [ m.split() for m in maps ]
        maps = [ (x[1], x[2]) for x in maps
                 if x[0] == "map" or x[0] == "silent-map" ]

        # Transitively expand the suite set along the mappings.
        for (from_, to) in maps:
            if from_ in s and to not in s:

        q = q.filter(DBSource.suites.any(Suite.suite_name.in_(s)))

    # No source found so return not ok

__all__.append('source_exists')
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @param source: DBSource package name to search for

    @return: list of Suite objects for the given source
    """
    # Any suite containing at least one DBSource row with this name matches.
    q = session.query(Suite)
    q = q.filter(Suite.sources.any(source=source))
    return q.all()

__all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect. If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: list of DBSource objects for the given name (may be empty)
    """
    q = session.query(DBSource).filter_by(source=source)

    # Optional narrowing filters.
    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)
    # NOTE(review): the final "return q.all()" line is outside this excerpt.

__all__.append('get_sources_from_name')
# FIXME: This function fails badly if it finds more than 1 source package and
# its implementation is trivial enough to be inlined.
def get_source_in_suite(source, suite, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @param suite: the suite name

    @return: the version for I{source} in I{suite}
    """
    q = get_suite(suite, session).get_sources(source)

    # NOTE(review): the try/one() lookup and the not-found return value
    # are not visible in this excerpt.
    except NoResultFound:

__all__.append('get_source_in_suite')
2418 ################################################################################
def add_dsc_to_db(u, filename, session=None):
    """Record the source package described by C{filename}'s .dsc in the
    database: the source row itself, its pool files, dsc_files entries and
    src_uploaders.

    Returns (source, dsc_component, dsc_location_id, pfs) where pfs is the
    list of PoolFile objects touched.
    """
    entry = u.pkg.files[filename]

    source.source = u.pkg.dsc["source"]
    source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
    source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
    source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    source.install_date = datetime.now().date()

    dsc_component = entry["component"]
    dsc_location_id = entry["location id"]

    source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")

    # Set up a new poolfile if necessary
    if not entry.has_key("files id") or not entry["files id"]:
        filename = entry["pool name"] + filename
        poolfile = add_poolfile(filename, entry, dsc_location_id, session)
        pfs.append(poolfile)
        entry["files id"] = poolfile.file_id

    source.poolfile_id = entry["files id"]

    # Attach the source to every suite named in the changes' distribution.
    suite_names = u.pkg.changes["distribution"].keys()
    source.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Add the source files to the DB (files and dsc_files)
    dscfile.source_id = source.source_id
    dscfile.poolfile_id = entry["files id"]
    session.add(dscfile)

    for dsc_file, dentry in u.pkg.dsc_files.items():
        df.source_id = source.source_id

        # If the .orig tarball is already in the pool, it's
        # files id is stored in dsc_files by check_dsc().
        files_id = dentry.get("files id", None)

        # Find the entry in the files hash
        # TODO: Bail out here properly
        for f, e in u.pkg.files.items():

        if files_id is None:
            filename = dfentry["pool name"] + dsc_file

            (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
            # FIXME: needs to check for -1/-2 and or handle exception
            if found and obj is not None:
                files_id = obj.file_id

        # If still not found, add it
        if files_id is None:
            # HACK: Force sha1sum etc into dentry
            dentry["sha1sum"] = dfentry["sha1sum"]
            dentry["sha256sum"] = dfentry["sha256sum"]
            poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
            pfs.append(poolfile)
            files_id = poolfile.file_id

        poolfile = get_poolfile_by_id(files_id, session)
        if poolfile is None:
            utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
        pfs.append(poolfile)

        df.poolfile_id = files_id

    # Add the src_uploaders to the DB
    uploader_ids = [source.maintainer_id]
    if u.pkg.dsc.has_key("uploaders"):
        # Uploaders are separated by ">, "; retag with tabs then split.
        for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
            uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)

    # De-duplicate uploader ids, warning on repeats.
    for up_id in uploader_ids:
        if added_ids.has_key(up_id):
            utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))

        su.maintainer_id = up_id
        su.source_id = source.source_id

    return source, dsc_component, dsc_location_id, pfs

__all__.append('add_dsc_to_db')
def add_deb_to_db(u, filename, session=None):
    """
    Contrary to what you might expect, this routine deals with both
    debs and udebs. That info is in 'dbtype', whilst 'type' is
    'deb' for both of them
    """
    entry = u.pkg.files[filename]

    bin.package = entry["package"]
    bin.version = entry["version"]
    bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
    bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    bin.arch_id = get_architecture(entry["architecture"], session).arch_id
    bin.binarytype = entry["dbtype"]

    # Resolve the pool location for this file, filling it in if missing.
    filename = entry["pool name"] + filename
    fullpath = os.path.join(cnf["Dir::Pool"], filename)
    if not entry.get("location id", None):
        entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id

    # Reuse an existing pool file when one is recorded, else create one.
    if entry.get("files id", None):
        poolfile = get_poolfile_by_id(bin.poolfile_id)
        bin.poolfile_id = entry["files id"]
        poolfile = add_poolfile(filename, entry, entry["location id"], session)
        bin.poolfile_id = entry["files id"] = poolfile.file_id

    # A binary must map to exactly one source package.
    bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
    if len(bin_sources) != 1:
        raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
                                  (bin.package, bin.version, entry["architecture"],
                                   filename, bin.binarytype, u.pkg.changes["fingerprint"])

    bin.source_id = bin_sources[0].source_id

    # Add and flush object so it has an ID

    # Attach the binary to every suite named in the changes' distribution.
    suite_names = u.pkg.changes["distribution"].keys()
    bin.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Deal with contents - disabled for now
    #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
    #    print "REJECT\nCould not determine contents of package %s" % bin.package
    #    session.rollback()
    #    raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)

__all__.append('add_deb_to_db')
2588 ################################################################################
class SourceACL(object):
    """Trivial ORM class for the source_acl table (mapped in DBConn.__setupmappers)."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the return below is __repr__-style output; the
        # __repr__ header is not visible in this excerpt -- confirm.
        return '<SourceACL %s>' % self.source_acl_id

__all__.append('SourceACL')
2599 ################################################################################
class SrcFormat(object):
    """ORM class for the src_format table: a source package format name."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): repr-style output; __repr__ header not visible here.
        return '<SrcFormat %s>' % (self.format_name)

__all__.append('SrcFormat')
2610 ################################################################################
class SrcUploader(object):
    """ORM class for the src_uploaders table: links a source to an uploader
    (Maintainer) -- see the SrcUploader mapper in DBConn.__setupmappers."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): repr-style output; __repr__ header not visible here.
        return '<SrcUploader %s>' % self.uploader_id

__all__.append('SrcUploader')
2621 ################################################################################
# (DisplayLabel, attribute-name) pairs, in display order: consumed via
# getattr() in class Suite below to render a suite as "Label: value" lines.
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
2639 # Why the heck don't we have any UNIQUE constraints in table suite?
2640 # TODO: Add UNIQUE constraints for appropriate columns.
class Suite(ORMObject):
    """A distribution suite (e.g. unstable); row of the suite table."""

    def __init__(self, suite_name = None, version = None):
        self.suite_name = suite_name
        self.version = version

    def properties(self):
        # Attributes shown by ORMObject's generic repr/serialisation.
        return ['suite_name', 'version', 'sources_count', 'binaries_count', \

    def not_null_constraints(self):
        # Columns that must be set before this object may be written.
        return ['suite_name', 'version']

    def __eq__(self, val):
        # Allow comparing a Suite directly against a suite-name string.
        if isinstance(val, str):
            return (self.suite_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.suite_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

        # NOTE(review): the loop below renders this suite using the
        # SUITE_FIELDS table; its enclosing method header ('details'/'__str__')
        # is not visible in this excerpt -- confirm against full source.
        for disp, field in SUITE_FIELDS:
            val = getattr(self, field, None)
            ret.append("%s: %s" % (disp, val))
        return "\n".join(ret)

    def get_architectures(self, skipsrc=False, skipall=False):
        """
        Returns list of Architecture objects

        @type skipsrc: boolean
        @param skipsrc: Whether to skip returning the 'source' architecture entry

        @type skipall: boolean
        @param skipall: Whether to skip returning the 'all' architecture entry

        @return: list of Architecture objects for the given name (may be empty)
        """
        q = object_session(self).query(Architecture).with_parent(self)
        # NOTE(review): per the docstring these filters are presumably guarded
        # by skipsrc/skipall; the guard lines are not visible here -- confirm.
        q = q.filter(Architecture.arch_string != 'source')
        q = q.filter(Architecture.arch_string != 'all')
        return q.order_by(Architecture.arch_string).all()

    def get_sources(self, source):
        """
        Returns a query object representing DBSource that is part of C{suite}.

        - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}

        @type source: string
        @param source: source package name

        @rtype: sqlalchemy.orm.query.Query
        @return: a query of DBSource
        """
        session = object_session(self)
        # (query continuation beyond this line is not visible in this excerpt)
        return session.query(DBSource).filter_by(source = source). \

__all__.append('Suite')
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Suite object for the requested suite name (None if not present)
    """
    q = session.query(Suite).filter_by(suite_name=suite)

    # NOTE(review): the try/q.one() lookup preceding this handler is not
    # visible in this excerpt; a NoResultFound presumably yields None.
    except NoResultFound:

__all__.append('get_suite')
2742 ################################################################################
# TODO: should be removed because the implementation is too trivial
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name

    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: list of Architecture objects for the given name (may be empty)
    """
    # Thin convenience wrapper around Suite.get_architectures().
    return get_suite(suite, session).get_architectures(skipsrc, skipall)

__all__.append('get_suite_architectures')
2773 ################################################################################
class SuiteSrcFormat(object):
    """ORM class for the suite_src_formats table: which source formats a
    suite accepts (see the SuiteSrcFormat mapper in DBConn)."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): repr-style output; __repr__ header not visible here.
        return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)

__all__.append('SuiteSrcFormat')
def get_suite_src_formats(suite, session=None):
    """
    Returns list of allowed SrcFormat for C{suite}.

    @param suite: Suite name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: the list of allowed source formats for I{suite}
    """
    # SrcFormat -> SuiteSrcFormat -> Suite join, restricted to the named
    # suite, ordered by format name.  (The final return of the query results
    # is not visible in this excerpt.)
    q = session.query(SrcFormat)
    q = q.join(SuiteSrcFormat)
    q = q.join(Suite).filter_by(suite_name=suite)
    q = q.order_by('format_name')

__all__.append('get_suite_src_formats')
2809 ################################################################################
class Uid(ORMObject):
    """An OpenPGP uid and owner name; row of the uid table."""

    def __init__(self, uid = None, name = None):
        # (attribute assignments not visible in this excerpt)

    def __eq__(self, val):
        # Allow comparing a Uid directly against a uid string.
        if isinstance(val, str):
            return (self.uid == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.uid != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        # Attributes shown by ORMObject's generic repr/serialisation.
        return ['uid', 'name', 'fingerprint']

    def not_null_constraints(self):

__all__.append('Uid')
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.

    @return: the uid object for the given uidname
    """
    q = session.query(Uid).filter_by(uid=uidname)

    # NOTE(review): the try/q.one() lookup preceding this handler is not
    # visible in this excerpt; on a miss a new row is presumably inserted
    # before the commit_or_flush below -- confirm against full source.
    except NoResultFound:
        session.commit_or_flush()

__all__.append('get_or_set_uid')
def get_uid_from_fingerprint(fpr, session=None):
    # Look up the Uid owning the given fingerprint string via the
    # Uid -> Fingerprint join.  (The try/return and the NoResultFound
    # handler body are not visible in this excerpt.)
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)

    except NoResultFound:

__all__.append('get_uid_from_fingerprint')
2882 ################################################################################
class UploadBlock(object):
    """ORM class for the upload_blocks table: blocks uploads per
    fingerprint/uid (see the UploadBlock mapper in DBConn)."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): repr-style output; __repr__ header not visible here.
        return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)

__all__.append('UploadBlock')
2893 ################################################################################
class DBConn(object):
    """
    database module init.
    """

    def __init__(self, *args, **kwargs):
        # Borg-style shared state: every instance aliases the same dict, so
        # the engine/metadata/mappers are set up once process-wide.
        # NOTE(review): __shared_state itself is declared on a line not
        # visible in this excerpt -- confirm against full source.
        self.__dict__ = self.__shared_state

        if not getattr(self, 'initialised', False):
            self.initialised = True
            # 'debug' turns on SQL echoing (passed to create_engine below).
            self.debug = kwargs.has_key('debug')

    def __setuptables(self):
        # Reflect the archive database schema.  Tables in the first group
        # get an explicit 'id' primary-key column (workaround, see comment
        # below); the rest autoload as-is.
        tables_with_primary = (
            'build_queue_files',
            'changes_pending_binaries',
            'changes_pending_files',
            'changes_pending_source',
            'pending_bin_contents',

        tables_no_primary = (
            'changes_pending_files_map',
            'changes_pending_source_files',
            'changes_pool_files',
        # TODO: the maintainer column in table override should be removed.
            'suite_architectures',
            'suite_src_formats',
            'suite_build_queue_copy',
            # Database views, reflected below as view_<name> attributes.
            'almost_obsolete_all_associations',
            'almost_obsolete_src_associations',
            'any_associations_source',
            'bin_assoc_by_arch',
            'bin_associations_binaries',
            'binaries_suite_arch',
            'binfiles_suite_component_arch',
            'newest_all_associations',
            'newest_any_associations',
            'newest_src_association',
            'obsolete_all_associations',
            'obsolete_any_associations',
            'obsolete_any_by_all_associations',
            'obsolete_src_associations',
            'src_associations_bin',
            'src_associations_src',
            'suite_arch_by_name',

        # Sqlalchemy version 0.5 fails to reflect the SERIAL type
        # correctly and that is why we have to use a workaround. It can
        # be removed as soon as we switch to version 0.6.
        for table_name in tables_with_primary:
            table = Table(table_name, self.db_meta, \
                Column('id', Integer, primary_key = True), \
                autoload=True, useexisting=True)
            setattr(self, 'tbl_%s' % table_name, table)

        for table_name in tables_no_primary:
            table = Table(table_name, self.db_meta, autoload=True)
            setattr(self, 'tbl_%s' % table_name, table)

        # bin_contents needs special attention until update #41 has been
        # bin_contents has a composite (file, binary_id) primary key that
        # reflection does not pick up, so declare it explicitly.
        self.tbl_bin_contents = Table('bin_contents', self.db_meta, \
            Column('file', Text, primary_key = True),
            Column('binary_id', Integer, ForeignKey('binaries.id'), \
                primary_key = True),
            autoload=True, useexisting=True)

        for view_name in views:
            view = Table(view_name, self.db_meta, autoload=True)
            setattr(self, 'view_%s' % view_name, view)

    def __setupmappers(self):
        # Wire each ORM class onto its reflected table; property names here
        # (e.g. arch_id) are the attribute names used throughout this module.
        mapper(Architecture, self.tbl_architecture,
               properties = dict(arch_id = self.tbl_architecture.c.id,
                                 suites = relation(Suite, secondary=self.tbl_suite_architectures,
                                                   order_by='suite_name',
                                                   backref=backref('architectures', order_by='arch_string'))),
               extension = validator)

        mapper(Archive, self.tbl_archive,
               properties = dict(archive_id = self.tbl_archive.c.id,
                                 archive_name = self.tbl_archive.c.name))

        mapper(PendingBinContents, self.tbl_pending_bin_contents,
               properties = dict(contents_id =self.tbl_pending_bin_contents.c.id,
                                 filename = self.tbl_pending_bin_contents.c.filename,
                                 package = self.tbl_pending_bin_contents.c.package,
                                 version = self.tbl_pending_bin_contents.c.version,
                                 arch = self.tbl_pending_bin_contents.c.arch,
                                 otype = self.tbl_pending_bin_contents.c.type))

        mapper(DebContents, self.tbl_deb_contents,
               properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
                                 package=self.tbl_deb_contents.c.package,
                                 suite=self.tbl_deb_contents.c.suite,
                                 arch=self.tbl_deb_contents.c.arch,
                                 section=self.tbl_deb_contents.c.section,
                                 filename=self.tbl_deb_contents.c.filename))

        mapper(UdebContents, self.tbl_udeb_contents,
               properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
                                 package=self.tbl_udeb_contents.c.package,
                                 suite=self.tbl_udeb_contents.c.suite,
                                 arch=self.tbl_udeb_contents.c.arch,
                                 section=self.tbl_udeb_contents.c.section,
                                 filename=self.tbl_udeb_contents.c.filename))

        mapper(BuildQueue, self.tbl_build_queue,
               properties = dict(queue_id = self.tbl_build_queue.c.id))

        mapper(BuildQueueFile, self.tbl_build_queue_files,
               properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
                                 poolfile = relation(PoolFile, backref='buildqueueinstances')))

        mapper(DBBinary, self.tbl_binaries,
               properties = dict(binary_id = self.tbl_binaries.c.id,
                                 package = self.tbl_binaries.c.package,
                                 version = self.tbl_binaries.c.version,
                                 maintainer_id = self.tbl_binaries.c.maintainer,
                                 maintainer = relation(Maintainer),
                                 source_id = self.tbl_binaries.c.source,
                                 source = relation(DBSource, backref='binaries'),
                                 arch_id = self.tbl_binaries.c.architecture,
                                 architecture = relation(Architecture),
                                 poolfile_id = self.tbl_binaries.c.file,
                                 poolfile = relation(PoolFile, backref=backref('binary', uselist = False)),
                                 binarytype = self.tbl_binaries.c.type,
                                 fingerprint_id = self.tbl_binaries.c.sig_fpr,
                                 fingerprint = relation(Fingerprint),
                                 install_date = self.tbl_binaries.c.install_date,
                                 suites = relation(Suite, secondary=self.tbl_bin_associations,
                                                   backref=backref('binaries', lazy='dynamic'))),
               extension = validator)

        mapper(BinaryACL, self.tbl_binary_acl,
               properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))

        mapper(BinaryACLMap, self.tbl_binary_acl_map,
               properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
                                 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
                                 architecture = relation(Architecture)))

        mapper(Component, self.tbl_component,
               properties = dict(component_id = self.tbl_component.c.id,
                                 component_name = self.tbl_component.c.name),
               extension = validator)

        mapper(DBConfig, self.tbl_config,
               properties = dict(config_id = self.tbl_config.c.id))

        mapper(DSCFile, self.tbl_dsc_files,
               properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
                                 source_id = self.tbl_dsc_files.c.source,
                                 source = relation(DBSource),
                                 poolfile_id = self.tbl_dsc_files.c.file,
                                 poolfile = relation(PoolFile)))

        mapper(PoolFile, self.tbl_files,
               properties = dict(file_id = self.tbl_files.c.id,
                                 filesize = self.tbl_files.c.size,
                                 location_id = self.tbl_files.c.location,
                                 location = relation(Location,
                                     # using lazy='dynamic' in the back
                                     # reference because we have A LOT of
                                     # files in one location
                                     backref=backref('files', lazy='dynamic'))),
               extension = validator)

        mapper(Fingerprint, self.tbl_fingerprint,
               properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
                                 uid_id = self.tbl_fingerprint.c.uid,
                                 uid = relation(Uid),
                                 keyring_id = self.tbl_fingerprint.c.keyring,
                                 keyring = relation(Keyring),
                                 source_acl = relation(SourceACL),
                                 binary_acl = relation(BinaryACL)),
               extension = validator)

        mapper(Keyring, self.tbl_keyrings,
               properties = dict(keyring_name = self.tbl_keyrings.c.name,
                                 keyring_id = self.tbl_keyrings.c.id))

        mapper(DBChange, self.tbl_changes,
               properties = dict(change_id = self.tbl_changes.c.id,
                                 poolfiles = relation(PoolFile,
                                                      secondary=self.tbl_changes_pool_files,
                                                      backref="changeslinks"),
                                 seen = self.tbl_changes.c.seen,
                                 source = self.tbl_changes.c.source,
                                 binaries = self.tbl_changes.c.binaries,
                                 architecture = self.tbl_changes.c.architecture,
                                 distribution = self.tbl_changes.c.distribution,
                                 urgency = self.tbl_changes.c.urgency,
                                 maintainer = self.tbl_changes.c.maintainer,
                                 changedby = self.tbl_changes.c.changedby,
                                 date = self.tbl_changes.c.date,
                                 version = self.tbl_changes.c.version,
                                 files = relation(ChangePendingFile,
                                                  secondary=self.tbl_changes_pending_files_map,
                                                  backref="changesfile"),
                                 in_queue_id = self.tbl_changes.c.in_queue,
                                 in_queue = relation(PolicyQueue,
                                                     primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
                                 approved_for_id = self.tbl_changes.c.approved_for))

        mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
               properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))

        mapper(ChangePendingFile, self.tbl_changes_pending_files,
               properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
                                 filename = self.tbl_changes_pending_files.c.filename,
                                 size = self.tbl_changes_pending_files.c.size,
                                 md5sum = self.tbl_changes_pending_files.c.md5sum,
                                 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
                                 sha256sum = self.tbl_changes_pending_files.c.sha256sum))

        mapper(ChangePendingSource, self.tbl_changes_pending_source,
               properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
                                 change = relation(DBChange),
                                 # maintainer and changedby both point at the
                                 # maintainer table, hence explicit primaryjoins.
                                 maintainer = relation(Maintainer,
                                                       primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
                                 changedby = relation(Maintainer,
                                                      primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
                                 fingerprint = relation(Fingerprint),
                                 source_files = relation(ChangePendingFile,
                                                         secondary=self.tbl_changes_pending_source_files,
                                                         backref="pending_sources")))

        mapper(KeyringACLMap, self.tbl_keyring_acl_map,
               properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
                                 keyring = relation(Keyring, backref="keyring_acl_map"),
                                 architecture = relation(Architecture)))

        mapper(Location, self.tbl_location,
               properties = dict(location_id = self.tbl_location.c.id,
                                 component_id = self.tbl_location.c.component,
                                 component = relation(Component, backref='location'),
                                 archive_id = self.tbl_location.c.archive,
                                 archive = relation(Archive),
                                 # FIXME: the 'type' column is old cruft and
                                 # should be removed in the future.
                                 archive_type = self.tbl_location.c.type),
               extension = validator)

        mapper(Maintainer, self.tbl_maintainer,
               properties = dict(maintainer_id = self.tbl_maintainer.c.id,
                                 # A maintainer relates to source twice
                                 # (Maintainer vs Changed-By), hence the
                                 # explicit primaryjoins.
                                 maintains_sources = relation(DBSource, backref='maintainer',
                                                              primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
                                 changed_sources = relation(DBSource, backref='changedby',
                                                            primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
               extension = validator)

        mapper(NewComment, self.tbl_new_comments,
               properties = dict(comment_id = self.tbl_new_comments.c.id))

        mapper(Override, self.tbl_override,
               properties = dict(suite_id = self.tbl_override.c.suite,
                                 suite = relation(Suite, \
                                     backref=backref('overrides', lazy='dynamic')),
                                 package = self.tbl_override.c.package,
                                 component_id = self.tbl_override.c.component,
                                 component = relation(Component, \
                                     backref=backref('overrides', lazy='dynamic')),
                                 priority_id = self.tbl_override.c.priority,
                                 priority = relation(Priority, \
                                     backref=backref('overrides', lazy='dynamic')),
                                 section_id = self.tbl_override.c.section,
                                 section = relation(Section, \
                                     backref=backref('overrides', lazy='dynamic')),
                                 overridetype_id = self.tbl_override.c.type,
                                 overridetype = relation(OverrideType, \
                                     backref=backref('overrides', lazy='dynamic'))))

        mapper(OverrideType, self.tbl_override_type,
               properties = dict(overridetype = self.tbl_override_type.c.type,
                                 overridetype_id = self.tbl_override_type.c.id))

        mapper(PolicyQueue, self.tbl_policy_queue,
               properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))

        mapper(Priority, self.tbl_priority,
               properties = dict(priority_id = self.tbl_priority.c.id))

        mapper(Section, self.tbl_section,
               properties = dict(section_id = self.tbl_section.c.id,
                                 section=self.tbl_section.c.section))

        mapper(DBSource, self.tbl_source,
               properties = dict(source_id = self.tbl_source.c.id,
                                 version = self.tbl_source.c.version,
                                 maintainer_id = self.tbl_source.c.maintainer,
                                 poolfile_id = self.tbl_source.c.file,
                                 poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
                                 fingerprint_id = self.tbl_source.c.sig_fpr,
                                 fingerprint = relation(Fingerprint),
                                 changedby_id = self.tbl_source.c.changedby,
                                 srcfiles = relation(DSCFile,
                                                     primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
                                 suites = relation(Suite, secondary=self.tbl_src_associations,
                                                   backref=backref('sources', lazy='dynamic')),
                                 srcuploaders = relation(SrcUploader)),
               extension = validator)

        mapper(SourceACL, self.tbl_source_acl,
               properties = dict(source_acl_id = self.tbl_source_acl.c.id))

        mapper(SrcFormat, self.tbl_src_format,
               properties = dict(src_format_id = self.tbl_src_format.c.id,
                                 format_name = self.tbl_src_format.c.format_name))

        mapper(SrcUploader, self.tbl_src_uploaders,
               properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
                                 source_id = self.tbl_src_uploaders.c.source,
                                 source = relation(DBSource,
                                                   primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
                                 maintainer_id = self.tbl_src_uploaders.c.maintainer,
                                 maintainer = relation(Maintainer,
                                                       primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))

        mapper(Suite, self.tbl_suite,
               properties = dict(suite_id = self.tbl_suite.c.id,
                                 policy_queue = relation(PolicyQueue),
                                 copy_queues = relation(BuildQueue,
                                                        secondary=self.tbl_suite_build_queue_copy)),
               extension = validator)

        mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
               properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
                                 suite = relation(Suite, backref='suitesrcformats'),
                                 src_format_id = self.tbl_suite_src_formats.c.src_format,
                                 src_format = relation(SrcFormat)))

        mapper(Uid, self.tbl_uid,
               properties = dict(uid_id = self.tbl_uid.c.id,
                                 fingerprint = relation(Fingerprint)),
               extension = validator)

        mapper(UploadBlock, self.tbl_upload_blocks,
               properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
                                 fingerprint = relation(Fingerprint, backref="uploadblocks"),
                                 uid = relation(Uid, backref="uploadblocks")))

        mapper(BinContents, self.tbl_bin_contents,
               # NOTE(review): the opening 'properties = dict(' of this call
               # is not visible in this excerpt -- confirm against full source.
            binary = relation(DBBinary,
                backref=backref('contents', lazy='dynamic')),
            file = self.tbl_bin_contents.c.file))

    ## Connection functions
    def __createconn(self):
        from config import Config
        # Build a postgres DSN from the DB::* config tree.  The TCP form
        # (host[:port]/name) vs the local-socket form (///name?port=) are
        # presumably selected on whether DB::Host is set; the branch lines
        # are not visible in this excerpt -- confirm against full source.
        connstr = "postgres://%s" % cnf["DB::Host"]
        if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
            connstr += ":%s" % cnf["DB::Port"]
        connstr += "/%s" % cnf["DB::Name"]
        connstr = "postgres:///%s" % cnf["DB::Name"]
        if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
            connstr += "?port=%s" % cnf["DB::Port"]

        # Engine + metadata + session factory, then reflect and map.
        self.db_pg = create_engine(connstr, echo=self.debug)
        self.db_meta = MetaData()
        self.db_meta.bind = self.db_pg
        self.db_smaker = sessionmaker(bind=self.db_pg,

        self.__setuptables()
        self.__setupmappers()

        # NOTE(review): presumably the body of a session() accessor whose
        # def line is not visible in this excerpt -- returns a new SQLA session.
        return self.db_smaker()

__all__.append('DBConn')