5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
47 import simplejson as json
49 from datetime import datetime, timedelta
50 from errno import ENOENT
51 from tempfile import mkstemp, mkdtemp
53 from inspect import getargspec
56 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc
57 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
58 backref, MapperExtension, EXT_CONTINUE, object_mapper
59 from sqlalchemy import types as sqltypes
61 # Don't remove this, we re-export the exceptions to scripts which import us
62 from sqlalchemy.exc import *
63 from sqlalchemy.orm.exc import NoResultFound
65 # Only import Config until Queue stuff is changed to store its config
67 from config import Config
68 from textutils import fix_maintainer
69 from dak_exceptions import DBUpdateError, NoSourceFieldError
71 # suppress some deprecation warnings in squeeze related to sqlalchemy
73 warnings.filterwarnings('ignore', \
74 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
76 # TODO: sqlalchemy needs some extra configuration to correctly reflect
77 # the ind_deb_contents_* indexes - we ignore the warnings at the moment
78 warnings.filterwarnings("ignore", 'Predicate of partial index', SAWarning)
81 ################################################################################
83 # Patch in support for the debversion field type so that it works during
87 # that is for sqlalchemy 0.6
88 UserDefinedType = sqltypes.UserDefinedType
90 # this one for sqlalchemy 0.5
91 UserDefinedType = sqltypes.TypeEngine
# Custom SQLAlchemy column type for PostgreSQL's 'debversion' type.
# NOTE(review): this chunk is missing interior lines (the try/except that
# picks the UserDefinedType base and the method bodies) — verify against the
# full file before changing anything here.
93 class DebVersion(UserDefinedType):
94 def get_col_spec(self):
97 def bind_processor(self, dialect):
100 # ' = None' is needed for sqlalchemy 0.5:
101 def result_processor(self, dialect, coltype = None):
# Register the 'debversion' type with the postgres dialect so reflection
# works; only SQLAlchemy 0.5 and 0.6 are supported.
104 sa_major_version = sqlalchemy.__version__[0:3]
105 if sa_major_version in ["0.5", "0.6"]:
106 from sqlalchemy.databases import postgres
107 postgres.ischema_names['debversion'] = DebVersion
# NOTE(review): the 'else:' guarding this raise is not visible in this chunk.
109 raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
111 ################################################################################
# IntegrityError / SQLAlchemyError come from the 'from sqlalchemy.exc import *'
# re-export above.
113 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
115 ################################################################################
# Decorator: gives every DB helper an optional trailing 'session' argument.
117 def session_wrapper(fn):
119 Wrapper around common ".., session=None):" handling. If the wrapped
120 function is called without passing 'session', we create a local one
121 and destroy it when the function ends.
123 Also attaches a commit_or_flush method to the session; if we created a
124 local session, this is a synonym for session.commit(), otherwise it is a
125 synonym for session.flush().
128 def wrapped(*args, **kwargs):
129 private_transaction = False
131 # Find the session object
132 session = kwargs.get('session')
# getargspec counts the wrapped function's declared positional parameters;
# 'session' is assumed to be the last one.
135 if len(args) <= len(getargspec(fn)[0]) - 1:
136 # No session specified as last argument or in kwargs
137 private_transaction = True
138 session = kwargs['session'] = DBConn().session()
140 # Session is last argument in args
# NOTE(review): lines converting 'args' to a mutable list before this
# item-assignment are missing from this chunk — confirm in the full file.
144 session = args[-1] = DBConn().session()
145 private_transaction = True
# A private (locally created) session really commits; a caller-supplied
# session only flushes so the caller keeps transaction control.
147 if private_transaction:
148 session.commit_or_flush = session.commit
150 session.commit_or_flush = session.flush
153 return fn(*args, **kwargs)
155 if private_transaction:
156 # We created a session; close it.
# Preserve the wrapped function's metadata (pre-functools.wraps style).
159 wrapped.__doc__ = fn.__doc__
160 wrapped.func_name = fn.func_name
164 __all__.append('session_wrapper')
166 ################################################################################
# Common base class for all mapped ORM classes: JSON/repr/str helpers,
# NOT NULL validation, primary-key lookup and cross-session cloning.
168 class ORMObject(object):
170 ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
171 derived classes must implement the properties() method.
174 def properties(self):
176 This method should be implemented by all derived classes and returns a
177 list of the important properties. The properties 'created' and
178 'modified' will be added automatically. A suffix '_count' should be
179 added to properties that are lists or query objects. The most important
180 property name should be returned as the first element in the list
181 because it is used by repr().
187 Returns a JSON representation of the object based on the properties
188 returned from the properties() method.
191 # add created and modified
192 all_properties = self.properties() + ['created', 'modified']
193 for property in all_properties:
194 # check for list or query
# A '_count' suffix means: report len()/count() of the underlying
# collection or query instead of serialising it.
195 if property[-6:] == '_count':
196 real_property = property[:-6]
197 if not hasattr(self, real_property):
199 value = getattr(self, real_property)
200 if hasattr(value, '__len__'):
203 elif hasattr(value, 'count'):
205 value = value.count()
207 raise KeyError('Do not understand property %s.' % property)
209 if not hasattr(self, property):
212 value = getattr(self, property)
216 elif isinstance(value, ORMObject):
217 # use repr() for ORMObject types
220 # we want a string for all other types because json cannot
223 data[property] = value
224 return json.dumps(data)
228 Returns the name of the class.
230 return type(self).__name__
234 Returns a short string representation of the object using the first
235 element from the properties() method.
237 primary_property = self.properties()[0]
238 value = getattr(self, primary_property)
239 return '<%s %s>' % (self.classname(), str(value))
243 Returns a human readable form of the object using the properties()
246 return '<%s %s>' % (self.classname(), self.json())
248 def not_null_constraints(self):
250 Returns a list of properties that must be not NULL. Derived classes
251 should override this method if needed.
# %-template used by validate(); filled with (property, str(self)).
255 validation_message = \
256 "Validation failed because property '%s' must not be empty in object\n%s"
260 This function validates the not NULL constraints as returned by
261 not_null_constraints(). It raises the DBUpdateError exception if
264 for property in self.not_null_constraints():
265 # TODO: It is a bit awkward that the mapper configuration allow
266 # directly setting the numeric _id columns. We should get rid of it
# A set foo_id column satisfies the constraint even if the relation
# attribute itself is not populated yet.
268 if hasattr(self, property + '_id') and \
269 getattr(self, property + '_id') is not None:
271 if not hasattr(self, property) or getattr(self, property) is None:
272 raise DBUpdateError(self.validation_message % \
273 (property, str(self)))
# Classmethod-style primary-key lookup shortcut (decorator not visible
# in this chunk).
277 def get(cls, primary_key, session = None):
279 This is a support function that allows getting an object by its primary
282 Architecture.get(3[, session])
284 instead of the more verbose
286 session.query(Architecture).get(3)
288 return session.query(cls).get(primary_key)
290 def session(self, replace = False):
292 Returns the current session that is associated with the object. May
293 return None is object is in detached state.
296 return object_session(self)
298 def clone(self, session = None):
300 Clones the current object in a new session and returns the new clone. A
301 fresh session is created if the optional session parameter is not
304 RATIONALE: SQLAlchemy's session is not thread safe. This method allows
305 cloning of an existing object to allow several threads to work with
306 their own instances of an ORMObject.
308 WARNING: Only persistent (committed) objects can be cloned.
312 session = DBConn().session()
313 if self.session() is None:
314 raise RuntimeError('Method clone() failed for detached object:\n%s' %
# Flush pending changes, then re-fetch the row by primary key in the
# target session to obtain an independent instance.
316 self.session().flush()
317 mapper = object_mapper(self)
318 primary_key = mapper.primary_key_from_instance(self)
319 object_class = self.__class__
320 new_object = session.query(object_class).get(primary_key)
321 if new_object is None:
322 raise RuntimeError( \
323 'Method clone() failed for non-persistent object:\n%s' % self)
326 __all__.append('ORMObject')
328 ################################################################################
# MapperExtension hook: runs each instance's validate() before INSERT and
# UPDATE so NOT NULL constraints fail early with a DBUpdateError.
330 class Validator(MapperExtension):
332 This class calls the validate() method for each instance for the
333 'before_update' and 'before_insert' events. A global object validator is
334 used for configuring the individual mappers.
337 def before_update(self, mapper, connection, instance):
341 def before_insert(self, mapper, connection, instance):
# Shared singleton passed to the individual mapper() configurations.
345 validator = Validator()
347 ################################################################################
class Architecture(ORMObject):
    """A Debian architecture (identified by its arch_string)."""

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Support comparing directly against a plain string; anything else
        # is handed back to the normal comparison machinery.
        if not isinstance(val, str):
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        if not isinstance(val, str):
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # 'arch_string' comes first because ORMObject.__repr__ displays the
        # first listed property.
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']
372 __all__.append('Architecture')
# Lookup helper: Architecture row by name, or None (decorator and the
# try/return lines are not visible in this chunk).
375 def get_architecture(architecture, session=None):
377 Returns database id for given C{architecture}.
379 @type architecture: string
380 @param architecture: The name of the architecture
382 @type session: Session
383 @param session: Optional SQLA session object (a temporary one will be
384 generated if not supplied)
387 @return: Architecture object for the given arch (None if not present)
390 q = session.query(Architecture).filter_by(arch_string=architecture)
# NoResultFound from q.one() maps to the documented None return.
394 except NoResultFound:
397 __all__.append('get_architecture')
399 # TODO: should be removed because the implementation is too trivial
401 def get_architecture_suites(architecture, session=None):
403 Returns list of Suite objects for given C{architecture} name
405 @type architecture: str
406 @param architecture: Architecture name to search for
408 @type session: Session
409 @param session: Optional SQL session object (a temporary one will be
410 generated if not supplied)
413 @return: list of Suite objects for the given name (may be empty)
416 return get_architecture(architecture, session).suites
418 __all__.append('get_architecture_suites')
420 ################################################################################
# Plain mapped class for the 'archive' table; attributes are filled in by
# the mapper configuration elsewhere in the file.
422 class Archive(object):
423 def __init__(self, *args, **kwargs):
427 return '<Archive %s>' % self.archive_name
429 __all__.append('Archive')
# Lookup helper: Archive row by (lower-cased) name, or None.
432 def get_archive(archive, session=None):
434 returns database id for given C{archive}.
436 @type archive: string
437 @param archive: the name of the arhive
439 @type session: Session
440 @param session: Optional SQLA session object (a temporary one will be
441 generated if not supplied)
444 @return: Archive object for the given name (None if not present)
# Archive names are stored lower-case, so normalise the lookup key.
447 archive = archive.lower()
449 q = session.query(Archive).filter_by(archive_name=archive)
453 except NoResultFound:
456 __all__.append('get_archive')
458 ################################################################################
# Mapped class for per-binary contents entries (binary, filename pairs).
460 class BinContents(object):
461 def __init__(self, *args, **kwargs):
465 return '<BinContents (%s, %s)>' % (self.binary, self.filename)
467 __all__.append('BinContents')
469 ################################################################################
# ORM class for a binary package row ('binaries' table).
471 class DBBinary(ORMObject):
472 def __init__(self, package = None, source = None, version = None, \
473 maintainer = None, architecture = None, poolfile = None, \
475 self.package = package
477 self.version = version
478 self.maintainer = maintainer
479 self.architecture = architecture
480 self.poolfile = poolfile
481 self.binarytype = binarytype
483 def properties(self):
484 return ['package', 'version', 'maintainer', 'source', 'architecture', \
485 'poolfile', 'binarytype', 'fingerprint', 'install_date', \
486 'suites_count', 'binary_id']
488 def not_null_constraints(self):
489 return ['package', 'version', 'maintainer', 'source', 'poolfile', \
# Component is derived via the pool file's location rather than stored
# directly on the binary.
492 def get_component_name(self):
493 return self.poolfile.location.component.component_name
495 __all__.append('DBBinary')
# All suites that contain any binary with the given package name.
498 def get_suites_binary_in(package, session=None):
500 Returns list of Suite objects which given C{package} name is in
503 @param package: DBBinary package name to search for
506 @return: list of Suite objects for the given package
509 return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()
511 __all__.append('get_suites_binary_in')
514 def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
516 Returns the component name of the newest binary package in suite_list or
517 None if no package is found. The result can be optionally filtered by a list
518 of architecture names.
521 @param package: DBBinary package name to search for
523 @type suite_list: list of str
524 @param suite_list: list of suite_name items
526 @type arch_list: list of str
# NOTE(review): mutable default argument ([]) — harmless here because it is
# never mutated, but worth cleaning up.
527 @param arch_list: optional list of arch_string items that defaults to []
529 @rtype: str or NoneType
530 @return: name of component or None
533 q = session.query(DBBinary).filter_by(package = package). \
534 join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
535 if len(arch_list) > 0:
536 q = q.join(DBBinary.architecture). \
537 filter(Architecture.arch_string.in_(arch_list))
# Newest version wins; the None-result guard lines are not visible here.
538 binary = q.order_by(desc(DBBinary.version)).first()
542 return binary.get_component_name()
544 __all__.append('get_component_by_package_suite')
546 ################################################################################
# Mapped classes for the binary upload ACL tables.
548 class BinaryACL(object):
549 def __init__(self, *args, **kwargs):
553 return '<BinaryACL %s>' % self.binary_acl_id
555 __all__.append('BinaryACL')
557 ################################################################################
559 class BinaryACLMap(object):
560 def __init__(self, *args, **kwargs):
564 return '<BinaryACLMap %s>' % self.binary_acl_map_id
566 __all__.append('BinaryACLMap')
568 ################################################################################
573 ArchiveDir "%(archivepath)s";
574 OverrideDir "%(overridedir)s";
575 CacheDir "%(cachedir)s";
580 Packages::Compress ". bzip2 gzip";
581 Sources::Compress ". bzip2 gzip";
586 bindirectory "incoming"
591 BinOverride "override.sid.all3";
592 BinCacheDB "packages-accepted.db";
594 FileList "%(filelist)s";
597 Packages::Extensions ".deb .udeb";
600 bindirectory "incoming/"
603 BinOverride "override.sid.all3";
604 SrcOverride "override.sid.all3.src";
605 FileList "%(filelist)s";
# A build queue directory (e.g. buildd incoming): tracks queued files and
# regenerates apt metadata (Packages/Sources/Release) with apt-ftparchive.
609 class BuildQueue(object):
610 def __init__(self, *args, **kwargs):
614 return '<BuildQueue %s>' % self.queue_name
616 def write_metadata(self, starttime, force=False):
617 # Do we write out metafiles?
618 if not (force or self.generate_metadata):
621 session = DBConn().session().object_session(self)
623 fl_fd = fl_name = ac_fd = ac_name = None
# Every known architecture except 'source'.
625 arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
626 startdir = os.getcwd()
629 # Grab files we want to include
# 'Newer' = files still within their stay_of_execution grace period.
630 newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
631 # Write file list with newer files
632 (fl_fd, fl_name) = mkstemp()
634 os.write(fl_fd, '%s\n' % n.fullpath)
639 # Write minimal apt.conf
640 # TODO: Remove hardcoding from template
641 (ac_fd, ac_name) = mkstemp()
642 os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
644 'cachedir': cnf["Dir::Cache"],
645 'overridedir': cnf["Dir::Override"],
649 # Run apt-ftparchive generate
650 os.chdir(os.path.dirname(ac_name))
# NOTE(review): os.system with interpolated values throughout this method —
# config-controlled, but subprocess with an argument list would be safer.
651 os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
653 # Run apt-ftparchive release
654 # TODO: Eww - fix this
655 bname = os.path.basename(self.path)
659 # We have to remove the Release file otherwise it'll be included in the
662 os.unlink(os.path.join(bname, 'Release'))
666 os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
668 # Crude hack with open and append, but this whole section is and should be redone.
669 if self.notautomatic:
670 release=open("Release", "a")
# NOTE(review): no trailing newline is written after "NotAutomatic: yes" —
# looks like a bug if anything follows it in the file; confirm.
671 release.write("NotAutomatic: yes")
676 keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
677 if cnf.has_key("Dinstall::SigningPubKeyring"):
678 keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
# NOTE(review): stray '""' before the closing quote in this format string
# ('Release"""') — harmless string concatenation, but verify intent.
680 os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
682 # Move the files if we got this far
683 os.rename('Release', os.path.join(bname, 'Release'))
685 os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
687 # Clean up any left behind files
714 def clean_and_update(self, starttime, Logger, dryrun=False):
715 """WARNING: This routine commits for you"""
716 session = DBConn().session().object_session(self)
718 if self.generate_metadata and not dryrun:
719 self.write_metadata(starttime)
721 # Grab files older than our execution time
722 older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
728 Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
730 Logger.log(["I: Removing %s from the queue" % o.fullpath])
731 os.unlink(o.fullpath)
734 # If it wasn't there, don't worry
735 if e.errno == ENOENT:
738 # TODO: Replace with proper logging call
739 Logger.log(["E: Could not remove %s" % o.fullpath])
# Second pass: remove metadata/advisory files in the queue directory that
# no longer correspond to a BuildQueueFile row.
746 for f in os.listdir(self.path):
747 if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
751 r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
752 except NoResultFound:
753 fp = os.path.join(self.path, f)
755 Logger.log(["I: Would remove unused link %s" % fp])
757 Logger.log(["I: Removing unused link %s" % fp])
761 Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
763 def add_file_from_pool(self, poolfile):
764 """Copies a file into the pool. Assumes that the PoolFile object is
765 attached to the same SQLAlchemy session as the Queue object is.
767 The caller is responsible for committing after calling this function."""
768 poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
770 # Check if we have a file of this name or this ID already
771 for f in self.queuefiles:
772 if f.fileid is not None and f.fileid == poolfile.file_id or \
773 f.poolfile.filename == poolfile_basename:
774 # In this case, update the BuildQueueFile entry so we
775 # don't remove it too early
776 f.lastused = datetime.now()
777 DBConn().session().object_session(poolfile).add(f)
780 # Prepare BuildQueueFile object
781 qf = BuildQueueFile()
782 qf.build_queue_id = self.queue_id
783 qf.lastused = datetime.now()
784 qf.filename = poolfile_basename
786 targetpath = poolfile.fullpath
787 queuepath = os.path.join(self.path, poolfile_basename)
791 # We need to copy instead of symlink
793 utils.copy(targetpath, queuepath)
794 # NULL in the fileid field implies a copy
797 os.symlink(targetpath, queuepath)
798 qf.fileid = poolfile.file_id
802 # Get the same session as the PoolFile is using and add the qf to it
803 DBConn().session().object_session(poolfile).add(qf)
# Lookup helper: BuildQueue row by queue name, or None on NoResultFound.
811 def get_build_queue(queuename, session=None):
813 Returns BuildQueue object for given C{queue name}, creating it if it does not
816 @type queuename: string
817 @param queuename: The name of the queue
819 @type session: Session
820 @param session: Optional SQLA session object (a temporary one will be
821 generated if not supplied)
824 @return: BuildQueue object for the given queue
827 q = session.query(BuildQueue).filter_by(queue_name=queuename)
831 except NoResultFound:
834 __all__.append('get_build_queue')
836 ################################################################################
# A file tracked in a build queue; fullpath joins the owning queue's path.
838 class BuildQueueFile(object):
839 def __init__(self, *args, **kwargs):
843 return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
847 return os.path.join(self.buildqueue.path, self.filename)
850 __all__.append('BuildQueueFile')
852 ################################################################################
# Mapped classes for the changes_pending_* tables (NEW/byhand processing).
854 class ChangePendingBinary(object):
855 def __init__(self, *args, **kwargs):
859 return '<ChangePendingBinary %s>' % self.change_pending_binary_id
861 __all__.append('ChangePendingBinary')
863 ################################################################################
865 class ChangePendingFile(object):
866 def __init__(self, *args, **kwargs):
870 return '<ChangePendingFile %s>' % self.change_pending_file_id
872 __all__.append('ChangePendingFile')
874 ################################################################################
876 class ChangePendingSource(object):
877 def __init__(self, *args, **kwargs):
881 return '<ChangePendingSource %s>' % self.change_pending_source_id
883 __all__.append('ChangePendingSource')
885 ################################################################################
# ORM class for an archive component (main/contrib/non-free); like
# Architecture, it compares equal to its plain-string name.
887 class Component(ORMObject):
888 def __init__(self, component_name = None):
889 self.component_name = component_name
891 def __eq__(self, val):
892 if isinstance(val, str):
893 return (self.component_name == val)
894 # This signals to use the normal comparison operator
895 return NotImplemented
897 def __ne__(self, val):
898 if isinstance(val, str):
899 return (self.component_name != val)
900 # This signals to use the normal comparison operator
901 return NotImplemented
903 def properties(self):
904 return ['component_name', 'component_id', 'description', 'location', \
907 def not_null_constraints(self):
908 return ['component_name']
911 __all__.append('Component')
# Lookup helper: Component row by (lower-cased) name, or None.
914 def get_component(component, session=None):
916 Returns database id for given C{component}.
918 @type component: string
919 @param component: The name of the override type
922 @return: the database id for the given component
# Component names are stored lower-case, so normalise the lookup key.
925 component = component.lower()
927 q = session.query(Component).filter_by(component_name=component)
931 except NoResultFound:
934 __all__.append('get_component')
936 ################################################################################
# Mapped class for the 'config' table (name/value configuration rows).
938 class DBConfig(object):
939 def __init__(self, *args, **kwargs):
943 return '<DBConfig %s>' % self.name
945 __all__.append('DBConfig')
947 ################################################################################
# Get-or-create: id of a ContentFilename row for the given filename.
950 def get_or_set_contents_file_id(filename, session=None):
952 Returns database id for given filename.
954 If no matching file is found, a row is inserted.
956 @type filename: string
957 @param filename: The filename
958 @type session: SQLAlchemy
959 @param session: Optional SQL session object (a temporary one will be
960 generated if not supplied). If not passed, a commit will be performed at
961 the end of the function, otherwise the caller is responsible for commiting.
964 @return: the database id for the given component
967 q = session.query(ContentFilename).filter_by(filename=filename)
970 ret = q.one().cafilename_id
971 except NoResultFound:
# Not present yet: insert a new row and flush/commit so the id is
# assigned (commit_or_flush comes from session_wrapper).
972 cf = ContentFilename()
973 cf.filename = filename
975 session.commit_or_flush()
976 ret = cf.cafilename_id
980 __all__.append('get_or_set_contents_file_id')
# Raw-SQL contents listing for a suite/overridetype (optionally one section).
983 def get_contents(suite, overridetype, section=None, session=None):
985 Returns contents for a suite / overridetype combination, limiting
986 to a section if not None.
989 @param suite: Suite object
991 @type overridetype: OverrideType
992 @param overridetype: OverrideType object
994 @type section: Section
995 @param section: Optional section object to limit results to
997 @type session: SQLAlchemy
998 @param session: Optional SQL session object (a temporary one will be
999 generated if not supplied)
1001 @rtype: ResultsProxy
1002 @return: ResultsProxy object set up to return tuples of (filename, section,
1006 # find me all of the contents for a given suite
1007 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
1011 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
1012 JOIN content_file_names n ON (c.filename=n.id)
1013 JOIN binaries b ON (b.id=c.binary_pkg)
1014 JOIN override o ON (o.package=b.package)
1015 JOIN section s ON (s.id=o.section)
1016 WHERE o.suite = :suiteid AND o.type = :overridetypeid
1017 AND b.type=:overridetypename"""
# Values are passed as bind parameters (no string interpolation) — good.
1019 vals = {'suiteid': suite.suite_id,
1020 'overridetypeid': overridetype.overridetype_id,
1021 'overridetypename': overridetype.overridetype}
1023 if section is not None:
1024 contents_q += " AND s.id = :sectionid"
1025 vals['sectionid'] = section.section_id
1027 contents_q += " ORDER BY fn"
1029 return session.execute(contents_q, vals)
1031 __all__.append('get_contents')
1033 ################################################################################
# Mapped class for the content_file_paths table (directory part of paths).
1035 class ContentFilepath(object):
1036 def __init__(self, *args, **kwargs):
1040 return '<ContentFilepath %s>' % self.filepath
1042 __all__.append('ContentFilepath')
# Get-or-create: id of a ContentFilepath row (mirrors
# get_or_set_contents_file_id above).
1045 def get_or_set_contents_path_id(filepath, session=None):
1047 Returns database id for given path.
1049 If no matching file is found, a row is inserted.
1051 @type filepath: string
1052 @param filepath: The filepath
1054 @type session: SQLAlchemy
1055 @param session: Optional SQL session object (a temporary one will be
1056 generated if not supplied). If not passed, a commit will be performed at
1057 the end of the function, otherwise the caller is responsible for commiting.
1060 @return: the database id for the given path
1063 q = session.query(ContentFilepath).filter_by(filepath=filepath)
1066 ret = q.one().cafilepath_id
1067 except NoResultFound:
1068 cf = ContentFilepath()
1069 cf.filepath = filepath
1071 session.commit_or_flush()
1072 ret = cf.cafilepath_id
1076 __all__.append('get_or_set_contents_path_id')
1078 ################################################################################
# Mapped class linking a binary to a contents filename/filepath pair.
1080 class ContentAssociation(object):
1081 def __init__(self, *args, **kwargs):
1085 return '<ContentAssociation %s>' % self.ca_id
1087 __all__.append('ContentAssociation')
# Bulk-insert bin_contents rows for one binary; best-effort with rollback
# on failure when the session is locally owned.
1089 def insert_content_paths(binary_id, fullpaths, session=None):
1091 Make sure given path is associated with given binary id
1093 @type binary_id: int
1094 @param binary_id: the id of the binary
1095 @type fullpaths: list
1096 @param fullpaths: the list of paths of the file being associated with the binary
1097 @type session: SQLAlchemy session
1098 @param session: Optional SQLAlchemy session. If this is passed, the caller
1099 is responsible for ensuring a transaction has begun and committing the
1100 results or rolling back based on the result code. If not passed, a commit
1101 will be performed at the end of the function, otherwise the caller is
1102 responsible for commiting.
1104 @return: True upon success
1107 privatetrans = False
1109 session = DBConn().session()
# Generator keeps memory flat for large file lists; leading './' is
# stripped so paths are stored relative.
1114 def generate_path_dicts():
1115 for fullpath in fullpaths:
1116 if fullpath.startswith( './' ):
1117 fullpath = fullpath[2:]
1119 yield {'filename':fullpath, 'id': binary_id }
1121 for d in generate_path_dicts():
1122 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
1131 traceback.print_exc()
1133 # Only rollback if we set up the session ourself
1140 __all__.append('insert_content_paths')
1142 ################################################################################
# Mapped class for the dsc_files table (files referenced by a .dsc).
1144 class DSCFile(object):
1145 def __init__(self, *args, **kwargs):
1149 return '<DSCFile %s>' % self.dscfile_id
1151 __all__.append('DSCFile')
# Query DSCFiles filtered by any combination of the three optional ids.
1154 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
1156 Returns a list of DSCFiles which may be empty
1158 @type dscfile_id: int (optional)
1159 @param dscfile_id: the dscfile_id of the DSCFiles to find
1161 @type source_id: int (optional)
1162 @param source_id: the source id related to the DSCFiles to find
1164 @type poolfile_id: int (optional)
1165 @param poolfile_id: the poolfile id related to the DSCFiles to find
1168 @return: Possibly empty list of DSCFiles
1171 q = session.query(DSCFile)
# Filters are applied only for arguments actually supplied.
1173 if dscfile_id is not None:
1174 q = q.filter_by(dscfile_id=dscfile_id)
1176 if source_id is not None:
1177 q = q.filter_by(source_id=source_id)
1179 if poolfile_id is not None:
1180 q = q.filter_by(poolfile_id=poolfile_id)
1184 __all__.append('get_dscfiles')
1186 ################################################################################
# ORM class for a pool file ('files' table); fullpath joins location + name.
1188 class PoolFile(ORMObject):
1189 def __init__(self, filename = None, location = None, filesize = -1, \
1191 self.filename = filename
1192 self.location = location
1193 self.filesize = filesize
1194 self.md5sum = md5sum
1198 return os.path.join(self.location.path, self.filename)
# Size/md5 check used by check_poolfile(); filesize is coerced to long
# for comparison with the DB value (Python 2).
1200 def is_valid(self, filesize = -1, md5sum = None):
1201 return self.filesize == long(filesize) and self.md5sum == md5sum
1203 def properties(self):
1204 return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
1205 'sha256sum', 'location', 'source', 'binary', 'last_used']
1207 def not_null_constraints(self):
1208 return ['filename', 'md5sum', 'location']
1210 __all__.append('PoolFile')
# Look up a pool file by name within a location and verify size/md5sum.
1213 def check_poolfile(filename, filesize, md5sum, location_id, session=None):
1216 (ValidFileFound [boolean], PoolFile object or None)
1218 @type filename: string
1219 @param filename: the filename of the file to check against the DB
1222 @param filesize: the size of the file to check against the DB
1224 @type md5sum: string
1225 @param md5sum: the md5sum of the file to check against the DB
1227 @type location_id: int
1228 @param location_id: the id of the location to look in
1231 @return: Tuple of length 2.
1232 - If valid pool file found: (C{True}, C{PoolFile object})
1233 - If valid pool file not found:
1234 - (C{False}, C{None}) if no file found
1235 - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
# NOTE(review): raises AttributeError if location_id does not exist
# (.get() returns None) — confirm callers always pass a valid id.
1238 poolfile = session.query(Location).get(location_id). \
1239 files.filter_by(filename=filename).first()
1241 if poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum):
1244 return (valid, poolfile)
1246 __all__.append('check_poolfile')
1248 # TODO: the implementation can trivially be inlined at the place where the
1249 # function is called
# Primary-key lookup; .get() returns None when the id is unknown.
1251 def get_poolfile_by_id(file_id, session=None):
1253 Returns a PoolFile objects or None for the given id
1256 @param file_id: the id of the file to look for
1258 @rtype: PoolFile or None
1259 @return: either the PoolFile object or None
1262 return session.query(PoolFile).get(file_id)
1264 __all__.append('get_poolfile_by_id')
def get_poolfile_like_name(filename, session=None):
    """
    Returns an array of PoolFile objects which are like the given name

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @rtype: array
    @return: array of PoolFile objects
    """
    # TODO: There must be a way of properly using bind parameters with %FOO%
    q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))

    return q.all()
1283 __all__.append('get_poolfile_like_name')
def add_poolfile(filename, datadict, location_id, session=None):
    """
    Add a new file to the pool

    @type filename: string
    @param filename: filename

    @type datadict: dict
    @param datadict: dict with needed data (size, md5sum, sha1sum, sha256sum)

    @type location_id: int
    @param location_id: database id of the location

    @rtype: PoolFile
    @return: the PoolFile object created
    """
    poolfile = PoolFile()
    poolfile.filename = filename
    poolfile.filesize = datadict["size"]
    poolfile.md5sum = datadict["md5sum"]
    poolfile.sha1sum = datadict["sha1sum"]
    poolfile.sha256sum = datadict["sha256sum"]
    poolfile.location_id = location_id

    session.add(poolfile)
    # Flush to get a file id (NB: This is not a commit)
    session.flush()

    return poolfile
1316 __all__.append('add_poolfile')
1318 ################################################################################
class Fingerprint(ORMObject):
    """A GPG fingerprint known to the archive (table 'fingerprint')."""

    def __init__(self, fingerprint = None):
        self.fingerprint = fingerprint

    def properties(self):
        # NOTE(review): the tail of this list was truncated in the source
        # sample; restored from the fingerprint mapper's relations -- confirm
        # against the mapper definition.
        return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
            'binary_acl_map_count', 'source_acl', 'binary_acl']

    def not_null_constraints(self):
        return ['fingerprint']
1331 __all__.append('Fingerprint')
def get_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr or None
    """
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    try:
        ret = q.one()
    except NoResultFound:
        # No such fingerprint recorded.
        ret = None

    return ret
1358 __all__.append('get_fingerprint')
def get_or_set_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    If no matching fpr is found, a row is inserted.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr
    """
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    try:
        ret = q.one()
    except NoResultFound:
        # Not known yet: insert a new row and make sure it gets an id.
        fingerprint = Fingerprint()
        fingerprint.fingerprint = fpr
        session.add(fingerprint)
        session.commit_or_flush()
        ret = fingerprint

    return ret
1393 __all__.append('get_or_set_fingerprint')
1395 ################################################################################
# Helper routine for Keyring class
def get_ldap_name(entry):
    """
    Build a display name from an LDAP entry by joining the first value of
    each of the "cn", "mn" and "sn" attributes, skipping empty values and
    the "-" placeholder.

    @param entry: LDAP result attribute dict (attribute -> list of values)

    @rtype: string
    @return: space-joined name components (may be empty)
    """
    name = []
    for k in ["cn", "mn", "sn"]:
        ret = entry.get(k)
        if ret and ret[0] != "" and ret[0] != "-":
            name.append(ret[0])
    return " ".join(name)
1406 ################################################################################
class Keyring(object):
    """
    A GPG keyring known to the archive (table 'keyrings'), plus helpers to
    parse the keyring with gpg and to map keys to uids.

    NOTE(review): this class was partially elided in the reviewed sample;
    the reconstructed statements are marked below -- confirm against the
    upstream dak source.
    """
    gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
                     " --with-colons --fingerprint --fingerprint"

    # key id -> {email, name, fingerprints, uid}; fingerprint -> key id.
    keys = {}
    fpr_lookup = {}

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<Keyring %s>' % self.keyring_name

    def de_escape_gpg_str(self, txt):
        # gpg --with-colons escapes bytes as \xNN; decode them back.
        esclist = re.split(r'(\\x..)', txt)
        for x in range(1,len(esclist),2):
            esclist[x] = "%c" % (int(esclist[x][2:],16))
        return "".join(esclist)

    def parse_address(self, uid):
        """parses uid and returns a tuple of real name and email address"""
        import email.Utils
        (name, address) = email.Utils.parseaddr(uid)
        # Strip trailing comment like "(role)" and undo gpg escaping.
        name = re.sub(r"\s*[(].*[)]", "", name)
        name = self.de_escape_gpg_str(name)
        if name == "":
            name = uid
        return (name, address)

    def load_keys(self, keyring):
        if not self.keyring_id:
            raise Exception('Must be initialized with database information')

        k = os.popen(self.gpg_invocation % keyring, "r")
        key = None
        signingkey = False

        for line in k.xreadlines():
            field = line.split(":")
            if field[0] == "pub":
                # New primary key: field 4 is the key id, field 9 the uid.
                key = field[4]
                self.keys[key] = {}
                (name, addr) = self.parse_address(field[9])
                if "@" in addr:
                    self.keys[key]["email"] = addr
                    self.keys[key]["name"] = name
                self.keys[key]["fingerprints"] = []
                signingkey = True
            elif key and field[0] == "sub" and len(field) >= 12:
                # Subkey: only record fingerprints of signing-capable subkeys.
                signingkey = ("s" in field[11])
            elif key and field[0] == "uid":
                (name, addr) = self.parse_address(field[9])
                if "email" not in self.keys[key] and "@" in addr:
                    self.keys[key]["email"] = addr
                    self.keys[key]["name"] = name
            elif signingkey and field[0] == "fpr":
                self.keys[key]["fingerprints"].append(field[9])
                self.fpr_lookup[field[9]] = key

    def import_users_from_ldap(self, session):
        cnf = Config()

        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]

        l = ldap.open(LDAPServer)
        l.simple_bind_s("","")
        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
               ["uid", "keyfingerprint", "cn", "mn", "sn"])

        ldap_fin_uid_id = {}

        byuid = {}
        byname = {}

        for i in Attrs:
            entry = i[1]
            uid = entry["uid"][0]
            name = get_ldap_name(entry)
            fingerprints = entry["keyFingerPrint"]
            keyid = None
            for f in fingerprints:
                key = self.fpr_lookup.get(f, None)
                if key not in self.keys:
                    continue
                self.keys[key]["uid"] = uid

                # Only resolve/record the uid id once per LDAP entry.
                if keyid is not None:
                    continue
                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, name)
                byname[uid] = (keyid, name)

        return (byname, byuid)

    def generate_users_from_keyring(self, format, session):
        byuid = {}
        byname = {}
        any_invalid = False
        for x in self.keys.keys():
            if "email" not in self.keys[x]:
                # Key without a usable email address: flag it and assign the
                # shared invalid uid.
                any_invalid = True
                self.keys[x]["uid"] = format % "invalid-uid"
            else:
                uid = format % self.keys[x]["email"]
                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, self.keys[x]["name"])
                byname[uid] = (keyid, self.keys[x]["name"])
                self.keys[x]["uid"] = uid

        if any_invalid:
            uid = format % "invalid-uid"
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, "ungeneratable user id")
            byname[uid] = (keyid, "ungeneratable user id")

        return (byname, byuid)
1528 __all__.append('Keyring')
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @rtype: Keyring
    @return: the Keyring object for this keyring
    """
    q = session.query(Keyring).filter_by(keyring_name=keyring)

    try:
        ret = q.one()
    except NoResultFound:
        ret = None

    return ret
1550 __all__.append('get_keyring')
1552 ################################################################################
class KeyringACLMap(object):
    """Mapping between a keyring and an ACL (attributes set by the mapper)."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<KeyringACLMap %s>' % self.keyring_acl_map_id
1561 __all__.append('KeyringACLMap')
1563 ################################################################################
class DBChange(object):
    """A .changes upload known to the database (attributes set by the mapper)."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DBChange %s>' % self.changesname

    def clean_from_queue(self):
        session = DBConn().session().object_session(self)

        # Remove changes_pool_files entries
        self.poolfiles = []

        # Remove changes_pending_files references
        self.files = []

        # Clear out of queue
        self.in_queue = None
        self.approved_for_id = None
1585 __all__.append('DBChange')
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return: DBChange object for the given filename (C{None} if not present)
    """
    q = session.query(DBChange).filter_by(changesname=filename)

    try:
        ret = q.one()
    except NoResultFound:
        ret = None

    return ret
1610 __all__.append('get_dbchange')
1612 ################################################################################
class Location(ORMObject):
    """A pool location on disk (table 'location')."""

    def __init__(self, path = None, component = None):
        self.path = path
        self.component = component
        # the column 'type' should go away, see comment at mapper
        self.archive_type = 'pool'

    def properties(self):
        # NOTE(review): the tail of this list was truncated in the sample;
        # restored -- confirm against the location mapper.
        return ['path', 'location_id', 'archive_type', 'component', \
            'files_count']

    def not_null_constraints(self):
        return ['path', 'archive_type']
1628 __all__.append('Location')
def get_location(location, component=None, archive=None, session=None):
    """
    Returns Location object for the given combination of location, component
    and archive.

    @type location: string
    @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}

    @type component: string
    @param component: the component name (if None, no restriction applied)

    @type archive: string
    @param archive: the archive name (if None, no restriction applied)

    @rtype: Location / None
    @return: Either a Location object or None if one can't be found
    """
    q = session.query(Location).filter_by(path=location)

    if archive is not None:
        q = q.join(Archive).filter_by(archive_name=archive)

    if component is not None:
        q = q.join(Component).filter_by(component_name=component)

    try:
        ret = q.one()
    except NoResultFound:
        ret = None

    return ret
1662 __all__.append('get_location')
1664 ################################################################################
class Maintainer(ORMObject):
    """A package maintainer (table 'maintainer')."""

    def __init__(self, name = None):
        self.name = name

    def properties(self):
        return ['name', 'maintainer_id']

    def not_null_constraints(self):
        return ['name']

    def get_split_maintainer(self):
        # Empty 4-tuple when no name is set; otherwise delegate to
        # fix_maintainer() for the (rfc822, rfc2047, name, email) split.
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())
1682 __all__.append('Maintainer')
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """
    q = session.query(Maintainer).filter_by(name=name)

    try:
        ret = q.one()
    except NoResultFound:
        # Not known yet: insert a new row and make sure it gets an id.
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()
        ret = maintainer

    return ret
1716 __all__.append('get_or_set_maintainer')
def get_maintainer(maintainer_id, session=None):
    """
    Return the Maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer
    @return: the Maintainer with this C{maintainer_id}
    """
    return session.query(Maintainer).get(maintainer_id)
1733 __all__.append('get_maintainer')
1735 ################################################################################
class NewComment(object):
    """A ftpmaster comment on a package in the NEW queue (set by the mapper)."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
1744 __all__.append('NewComment')
def has_new_comment(package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """
    q = session.query(NewComment)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    # The comparison already yields a bool; no need to wrap it in bool().
    return q.count() > 0
1771 __all__.append('has_new_comment')
def get_new_comments(package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """
    q = session.query(NewComment)
    if package is not None: q = q.filter_by(package=package)
    if version is not None: q = q.filter_by(version=version)
    if comment_id is not None: q = q.filter_by(comment_id=comment_id)

    return q.all()
1803 __all__.append('get_new_comments')
1805 ################################################################################
class Override(object):
    """An override entry for a package in a suite (set by the mapper)."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<Override %s (%s)>' % (self.package, self.suite_id)
1814 __all__.append('Override')
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to. If
                  None, don't limit. Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
                      limit to. If None, don't limit. Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
                         limit to. If None, don't limit. Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """
    q = session.query(Override)
    q = q.filter_by(package=package)

    # Each optional filter accepts a scalar or a list; normalise to a list.
    if suite is not None:
        if not isinstance(suite, list): suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list): component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list): overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

    return q.all()
1861 __all__.append('get_override')
1864 ################################################################################
class OverrideType(object):
    """An override type, e.g. deb/udeb/dsc (set by the mapper)."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<OverrideType %s>' % self.overridetype
1873 __all__.append('OverrideType')
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: OverrideType
    @return: the OverrideType object for the given override type (or None)
    """
    q = session.query(OverrideType).filter_by(overridetype=override_type)

    try:
        ret = q.one()
    except NoResultFound:
        ret = None

    return ret
1898 __all__.append('get_override_type')
1900 ################################################################################
class DebContents(object):
    """A (package, file) contents entry for a deb (set by the mapper)."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Typo "DebConetnts" fixed in the repr string.
        return '<DebContents %s: %s>' % (self.package.package, self.file)
1909 __all__.append('DebContents')
class UdebContents(object):
    """A (package, file) contents entry for a udeb (set by the mapper)."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Typo "UdebConetnts" fixed in the repr string.
        return '<UdebContents %s: %s>' % (self.package.package, self.file)
1919 __all__.append('UdebContents')
class PendingBinContents(object):
    """Contents entry for a binary not yet accepted (set by the mapper)."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<PendingBinContents %s>' % self.contents_id
1928 __all__.append('PendingBinContents')
def insert_pending_content_paths(package,
                                 is_udeb,
                                 fullpaths,
                                 session=None):
    """
    Make sure given paths are temporarily associated with given
    package

    @type package: dict
    @param package: the package to associate with should have been read in from the binary control file
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session. If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code. If not passed, a commit
    will be performed at the end of the function

    @return: True upon success, False if there is a problem
    """
    privatetrans = False
    if session is None:
        session = DBConn().session()
        privatetrans = True

    try:
        arch = get_architecture(package['Architecture'], session)
        arch_id = arch.arch_id

        # Remove any already existing recorded files for this package
        q = session.query(PendingBinContents)
        q = q.filter_by(package=package['Package'])
        q = q.filter_by(version=package['Version'])
        q = q.filter_by(architecture=arch_id)
        q.delete()

        for fullpath in fullpaths:

            if fullpath.startswith( "./" ):
                fullpath = fullpath[2:]

            pca = PendingBinContents()
            pca.package = package['Package']
            pca.version = package['Version']
            # NOTE(review): attribute name for the path was elided in the
            # sample -- confirm against the pending_bin_contents mapper.
            pca.file = fullpath
            pca.architecture = arch_id

            if is_udeb:
                pca.type = 8 # gross
            else:
                pca.type = 7 # also gross
            session.add(pca)

        # Only commit if we set up the session ourself
        if privatetrans:
            session.commit()
            session.close()
        else:
            session.flush()

        return True
    except Exception:
        # The bound exception variable was unused; traceback prints the
        # active exception anyway.
        traceback.print_exc()

        # Only rollback if we set up the session ourself
        if privatetrans:
            session.rollback()
            session.close()

        return False
2003 __all__.append('insert_pending_content_paths')
2005 ################################################################################
class PolicyQueue(object):
    """A policy queue, e.g. NEW or byhand (set by the mapper)."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<PolicyQueue %s>' % self.queue_name
2014 __all__.append('PolicyQueue')
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue (or None)
    """
    q = session.query(PolicyQueue).filter_by(queue_name=queuename)

    try:
        ret = q.one()
    except NoResultFound:
        ret = None

    return ret
2039 __all__.append('get_policy_queue')
def get_policy_queue_from_path(pathname, session=None):
    """
    Returns PolicyQueue object for given C{path name}

    @type pathname: string
    @param pathname: The path

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue (or None)
    """
    q = session.query(PolicyQueue).filter_by(path=pathname)

    try:
        ret = q.one()
    except NoResultFound:
        ret = None

    return ret
2064 __all__.append('get_policy_queue_from_path')
2066 ################################################################################
class Priority(object):
    """A package priority, e.g. required/optional (set by the mapper)."""

    def __init__(self, *args, **kwargs):
        pass

    def __eq__(self, val):
        # Allow direct comparison against the priority name string.
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __repr__(self):
        return '<Priority %s (%s)>' % (self.priority, self.priority_id)
2087 __all__.append('Priority')
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority (or None)
    """
    q = session.query(Priority).filter_by(priority=priority)

    try:
        ret = q.one()
    except NoResultFound:
        ret = None

    return ret
2112 __all__.append('get_priority')
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """
    ret = {}
    q = session.query(Priority)
    for x in q.all():
        ret[x.priority] = x.priority_id

    return ret
2134 __all__.append('get_priorities')
2136 ################################################################################
class Section(object):
    """An archive section, e.g. admin/libs (set by the mapper)."""

    def __init__(self, *args, **kwargs):
        pass

    def __eq__(self, val):
        # Allow direct comparison against the section name string.
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __repr__(self):
        return '<Section %s>' % self.section
2157 __all__.append('Section')
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name (or None)
    """
    q = session.query(Section).filter_by(section=section)

    try:
        ret = q.one()
    except NoResultFound:
        ret = None

    return ret
2182 __all__.append('get_section')
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """
    ret = {}
    q = session.query(Section)
    for x in q.all():
        ret[x.section] = x.section_id

    return ret
2204 __all__.append('get_sections')
2206 ################################################################################
class DBSource(ORMObject):
    """A source package in the archive (table 'source')."""

    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date

    def properties(self):
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \
            'install_date', 'binaries_count']

    def not_null_constraints(self):
        # 'install_date' was previously listed twice; duplicate removed.
        return ['source', 'version', 'install_date', 'maintainer', \
            'changedby', 'poolfile']
2227 __all__.append('DBSource')
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
      1. exact match     => 1.0-3
      2. bin-only NMU    => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: list
    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: returns 1 if a source with expected version is found, otherwise 0
    """
    cnf = Config()
    ret = True

    # Strip a binNMU suffix so a bin-only NMU still matches its source.
    from daklib.regexes import re_bin_only_nmu
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))
        if suite != "any":
            # source must exist in suite X, or in some other suite that's
            # mapped to X, recursively... silent-maps are counted too,
            # unreleased-maps aren't.
            maps = cnf.ValueList("SuiteMappings")[:]
            maps.reverse()
            maps = [ m.split() for m in maps ]
            maps = [ (x[1], x[2]) for x in maps
                            if x[0] == "map" or x[0] == "silent-map" ]
            s = [suite]
            for x in maps:
                if x[1] in s and x[0] not in s:
                    s.append(x[0])

            q = q.filter(DBSource.suites.any(Suite.suite_name.in_(s)))

        if q.count() > 0:
            continue

        # No source found so return not ok
        ret = False
        break

    return ret
2288 __all__.append('source_exists')
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """
    return session.query(Suite).filter(Suite.sources.any(source=source)).all()
2304 __all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @type source: str
    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect. If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBSource objects for the given name (may be empty)
    """
    q = session.query(DBSource).filter_by(source=source)

    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)

    return q.all()
2339 __all__.append('get_sources_from_name')
# FIXME: This function fails badly if it finds more than 1 source package and
# its implementation is trivial enough to be inlined.
def get_source_in_suite(source, suite, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite: string
    @param suite: the suite name

    @rtype: DBSource or None
    @return: the version for I{source} in I{suite}
    """
    q = get_suite(suite, session).get_sources(source)
    try:
        return q.one()
    except NoResultFound:
        return None
2368 __all__.append('get_source_in_suite')
2370 ################################################################################
def add_dsc_to_db(u, filename, session=None):
    # Record a source (.dsc) upload in the database: the DBSource row, its
    # pool files / dsc_files entries and the src_uploaders records.
    # NOTE(review): this block is elided in the reviewed sample -- statements
    # that create 'source', 'pfs', 'dscfile', 'df', 'dfentry', 'added_ids' and
    # 'su' are not visible here; confirm against the upstream dak source.
    entry = u.pkg.files[filename]
    source.source = u.pkg.dsc["source"]
    source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
    source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
    source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    source.install_date = datetime.now().date()
    dsc_component = entry["component"]
    dsc_location_id = entry["location id"]
    # "yes" in the dsc enables Debian-Maintainer uploads for this source.
    source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")
    # Set up a new poolfile if necessary
    if not entry.has_key("files id") or not entry["files id"]:
        # Pool-relative path, e.g. "pool/main/f/foo/" + "foo_1.0.dsc".
        filename = entry["pool name"] + filename
        poolfile = add_poolfile(filename, entry, dsc_location_id, session)
        pfs.append(poolfile)
        entry["files id"] = poolfile.file_id
    source.poolfile_id = entry["files id"]
    # Associate the source with every suite named in the .changes distribution.
    suite_names = u.pkg.changes["distribution"].keys()
    source.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()
    # Add the source files to the DB (files and dsc_files)
    dscfile.source_id = source.source_id
    dscfile.poolfile_id = entry["files id"]
    session.add(dscfile)
    for dsc_file, dentry in u.pkg.dsc_files.items():
        df.source_id = source.source_id
        # If the .orig tarball is already in the pool, it's
        # files id is stored in dsc_files by check_dsc().
        files_id = dentry.get("files id", None)
        # Find the entry in the files hash
        # TODO: Bail out here properly
        for f, e in u.pkg.files.items():
        if files_id is None:
            filename = dfentry["pool name"] + dsc_file
            (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
            # FIXME: needs to check for -1/-2 and or handle exception
            if found and obj is not None:
                files_id = obj.file_id
            # If still not found, add it
            if files_id is None:
                # HACK: Force sha1sum etc into dentry
                dentry["sha1sum"] = dfentry["sha1sum"]
                dentry["sha256sum"] = dfentry["sha256sum"]
                poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
                pfs.append(poolfile)
                files_id = poolfile.file_id
        # NOTE(review): the branch below runs when a files id already existed
        # (the joining 'else:' is not visible in this sample).
        poolfile = get_poolfile_by_id(files_id, session)
        if poolfile is None:
            utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
        pfs.append(poolfile)
        df.poolfile_id = files_id
    # Add the src_uploaders to the DB
    uploader_ids = [source.maintainer_id]
    if u.pkg.dsc.has_key("uploaders"):
        # Uploaders are comma-separated; the replace/split pair tolerates
        # commas inside maintainer names by splitting after '>'.
        for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
            uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)
    for up_id in uploader_ids:
        if added_ids.has_key(up_id):
            # Duplicate uploader entries are warned about, not inserted twice.
            utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))
        su.maintainer_id = up_id
        su.source_id = source.source_id
    return source, dsc_component, dsc_location_id, pfs
2478 __all__.append('add_dsc_to_db')
def add_deb_to_db(u, filename, session=None):
    """
    Contrary to what you might expect, this routine deals with both
    debs and udebs. That info is in 'dbtype', whilst 'type' is
    'deb' for both of them

    NOTE(review): this block is elided in the reviewed sample -- the statement
    creating 'bin' and the final session.add/flush are not visible here;
    confirm against the upstream dak source.
    """
    entry = u.pkg.files[filename]
    bin.package = entry["package"]
    bin.version = entry["version"]
    bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
    bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    bin.arch_id = get_architecture(entry["architecture"], session).arch_id
    bin.binarytype = entry["dbtype"]
    # Resolve the pool location for the binary's file.
    filename = entry["pool name"] + filename
    fullpath = os.path.join(cnf["Dir::Pool"], filename)
    if not entry.get("location id", None):
        entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id
    if entry.get("files id", None):
        # NOTE(review): 'bin.poolfile_id' is read here before it is assigned
        # on the next line -- looks like a pre-existing bug; confirm upstream.
        poolfile = get_poolfile_by_id(bin.poolfile_id)
        bin.poolfile_id = entry["files id"]
    # NOTE(review): the branch below runs when no files id existed yet
    # (the joining 'else:' is not visible in this sample).
    poolfile = add_poolfile(filename, entry, entry["location id"], session)
    bin.poolfile_id = entry["files id"] = poolfile.file_id
    # The binary must map to exactly one known source package/version.
    bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
    if len(bin_sources) != 1:
        raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
                                  (bin.package, bin.version, entry["architecture"],
                                   filename, bin.binarytype, u.pkg.changes["fingerprint"])
    bin.source_id = bin_sources[0].source_id
    # Add and flush object so it has an ID
    suite_names = u.pkg.changes["distribution"].keys()
    bin.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()
    # Deal with contents - disabled for now
    #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
    #    print "REJECT\nCould not determine contents of package %s" % bin.package
    #    session.rollback()
    #    raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
2538 __all__.append('add_deb_to_db')
2540 ################################################################################
class SourceACL(object):
    """An ACL governing source uploads (set by the mapper)."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SourceACL %s>' % self.source_acl_id
2549 __all__.append('SourceACL')
2551 ################################################################################
class SrcFormat(object):
    """ORM class for the src_format table (see DBConn.__setupmappers)."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SrcFormat %s>' % (self.format_name)

__all__.append('SrcFormat')
2562 ################################################################################
class SrcUploader(object):
    """ORM class for the src_uploaders table (see DBConn.__setupmappers)."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SrcUploader %s>' % self.uploader_id

__all__.append('SrcUploader')
2573 ################################################################################
# (display name, Suite attribute) pairs, iterated in this fixed order when
# rendering a Suite as "Field: value" lines (see the loop over SUITE_FIELDS
# in the Suite class below).
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
2591 # Why the heck don't we have any UNIQUE constraints in table suite?
2592 # TODO: Add UNIQUE constraints for appropriate columns.
class Suite(ORMObject):
    """A single distribution suite, mapped to the 'suite' table.

    Inherits generic ORM helpers from ORMObject; properties() and
    not_null_constraints() feed its machinery.
    """

    def __init__(self, suite_name = None, version = None):
        self.suite_name = suite_name
        self.version = version

    def properties(self):
        # Attributes exposed by ORMObject's generic support.
        return ['suite_name', 'version', 'sources_count', 'binaries_count']

    def not_null_constraints(self):
        return ['suite_name', 'version']

    def __eq__(self, val):
        # Allow direct comparison against a plain suite-name string.
        if isinstance(val, str):
            return (self.suite_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.suite_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def details(self):
        """Return a multi-line "Field: value" description of this suite.

        Iterates SUITE_FIELDS in order; attributes the suite lacks are
        rendered with the value None.
        """
        ret = []
        for disp, field in SUITE_FIELDS:
            val = getattr(self, field, None)
            ret.append("%s: %s" % (disp, val))
        return "\n".join(ret)

    def get_architectures(self, skipsrc=False, skipall=False):
        """
        Returns list of Architecture objects

        @type skipsrc: boolean
        @param skipsrc: Whether to skip returning the 'source' architecture entry

        @type skipall: boolean
        @param skipall: Whether to skip returning the 'all' architecture entry

        @rtype: list
        @return: list of Architecture objects for the given name (may be empty)
        """

        q = object_session(self).query(Architecture).with_parent(self)
        # Only exclude the pseudo-architectures when the caller asked for
        # it; applying both filters unconditionally would ignore the flags.
        if skipsrc:
            q = q.filter(Architecture.arch_string != 'source')
        if skipall:
            q = q.filter(Architecture.arch_string != 'all')
        return q.order_by(Architecture.arch_string).all()

    def get_sources(self, source):
        """
        Returns a query object representing DBSource that is part of C{suite}.

          - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}

        @type source: string
        @param source: source package name

        @rtype: sqlalchemy.orm.query.Query
        @return: a query of DBSource
        """

        session = object_session(self)
        # Restrict to sources associated with this suite; the name alone
        # would match the package in every suite.
        return session.query(DBSource).filter_by(source = source). \
            filter(DBSource.suites.contains(self))

__all__.append('Suite')
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Suite
    @return: Suite object for the requested suite name (None if not present)
    """
    # NOTE(review): upstream dak decorates these lookup helpers with
    # @session_wrapper so a temporary session is created when none is
    # passed -- confirm the decorator is present in the full file.

    q = session.query(Suite).filter_by(suite_name=suite)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_suite')
2693 ################################################################################
2695 # TODO: should be removed because the implementation is too trivial
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name

    @type suite: string
    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Architecture objects for the given name (may be empty)
    """

    # Thin delegation to Suite.get_architectures (see TODO above).
    return get_suite(suite, session).get_architectures(skipsrc, skipall)

__all__.append('get_suite_architectures')
2724 ################################################################################
class SuiteSrcFormat(object):
    """ORM class for the suite_src_formats join table (see DBConn.__setupmappers)."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)

__all__.append('SuiteSrcFormat')
def get_suite_src_formats(suite, session=None):
    """
    Returns list of allowed SrcFormat for C{suite}.

    @type suite: string
    @param suite: Suite name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: the list of allowed source formats for I{suite}
    """
    # NOTE(review): upstream wraps this in @session_wrapper to create a
    # temporary session when none is supplied -- confirm in the full file.

    q = session.query(SrcFormat)
    q = q.join(SuiteSrcFormat)
    q = q.join(Suite).filter_by(suite_name=suite)
    q = q.order_by('format_name')

    # Materialise the query; without this the documented list is never
    # returned to the caller.
    return q.all()

__all__.append('get_suite_src_formats')
2760 ################################################################################
class Uid(ORMObject):
    """ORM class for the uid table (a key's uid/name pair)."""

    def __init__(self, uid = None, name = None):
        self.uid = uid
        self.name = name

    def __eq__(self, val):
        # Allow direct comparison against a plain uid string.
        if isinstance(val, str):
            return (self.uid == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.uid != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        return ['uid', 'name', 'fingerprint']

    def not_null_constraints(self):
        # NOTE(review): body reconstructed; 'uid' is the natural key used
        # by get_or_set_uid below -- confirm against upstream.
        return ['uid']

__all__.append('Uid')
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: Uid
    @return: the uid object for the given uidname
    """

    q = session.query(Uid).filter_by(uid=uidname)

    try:
        ret = q.one()
    except NoResultFound:
        # Not present yet: insert a new row and make it visible to the
        # caller (per the contract above, a commit happens for private
        # sessions, otherwise only a flush).
        uid = Uid()
        uid.uid = uidname
        session.add(uid)
        session.commit_or_flush()
        ret = uid

    return ret

__all__.append('get_or_set_uid')
def get_uid_from_fingerprint(fpr, session=None):
    """Return the Uid owning fingerprint C{fpr}, or None if not found."""
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_uid_from_fingerprint')
2833 ################################################################################
class UploadBlock(object):
    """ORM class for the upload_blocks table (see DBConn.__setupmappers)."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)

__all__.append('UploadBlock')
2844 ################################################################################
class DBConn(object):
    """
    database module init.

    Borg-style shared-state singleton: every DBConn() shares
    __shared_state, so the engine and session factory are created only
    once per process.
    """
    __shared_state = {}

    def __init__(self, *args, **kwargs):
        self.__dict__ = self.__shared_state

        if not getattr(self, 'initialised', False):
            self.initialised = True
            # Passing debug=... enables SQL echo on the engine.
            self.debug = 'debug' in kwargs
            self.__createconn()
2860 def __setuptables(self):
2861 tables_with_primary = (
2872 'changes_pending_binaries',
2873 'changes_pending_files',
2874 'changes_pending_source',
2884 'pending_bin_contents',
2896 # The following tables have primary keys but sqlalchemy
2897 # version 0.5 fails to reflect them correctly with database
2898 # versions before upgrade #41.
2900 #'build_queue_files',
2903 tables_no_primary = (
2905 'changes_pending_files_map',
2906 'changes_pending_source_files',
2907 'changes_pool_files',
2910 'suite_architectures',
2911 'suite_src_formats',
2912 'suite_build_queue_copy',
2914 # see the comment above
2916 'build_queue_files',
2920 'almost_obsolete_all_associations',
2921 'almost_obsolete_src_associations',
2922 'any_associations_source',
2923 'bin_assoc_by_arch',
2924 'bin_associations_binaries',
2925 'binaries_suite_arch',
2926 'binfiles_suite_component_arch',
2929 'newest_all_associations',
2930 'newest_any_associations',
2932 'newest_src_association',
2933 'obsolete_all_associations',
2934 'obsolete_any_associations',
2935 'obsolete_any_by_all_associations',
2936 'obsolete_src_associations',
2938 'src_associations_bin',
2939 'src_associations_src',
2940 'suite_arch_by_name',
2943 # Sqlalchemy version 0.5 fails to reflect the SERIAL type
2944 # correctly and that is why we have to use a workaround. It can
2945 # be removed as soon as we switch to version 0.6.
2946 for table_name in tables_with_primary:
2947 table = Table(table_name, self.db_meta, \
2948 Column('id', Integer, primary_key = True), \
2949 autoload=True, useexisting=True)
2950 setattr(self, 'tbl_%s' % table_name, table)
2952 for table_name in tables_no_primary:
2953 table = Table(table_name, self.db_meta, autoload=True)
2954 setattr(self, 'tbl_%s' % table_name, table)
2956 for view_name in views:
2957 view = Table(view_name, self.db_meta, autoload=True)
2958 setattr(self, 'view_%s' % view_name, view)
    def __setupmappers(self):
        """Map the ORM classes onto the reflected tables.

        Mostly 1:1 column aliases (e.g. 'id' -> '<name>_id') plus the
        relations between tables; mappers flagged with
        'extension = validator' get not-null validation hooked in.
        """
        mapper(Architecture, self.tbl_architecture,
            properties = dict(arch_id = self.tbl_architecture.c.id,
               suites = relation(Suite, secondary=self.tbl_suite_architectures,
                   order_by='suite_name',
                   backref=backref('architectures', order_by='arch_string'))),
            extension = validator)

        mapper(Archive, self.tbl_archive,
               properties = dict(archive_id = self.tbl_archive.c.id,
                                 archive_name = self.tbl_archive.c.name))

        # Contents tables (pending/deb/udeb) share the same column layout.
        mapper(PendingBinContents, self.tbl_pending_bin_contents,
               properties = dict(contents_id = self.tbl_pending_bin_contents.c.id,
                                 filename = self.tbl_pending_bin_contents.c.filename,
                                 package = self.tbl_pending_bin_contents.c.package,
                                 version = self.tbl_pending_bin_contents.c.version,
                                 arch = self.tbl_pending_bin_contents.c.arch,
                                 otype = self.tbl_pending_bin_contents.c.type))

        mapper(DebContents, self.tbl_deb_contents,
               properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
                                 package=self.tbl_deb_contents.c.package,
                                 suite=self.tbl_deb_contents.c.suite,
                                 arch=self.tbl_deb_contents.c.arch,
                                 section=self.tbl_deb_contents.c.section,
                                 filename=self.tbl_deb_contents.c.filename))

        mapper(UdebContents, self.tbl_udeb_contents,
               properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
                                 package=self.tbl_udeb_contents.c.package,
                                 suite=self.tbl_udeb_contents.c.suite,
                                 arch=self.tbl_udeb_contents.c.arch,
                                 section=self.tbl_udeb_contents.c.section,
                                 filename=self.tbl_udeb_contents.c.filename))

        mapper(BuildQueue, self.tbl_build_queue,
               properties = dict(queue_id = self.tbl_build_queue.c.id))

        mapper(BuildQueueFile, self.tbl_build_queue_files,
               properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
                                 poolfile = relation(PoolFile, backref='buildqueueinstances')))

        mapper(DBBinary, self.tbl_binaries,
               properties = dict(binary_id = self.tbl_binaries.c.id,
                                 package = self.tbl_binaries.c.package,
                                 version = self.tbl_binaries.c.version,
                                 maintainer_id = self.tbl_binaries.c.maintainer,
                                 maintainer = relation(Maintainer),
                                 source_id = self.tbl_binaries.c.source,
                                 source = relation(DBSource, backref='binaries'),
                                 arch_id = self.tbl_binaries.c.architecture,
                                 architecture = relation(Architecture),
                                 poolfile_id = self.tbl_binaries.c.file,
                                 poolfile = relation(PoolFile, backref=backref('binary', uselist = False)),
                                 binarytype = self.tbl_binaries.c.type,
                                 fingerprint_id = self.tbl_binaries.c.sig_fpr,
                                 fingerprint = relation(Fingerprint),
                                 install_date = self.tbl_binaries.c.install_date,
                                 suites = relation(Suite, secondary=self.tbl_bin_associations,
                                     backref=backref('binaries', lazy='dynamic'))),
                extension = validator)

        mapper(BinaryACL, self.tbl_binary_acl,
               properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))

        mapper(BinaryACLMap, self.tbl_binary_acl_map,
               properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
                                 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
                                 architecture = relation(Architecture)))

        mapper(Component, self.tbl_component,
               properties = dict(component_id = self.tbl_component.c.id,
                                 component_name = self.tbl_component.c.name),
               extension = validator)

        mapper(DBConfig, self.tbl_config,
               properties = dict(config_id = self.tbl_config.c.id))

        mapper(DSCFile, self.tbl_dsc_files,
               properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
                                 source_id = self.tbl_dsc_files.c.source,
                                 source = relation(DBSource),
                                 poolfile_id = self.tbl_dsc_files.c.file,
                                 poolfile = relation(PoolFile)))

        mapper(PoolFile, self.tbl_files,
               properties = dict(file_id = self.tbl_files.c.id,
                                 filesize = self.tbl_files.c.size,
                                 location_id = self.tbl_files.c.location,
                                 location = relation(Location,
                                     # using lazy='dynamic' in the back
                                     # reference because we have A LOT of
                                     # files in one location
                                     backref=backref('files', lazy='dynamic'))),
               extension = validator)

        mapper(Fingerprint, self.tbl_fingerprint,
               properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
                                 uid_id = self.tbl_fingerprint.c.uid,
                                 uid = relation(Uid),
                                 keyring_id = self.tbl_fingerprint.c.keyring,
                                 keyring = relation(Keyring),
                                 source_acl = relation(SourceACL),
                                 binary_acl = relation(BinaryACL)),
               extension = validator)

        mapper(Keyring, self.tbl_keyrings,
               properties = dict(keyring_name = self.tbl_keyrings.c.name,
                                 keyring_id = self.tbl_keyrings.c.id))

        mapper(DBChange, self.tbl_changes,
               properties = dict(change_id = self.tbl_changes.c.id,
                                 poolfiles = relation(PoolFile,
                                                      secondary=self.tbl_changes_pool_files,
                                                      backref="changeslinks"),
                                 seen = self.tbl_changes.c.seen,
                                 source = self.tbl_changes.c.source,
                                 binaries = self.tbl_changes.c.binaries,
                                 architecture = self.tbl_changes.c.architecture,
                                 distribution = self.tbl_changes.c.distribution,
                                 urgency = self.tbl_changes.c.urgency,
                                 maintainer = self.tbl_changes.c.maintainer,
                                 changedby = self.tbl_changes.c.changedby,
                                 date = self.tbl_changes.c.date,
                                 version = self.tbl_changes.c.version,
                                 files = relation(ChangePendingFile,
                                                  secondary=self.tbl_changes_pending_files_map,
                                                  backref="changesfile"),
                                 in_queue_id = self.tbl_changes.c.in_queue,
                                 in_queue = relation(PolicyQueue,
                                                     primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
                                 approved_for_id = self.tbl_changes.c.approved_for))

        mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
               properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))

        mapper(ChangePendingFile, self.tbl_changes_pending_files,
               properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
                                 filename = self.tbl_changes_pending_files.c.filename,
                                 size = self.tbl_changes_pending_files.c.size,
                                 md5sum = self.tbl_changes_pending_files.c.md5sum,
                                 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
                                 sha256sum = self.tbl_changes_pending_files.c.sha256sum))

        # Two Maintainer relations (maintainer vs changedby) need explicit
        # primaryjoins since both FKs point at the same table.
        mapper(ChangePendingSource, self.tbl_changes_pending_source,
               properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
                                 change = relation(DBChange),
                                 maintainer = relation(Maintainer,
                                                       primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
                                 changedby = relation(Maintainer,
                                                      primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
                                 fingerprint = relation(Fingerprint),
                                 source_files = relation(ChangePendingFile,
                                                         secondary=self.tbl_changes_pending_source_files,
                                                         backref="pending_sources")))

        mapper(KeyringACLMap, self.tbl_keyring_acl_map,
               properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
                                 keyring = relation(Keyring, backref="keyring_acl_map"),
                                 architecture = relation(Architecture)))

        mapper(Location, self.tbl_location,
               properties = dict(location_id = self.tbl_location.c.id,
                                 component_id = self.tbl_location.c.component,
                                 component = relation(Component, \
                                     backref=backref('location', uselist = False)),
                                 archive_id = self.tbl_location.c.archive,
                                 archive = relation(Archive),
                                 # FIXME: the 'type' column is old cruft and
                                 # should be removed in the future.
                                 archive_type = self.tbl_location.c.type),
               extension = validator)

        mapper(Maintainer, self.tbl_maintainer,
               properties = dict(maintainer_id = self.tbl_maintainer.c.id,
                                 maintains_sources = relation(DBSource, backref='maintainer',
                                     primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
                                 changed_sources = relation(DBSource, backref='changedby',
                                     primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
                extension = validator)

        mapper(NewComment, self.tbl_new_comments,
               properties = dict(comment_id = self.tbl_new_comments.c.id))

        mapper(Override, self.tbl_override,
               properties = dict(suite_id = self.tbl_override.c.suite,
                                 suite = relation(Suite),
                                 package = self.tbl_override.c.package,
                                 component_id = self.tbl_override.c.component,
                                 component = relation(Component),
                                 priority_id = self.tbl_override.c.priority,
                                 priority = relation(Priority),
                                 section_id = self.tbl_override.c.section,
                                 section = relation(Section),
                                 overridetype_id = self.tbl_override.c.type,
                                 overridetype = relation(OverrideType)))

        mapper(OverrideType, self.tbl_override_type,
               properties = dict(overridetype = self.tbl_override_type.c.type,
                                 overridetype_id = self.tbl_override_type.c.id))

        mapper(PolicyQueue, self.tbl_policy_queue,
               properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))

        mapper(Priority, self.tbl_priority,
               properties = dict(priority_id = self.tbl_priority.c.id))

        mapper(Section, self.tbl_section,
               properties = dict(section_id = self.tbl_section.c.id,
                                 section=self.tbl_section.c.section))

        mapper(DBSource, self.tbl_source,
               properties = dict(source_id = self.tbl_source.c.id,
                                 version = self.tbl_source.c.version,
                                 maintainer_id = self.tbl_source.c.maintainer,
                                 poolfile_id = self.tbl_source.c.file,
                                 poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
                                 fingerprint_id = self.tbl_source.c.sig_fpr,
                                 fingerprint = relation(Fingerprint),
                                 changedby_id = self.tbl_source.c.changedby,
                                 srcfiles = relation(DSCFile,
                                                     primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
                                 suites = relation(Suite, secondary=self.tbl_src_associations,
                                     backref=backref('sources', lazy='dynamic')),
                                 srcuploaders = relation(SrcUploader)),
               extension = validator)

        mapper(SourceACL, self.tbl_source_acl,
               properties = dict(source_acl_id = self.tbl_source_acl.c.id))

        mapper(SrcFormat, self.tbl_src_format,
               properties = dict(src_format_id = self.tbl_src_format.c.id,
                                 format_name = self.tbl_src_format.c.format_name))

        mapper(SrcUploader, self.tbl_src_uploaders,
               properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
                                 source_id = self.tbl_src_uploaders.c.source,
                                 source = relation(DBSource,
                                                   primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
                                 maintainer_id = self.tbl_src_uploaders.c.maintainer,
                                 maintainer = relation(Maintainer,
                                                       primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))

        mapper(Suite, self.tbl_suite,
               properties = dict(suite_id = self.tbl_suite.c.id,
                                 policy_queue = relation(PolicyQueue),
                                 copy_queues = relation(BuildQueue,
                                     secondary=self.tbl_suite_build_queue_copy)),
                extension = validator)

        mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
               properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
                                 suite = relation(Suite, backref='suitesrcformats'),
                                 src_format_id = self.tbl_suite_src_formats.c.src_format,
                                 src_format = relation(SrcFormat)))

        mapper(Uid, self.tbl_uid,
               properties = dict(uid_id = self.tbl_uid.c.id,
                                 fingerprint = relation(Fingerprint)),
               extension = validator)

        mapper(UploadBlock, self.tbl_upload_blocks,
               properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
                                 fingerprint = relation(Fingerprint, backref="uploadblocks"),
                                 uid = relation(Uid, backref="uploadblocks")))
3228 ## Connection functions
3229 def __createconn(self):
3230 from config import Config
3234 connstr = "postgres://%s" % cnf["DB::Host"]
3235 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3236 connstr += ":%s" % cnf["DB::Port"]
3237 connstr += "/%s" % cnf["DB::Name"]
3240 connstr = "postgres:///%s" % cnf["DB::Name"]
3241 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3242 connstr += "?port=%s" % cnf["DB::Port"]
3244 self.db_pg = create_engine(connstr, echo=self.debug)
3245 self.db_meta = MetaData()
3246 self.db_meta.bind = self.db_pg
3247 self.db_smaker = sessionmaker(bind=self.db_pg,
3251 self.__setuptables()
3252 self.__setupmappers()
3255 return self.db_smaker()
3257 __all__.append('DBConn')