5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
47 import simplejson as json
49 from datetime import datetime, timedelta
50 from errno import ENOENT
51 from tempfile import mkstemp, mkdtemp
53 from inspect import getargspec
56 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc
57 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
58 backref, MapperExtension, EXT_CONTINUE, object_mapper
59 from sqlalchemy import types as sqltypes
61 # Don't remove this, we re-export the exceptions to scripts which import us
62 from sqlalchemy.exc import *
63 from sqlalchemy.orm.exc import NoResultFound
65 # Only import Config until Queue stuff is changed to store its config
67 from config import Config
68 from textutils import fix_maintainer
69 from dak_exceptions import DBUpdateError, NoSourceFieldError
# suppress some deprecation warnings in squeeze related to sqlalchemy
# NOTE(review): the category argument of the first call was truncated in this
# copy; SAWarning (re-exported by "from sqlalchemy.exc import *" above) is the
# category used by the second, matching call.
warnings.filterwarnings('ignore', \
    "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
    SAWarning)

# TODO: sqlalchemy needs some extra configuration to correctly reflect
# the ind_deb_contents_* indexes - we ignore the warnings at the moment
warnings.filterwarnings("ignore", 'Predicate of partial index', SAWarning)
81 ################################################################################
# Patch in support for the debversion field type so that it works during
# reflection.  sqlalchemy 0.6 provides UserDefinedType; 0.5 only has the
# plain TypeEngine base class, so fall back to it when the attribute is
# missing.
try:
    # that is for sqlalchemy 0.6
    UserDefinedType = sqltypes.UserDefinedType
except AttributeError:
    # this one for sqlalchemy 0.5
    UserDefinedType = sqltypes.TypeEngine
class DebVersion(UserDefinedType):
    """
    Column type for the PostgreSQL 'debversion' extension type.  The value
    is passed through unchanged in both directions; only the DDL name needs
    to be supplied.
    """

    def get_col_spec(self):
        # Type name as it appears in CREATE TABLE statements.
        return "DEBVERSION"

    def bind_processor(self, dialect):
        # No conversion needed when sending values to the database.
        return None

    # ' = None' is needed for sqlalchemy 0.5:
    def result_processor(self, dialect, coltype = None):
        # No conversion needed when reading values from the database.
        return None
# Register the debversion type with the postgres dialect so that table
# reflection recognises it.  Refuse to run on untested SQLAlchemy versions.
sa_major_version = sqlalchemy.__version__[0:3]
if sa_major_version in ["0.5", "0.6"]:
    from sqlalchemy.databases import postgres
    postgres.ischema_names['debversion'] = DebVersion
else:
    raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
111 ################################################################################
113 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
115 ################################################################################
def session_wrapper(fn):
    """
    Wrapper around common ".., session=None):" handling. If the wrapped
    function is called without passing 'session', we create a local one
    and destroy it when the function ends.

    Also attaches a commit_or_flush method to the session; if we created a
    local session, this is a synonym for session.commit(), otherwise it is a
    synonym for session.flush().
    """

    def wrapped(*args, **kwargs):
        private_transaction = False

        # Find the session object
        session = kwargs.get('session')

        if session is None:
            if len(args) <= len(getargspec(fn)[0]) - 1:
                # No session specified as last argument or in kwargs
                private_transaction = True
                session = kwargs['session'] = DBConn().session()
            else:
                # Session is last argument in args
                session = args[-1]
                if session is None:
                    args = list(args)
                    session = args[-1] = DBConn().session()
                    private_transaction = True

        if private_transaction:
            session.commit_or_flush = session.commit
        else:
            session.commit_or_flush = session.flush

        try:
            return fn(*args, **kwargs)
        finally:
            if private_transaction:
                # We created a session; close it.
                session.close()

    wrapped.__doc__ = fn.__doc__
    # __name__ is equivalent to func_name on Python 2 and also works on
    # newer interpreters.
    wrapped.__name__ = fn.__name__

    return wrapped
164 __all__.append('session_wrapper')
166 ################################################################################
168 class ORMObject(object):
170 ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
171 derived classes must implement the properties() method.
174 def properties(self):
176 This method should be implemented by all derived classes and returns a
177 list of the important properties. The properties 'created' and
178 'modified' will be added automatically. A suffix '_count' should be
179 added to properties that are lists or query objects. The most important
180 property name should be returned as the first element in the list
181 because it is used by repr().
187 Returns a JSON representation of the object based on the properties
188 returned from the properties() method.
191 # add created and modified
192 all_properties = self.properties() + ['created', 'modified']
193 for property in all_properties:
194 # check for list or query
195 if property[-6:] == '_count':
196 real_property = property[:-6]
197 if not hasattr(self, real_property):
199 value = getattr(self, real_property)
200 if hasattr(value, '__len__'):
203 elif hasattr(value, 'count'):
205 value = value.count()
207 raise KeyError('Do not understand property %s.' % property)
209 if not hasattr(self, property):
212 value = getattr(self, property)
216 elif isinstance(value, ORMObject):
217 # use repr() for ORMObject types
220 # we want a string for all other types because json cannot
223 data[property] = value
224 return json.dumps(data)
228 Returns the name of the class.
230 return type(self).__name__
234 Returns a short string representation of the object using the first
235 element from the properties() method.
237 primary_property = self.properties()[0]
238 value = getattr(self, primary_property)
239 return '<%s %s>' % (self.classname(), str(value))
243 Returns a human readable form of the object using the properties()
246 return '<%s %s>' % (self.classname(), self.json())
248 def not_null_constraints(self):
250 Returns a list of properties that must be not NULL. Derived classes
251 should override this method if needed.
255 validation_message = \
256 "Validation failed because property '%s' must not be empty in object\n%s"
260 This function validates the not NULL constraints as returned by
261 not_null_constraints(). It raises the DBUpdateError exception if
264 for property in self.not_null_constraints():
265 # TODO: It is a bit awkward that the mapper configuration allow
266 # directly setting the numeric _id columns. We should get rid of it
268 if hasattr(self, property + '_id') and \
269 getattr(self, property + '_id') is not None:
271 if not hasattr(self, property) or getattr(self, property) is None:
272 raise DBUpdateError(self.validation_message % \
273 (property, str(self)))
277 def get(cls, primary_key, session = None):
279 This is a support function that allows getting an object by its primary
282 Architecture.get(3[, session])
284 instead of the more verbose
286 session.query(Architecture).get(3)
288 return session.query(cls).get(primary_key)
290 def session(self, replace = False):
292 Returns the current session that is associated with the object. May
293 return None is object is in detached state.
296 return object_session(self)
298 def clone(self, session = None):
300 Clones the current object in a new session and returns the new clone. A
301 fresh session is created if the optional session parameter is not
302 provided. The function will fail if a session is provided and has
305 RATIONALE: SQLAlchemy's session is not thread safe. This method clones
306 an existing object to allow several threads to work with their own
307 instances of an ORMObject.
309 WARNING: Only persistent (committed) objects can be cloned. Changes
310 made to the original object that are not committed yet will get lost.
311 The session of the new object will always be rolled back to avoid
315 if self.session() is None:
316 raise RuntimeError( \
317 'Method clone() failed for detached object:\n%s' % self)
318 self.session().flush()
319 mapper = object_mapper(self)
320 primary_key = mapper.primary_key_from_instance(self)
321 object_class = self.__class__
323 session = DBConn().session()
324 elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
325 raise RuntimeError( \
326 'Method clone() failed due to unflushed changes in session.')
327 new_object = session.query(object_class).get(primary_key)
329 if new_object is None:
330 raise RuntimeError( \
331 'Method clone() failed for non-persistent object:\n%s' % self)
334 __all__.append('ORMObject')
336 ################################################################################
class Validator(MapperExtension):
    """
    This class calls the validate() method for each instance for the
    'before_update' and 'before_insert' events. A global object validator is
    used for configuring the individual mappers.
    """

    def before_update(self, mapper, connection, instance):
        # Let the instance check its own not-NULL constraints before the
        # UPDATE is emitted; EXT_CONTINUE hands control back to SQLAlchemy.
        instance.validate()
        return EXT_CONTINUE

    def before_insert(self, mapper, connection, instance):
        instance.validate()
        return EXT_CONTINUE

# Shared extension instance used by the mapper configuration below.
validator = Validator()
355 ################################################################################
class Architecture(ORMObject):
    """
    A Debian architecture known to the archive (e.g. 'amd64', 'source').
    Compares equal/unequal directly against plain strings for convenience.
    """

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Support comparison against bare strings; for anything else defer
        # to the normal comparison machinery.
        if not isinstance(val, str):
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        if not isinstance(val, str):
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # First entry is used by ORMObject.__repr__().
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']
380 __all__.append('Architecture')
@session_wrapper
def get_architecture(architecture, session=None):
    """
    Returns database id for given C{architecture}.

    @type architecture: string
    @param architecture: The name of the architecture

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Architecture
    @return: Architecture object for the given arch (None if not present)
    """

    q = session.query(Architecture).filter_by(arch_string=architecture)

    try:
        return q.one()
    except NoResultFound:
        return None
405 __all__.append('get_architecture')
407 # TODO: should be removed because the implementation is too trivial
def get_architecture_suites(architecture, session=None):
    """
    Returns list of Suite objects for given C{architecture} name

    @type architecture: str
    @param architecture: Architecture name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Suite objects for the given name (may be empty)
    """
    arch = get_architecture(architecture, session)
    return arch.suites
426 __all__.append('get_architecture_suites')
428 ################################################################################
class Archive(object):
    """
    Mapped class for the archive table.  Column attributes (archive_name,
    ...) are attached by the SQLAlchemy mapper configuration elsewhere in
    this module, so the constructor accepts and ignores any arguments.
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<Archive %s>' % self.archive_name
437 __all__.append('Archive')
@session_wrapper
def get_archive(archive, session=None):
    """
    returns database id for given C{archive}.

    @type archive: string
    @param archive: the name of the archive

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Archive
    @return: Archive object for the given name (None if not present)
    """
    # Archive names are stored lower-case.
    archive = archive.lower()

    q = session.query(Archive).filter_by(archive_name=archive)

    try:
        return q.one()
    except NoResultFound:
        return None
464 __all__.append('get_archive')
466 ################################################################################
class BinContents(object):
    """
    Mapped class for the bin_contents table.  Column attributes (binary,
    filename, ...) are attached by the SQLAlchemy mapper configuration
    elsewhere in this module.
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BinContents (%s, %s)>' % (self.binary, self.filename)
475 __all__.append('BinContents')
477 ################################################################################
class DBBinary(ORMObject):
    """
    A binary package known to the archive (one row of the binaries table).
    """

    def __init__(self, package = None, source = None, version = None, \
        maintainer = None, architecture = None, poolfile = None, \
        binarytype = 'deb'):
        self.package = package
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.architecture = architecture
        self.poolfile = poolfile
        self.binarytype = binarytype

    def properties(self):
        # First entry is used by ORMObject.__repr__().
        return ['package', 'version', 'maintainer', 'source', 'architecture', \
            'poolfile', 'binarytype', 'fingerprint', 'install_date', \
            'suites_count', 'binary_id']

    def not_null_constraints(self):
        return ['package', 'version', 'maintainer', 'source', 'poolfile', \
            'binarytype']

    def get_component_name(self):
        # Walk pool file -> location -> component for this binary.
        return self.poolfile.location.component.component_name
503 __all__.append('DBBinary')
def get_suites_binary_in(package, session=None):
    """
    Returns list of Suite objects which given C{package} name is in

    @type package: str
    @param package: DBBinary package name to search for

    @rtype: list
    @return: list of Suite objects for the given package
    """
    has_package = Suite.binaries.any(DBBinary.package == package)
    return session.query(Suite).filter(has_package).all()
519 __all__.append('get_suites_binary_in')
@session_wrapper
def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
    """
    Returns the component name of the newest binary package in suite_list or
    None if no package is found. The result can be optionally filtered by a list
    of architecture names.

    @type package: str
    @param package: DBBinary package name to search for

    @type suite_list: list of str
    @param suite_list: list of suite_name items

    @type arch_list: list of str
    @param arch_list: optional list of arch_string items that defaults to []

    @rtype: str or NoneType
    @return: name of component or None
    """
    # NOTE: arch_list is never mutated, so the shared mutable default is
    # harmless here.
    q = session.query(DBBinary).filter_by(package = package). \
        join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
    if len(arch_list) > 0:
        q = q.join(DBBinary.architecture). \
            filter(Architecture.arch_string.in_(arch_list))
    binary = q.order_by(desc(DBBinary.version)).first()
    # Guard against an empty result; otherwise get_component_name() would
    # raise AttributeError on None.
    if binary is None:
        return None
    else:
        return binary.get_component_name()
552 __all__.append('get_component_by_package_suite')
554 ################################################################################
class BinaryACL(object):
    """
    Mapped class for the binary_acl table.  Column attributes are attached
    by the SQLAlchemy mapper configuration elsewhere in this module.
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BinaryACL %s>' % self.binary_acl_id
563 __all__.append('BinaryACL')
565 ################################################################################
class BinaryACLMap(object):
    """
    Mapped class for the binary_acl_map table.  Column attributes are
    attached by the SQLAlchemy mapper configuration elsewhere in this module.
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BinaryACLMap %s>' % self.binary_acl_map_id
574 __all__.append('BinaryACLMap')
576 ################################################################################
581 ArchiveDir "%(archivepath)s";
582 OverrideDir "%(overridedir)s";
583 CacheDir "%(cachedir)s";
588 Packages::Compress ". bzip2 gzip";
589 Sources::Compress ". bzip2 gzip";
594 bindirectory "incoming"
599 BinOverride "override.sid.all3";
600 BinCacheDB "packages-accepted.db";
602 FileList "%(filelist)s";
605 Packages::Extensions ".deb .udeb";
608 bindirectory "incoming/"
611 BinOverride "override.sid.all3";
612 SrcOverride "override.sid.all3.src";
613 FileList "%(filelist)s";
617 class BuildQueue(object):
618 def __init__(self, *args, **kwargs):
622 return '<BuildQueue %s>' % self.queue_name
624 def write_metadata(self, starttime, force=False):
625 # Do we write out metafiles?
626 if not (force or self.generate_metadata):
629 session = DBConn().session().object_session(self)
631 fl_fd = fl_name = ac_fd = ac_name = None
633 arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
634 startdir = os.getcwd()
637 # Grab files we want to include
638 newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
639 # Write file list with newer files
640 (fl_fd, fl_name) = mkstemp()
642 os.write(fl_fd, '%s\n' % n.fullpath)
647 # Write minimal apt.conf
648 # TODO: Remove hardcoding from template
649 (ac_fd, ac_name) = mkstemp()
650 os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
652 'cachedir': cnf["Dir::Cache"],
653 'overridedir': cnf["Dir::Override"],
657 # Run apt-ftparchive generate
658 os.chdir(os.path.dirname(ac_name))
659 os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
661 # Run apt-ftparchive release
662 # TODO: Eww - fix this
663 bname = os.path.basename(self.path)
667 # We have to remove the Release file otherwise it'll be included in the
670 os.unlink(os.path.join(bname, 'Release'))
674 os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
676 # Crude hack with open and append, but this whole section is and should be redone.
677 if self.notautomatic:
678 release=open("Release", "a")
679 release.write("NotAutomatic: yes")
684 keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
685 if cnf.has_key("Dinstall::SigningPubKeyring"):
686 keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
688 os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
690 # Move the files if we got this far
691 os.rename('Release', os.path.join(bname, 'Release'))
693 os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
695 # Clean up any left behind files
722 def clean_and_update(self, starttime, Logger, dryrun=False):
723 """WARNING: This routine commits for you"""
724 session = DBConn().session().object_session(self)
726 if self.generate_metadata and not dryrun:
727 self.write_metadata(starttime)
729 # Grab files older than our execution time
730 older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
736 Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
738 Logger.log(["I: Removing %s from the queue" % o.fullpath])
739 os.unlink(o.fullpath)
742 # If it wasn't there, don't worry
743 if e.errno == ENOENT:
746 # TODO: Replace with proper logging call
747 Logger.log(["E: Could not remove %s" % o.fullpath])
754 for f in os.listdir(self.path):
755 if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
759 r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
760 except NoResultFound:
761 fp = os.path.join(self.path, f)
763 Logger.log(["I: Would remove unused link %s" % fp])
765 Logger.log(["I: Removing unused link %s" % fp])
769 Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
771 def add_file_from_pool(self, poolfile):
772 """Copies a file into the pool. Assumes that the PoolFile object is
773 attached to the same SQLAlchemy session as the Queue object is.
775 The caller is responsible for committing after calling this function."""
776 poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
778 # Check if we have a file of this name or this ID already
779 for f in self.queuefiles:
780 if f.fileid is not None and f.fileid == poolfile.file_id or \
781 f.poolfile.filename == poolfile_basename:
782 # In this case, update the BuildQueueFile entry so we
783 # don't remove it too early
784 f.lastused = datetime.now()
785 DBConn().session().object_session(poolfile).add(f)
788 # Prepare BuildQueueFile object
789 qf = BuildQueueFile()
790 qf.build_queue_id = self.queue_id
791 qf.lastused = datetime.now()
792 qf.filename = poolfile_basename
794 targetpath = poolfile.fullpath
795 queuepath = os.path.join(self.path, poolfile_basename)
799 # We need to copy instead of symlink
801 utils.copy(targetpath, queuepath)
802 # NULL in the fileid field implies a copy
805 os.symlink(targetpath, queuepath)
806 qf.fileid = poolfile.file_id
810 # Get the same session as the PoolFile is using and add the qf to it
811 DBConn().session().object_session(poolfile).add(qf)
816 __all__.append('BuildQueue')
@session_wrapper
def get_build_queue(queuename, session=None):
    """
    Returns BuildQueue object for given C{queue name}.

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: BuildQueue
    @return: BuildQueue object for the given queue (None if not present)
    """

    q = session.query(BuildQueue).filter_by(queue_name=queuename)

    try:
        return q.one()
    except NoResultFound:
        return None
842 __all__.append('get_build_queue')
844 ################################################################################
class BuildQueueFile(object):
    """
    Mapped class for the build_queue_files table.  Column attributes
    (filename, build_queue_id, the buildqueue relation, ...) are attached by
    the SQLAlchemy mapper configuration elsewhere in this module.
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)

    @property
    def fullpath(self):
        # Absolute path of the file inside its queue directory.
        return os.path.join(self.buildqueue.path, self.filename)
858 __all__.append('BuildQueueFile')
860 ################################################################################
class ChangePendingBinary(object):
    """
    Mapped class for the changes_pending_binaries table.  Column attributes
    are attached by the SQLAlchemy mapper configuration elsewhere in this
    module.
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ChangePendingBinary %s>' % self.change_pending_binary_id
869 __all__.append('ChangePendingBinary')
871 ################################################################################
class ChangePendingFile(object):
    """
    Mapped class for the changes_pending_files table.  Column attributes are
    attached by the SQLAlchemy mapper configuration elsewhere in this module.
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ChangePendingFile %s>' % self.change_pending_file_id
880 __all__.append('ChangePendingFile')
882 ################################################################################
class ChangePendingSource(object):
    """
    Mapped class for the changes_pending_source table.  Column attributes are
    attached by the SQLAlchemy mapper configuration elsewhere in this module.
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ChangePendingSource %s>' % self.change_pending_source_id
891 __all__.append('ChangePendingSource')
893 ################################################################################
class Component(ORMObject):
    """
    An archive component (main, contrib, non-free).  Compares equal/unequal
    directly against plain strings for convenience.
    """

    def __init__(self, component_name = None):
        self.component_name = component_name

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.component_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.component_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        # NOTE(review): the tail of this list was lost in this copy and has
        # been restored from the canonical source — confirm against upstream.
        return ['component_name', 'component_id', 'description', 'location', \
            'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        return ['component_name']
919 __all__.append('Component')
@session_wrapper
def get_component(component, session=None):
    """
    Returns database id for given C{component}.

    @type component: string
    @param component: The name of the component

    @rtype: Component
    @return: Component object for the given name (None if not present)
    """
    # Component names are stored lower-case.
    component = component.lower()

    q = session.query(Component).filter_by(component_name=component)

    try:
        return q.one()
    except NoResultFound:
        return None
942 __all__.append('get_component')
944 ################################################################################
class DBConfig(object):
    """
    Mapped class for the config table.  Column attributes (name, value, ...)
    are attached by the SQLAlchemy mapper configuration elsewhere in this
    module.
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DBConfig %s>' % self.name
953 __all__.append('DBConfig')
955 ################################################################################
@session_wrapper
def get_or_set_contents_file_id(filename, session=None):
    """
    Returns database id for given filename.

    If no matching file is found, a row is inserted.

    @type filename: string
    @param filename: The filename
    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.

    @rtype: int
    @return: the database id for the given filename
    """

    q = session.query(ContentFilename).filter_by(filename=filename)

    try:
        ret = q.one().cafilename_id
    except NoResultFound:
        # Not there yet: insert and pick up the generated id.
        cf = ContentFilename()
        cf.filename = filename
        session.add(cf)
        session.commit_or_flush()
        ret = cf.cafilename_id

    return ret
988 __all__.append('get_or_set_contents_file_id')
def get_contents(suite, overridetype, section=None, session=None):
    """
    Returns contents for a suite / overridetype combination, limiting
    to a section if not None.

    @type suite: Suite
    @param suite: Suite object

    @type overridetype: OverrideType
    @param overridetype: OverrideType object

    @type section: Section
    @param section: Optional section object to limit results to

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: ResultsProxy
    @return: ResultsProxy object set up to return tuples of (filename, section,
    package, arch_id)
    """

    # find me all of the contents for a given suite
    # NOTE(review): the SELECT column list appears truncated in this copy
    # (only "fn" is visible before FROM, leaving a dangling comma); confirm
    # against the canonical source that the section/package/arch columns
    # promised by the docstring are selected here.
    contents_q = """SELECT (p.path||'/'||n.file) AS fn,
               FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
               JOIN content_file_names n ON (c.filename=n.id)
               JOIN binaries b ON (b.id=c.binary_pkg)
               JOIN override o ON (o.package=b.package)
               JOIN section s ON (s.id=o.section)
               WHERE o.suite = :suiteid AND o.type = :overridetypeid
               AND b.type=:overridetypename"""

    vals = {'suiteid': suite.suite_id,
            'overridetypeid': overridetype.overridetype_id,
            'overridetypename': overridetype.overridetype}

    # Optionally narrow to a single section.
    if section is not None:
        contents_q += " AND s.id = :sectionid"
        vals['sectionid'] = section.section_id

    contents_q += " ORDER BY fn"

    return session.execute(contents_q, vals)
1039 __all__.append('get_contents')
1041 ################################################################################
class ContentFilepath(object):
    """
    Mapped class for the content_file_paths table.  Column attributes
    (filepath, cafilepath_id, ...) are attached by the SQLAlchemy mapper
    configuration elsewhere in this module.
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ContentFilepath %s>' % self.filepath
1050 __all__.append('ContentFilepath')
@session_wrapper
def get_or_set_contents_path_id(filepath, session=None):
    """
    Returns database id for given path.

    If no matching file is found, a row is inserted.

    @type filepath: string
    @param filepath: The filepath

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.

    @rtype: int
    @return: the database id for the given path
    """

    q = session.query(ContentFilepath).filter_by(filepath=filepath)

    try:
        ret = q.one().cafilepath_id
    except NoResultFound:
        # Not there yet: insert and pick up the generated id.
        cf = ContentFilepath()
        cf.filepath = filepath
        session.add(cf)
        session.commit_or_flush()
        ret = cf.cafilepath_id

    return ret
1084 __all__.append('get_or_set_contents_path_id')
1086 ################################################################################
class ContentAssociation(object):
    """
    Mapped class for the content_associations table.  Column attributes are
    attached by the SQLAlchemy mapper configuration elsewhere in this module.
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ContentAssociation %s>' % self.ca_id
1095 __all__.append('ContentAssociation')
def insert_content_paths(binary_id, fullpaths, session=None):
    """
    Make sure given path is associated with given binary id

    @type binary_id: int
    @param binary_id: the id of the binary
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session. If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code. If not passed, a commit
    will be performed at the end of the function, otherwise the caller is
    responsible for committing.

    @return: True upon success
    """

    privatetrans = False
    if session is None:
        session = DBConn().session()
        privatetrans = True

    try:
        # Insert paths
        def generate_path_dicts():
            for fullpath in fullpaths:
                # Normalise "./usr/bin/foo" to "usr/bin/foo".
                if fullpath.startswith( './' ):
                    fullpath = fullpath[2:]

                yield {'filename':fullpath, 'id': binary_id }

        for d in generate_path_dicts():
            session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
                             d )

        session.commit()
        if privatetrans:
            session.close()
        return True

    # Deliberate best-effort: any failure is reported via the False return
    # value rather than propagated to the caller.
    except:
        traceback.print_exc()

        # Only rollback if we set up the session ourself
        if privatetrans:
            session.rollback()
            session.close()

        return False
1148 __all__.append('insert_content_paths')
1150 ################################################################################
class DSCFile(object):
    """
    Mapped class for the dsc_files table.  Column attributes (dscfile_id,
    source_id, poolfile_id, ...) are attached by the SQLAlchemy mapper
    configuration elsewhere in this module.
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DSCFile %s>' % self.dscfile_id
1159 __all__.append('DSCFile')
@session_wrapper
def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
    """
    Returns a list of DSCFiles which may be empty

    @type dscfile_id: int (optional)
    @param dscfile_id: the dscfile_id of the DSCFiles to find

    @type source_id: int (optional)
    @param source_id: the source id related to the DSCFiles to find

    @type poolfile_id: int (optional)
    @param poolfile_id: the poolfile id related to the DSCFiles to find

    @rtype: list
    @return: Possibly empty list of DSCFiles
    """

    q = session.query(DSCFile)

    if dscfile_id is not None:
        q = q.filter_by(dscfile_id=dscfile_id)

    if source_id is not None:
        q = q.filter_by(source_id=source_id)

    if poolfile_id is not None:
        q = q.filter_by(poolfile_id=poolfile_id)

    # Without this the function would return None, contradicting the
    # documented contract above.
    return q.all()
1192 __all__.append('get_dscfiles')
1194 ################################################################################
class PoolFile(ORMObject):
    """
    A file stored in the archive pool (one row of the files table).
    """

    def __init__(self, filename = None, location = None, filesize = -1, \
        md5sum = None):
        self.filename = filename
        self.location = location
        self.filesize = filesize
        self.md5sum = md5sum

    @property
    def fullpath(self):
        # Absolute path: location directory plus pool-relative filename.
        return os.path.join(self.location.path, self.filename)

    def is_valid(self, filesize = -1, md5sum = None):
        # Compare stored size/checksum against the supplied values.
        return self.filesize == long(filesize) and self.md5sum == md5sum

    def properties(self):
        # First entry is used by ORMObject.__repr__().
        return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
            'sha256sum', 'location', 'source', 'binary', 'last_used']

    def not_null_constraints(self):
        return ['filename', 'md5sum', 'location']
1218 __all__.append('PoolFile')
@session_wrapper
def check_poolfile(filename, filesize, md5sum, location_id, session=None):
    """
    Checks a file against the DB and returns a tuple of
    (ValidFileFound [boolean], PoolFile object or None)

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @type filesize: int
    @param filesize: the size of the file to check against the DB

    @type md5sum: string
    @param md5sum: the md5sum of the file to check against the DB

    @type location_id: int
    @param location_id: the id of the location to look in

    @rtype: tuple
    @return: Tuple of length 2.
        - If valid pool file found: (C{True}, C{PoolFile object})
        - If valid pool file not found:
            - (C{False}, C{None}) if no file found
            - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
    """

    poolfile = session.query(Location).get(location_id). \
        files.filter_by(filename=filename).first()
    # valid must be initialised here; otherwise it would be unbound when no
    # matching (or mismatching) file exists.
    valid = False
    if poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum):
        valid = True

    return (valid, poolfile)
1254 __all__.append('check_poolfile')
# TODO: the implementation can trivially be inlined at the place where the
# function is called
def get_poolfile_by_id(file_id, session=None):
    """
    Returns a PoolFile objects or None for the given id

    @type file_id: int
    @param file_id: the id of the file to look for

    @rtype: PoolFile or None
    @return: either the PoolFile object or None
    """
    # NOTE(review): 'session' is used unconditionally; a @session_wrapper-style
    # decorator likely preceded this def in the original -- confirm.

    return session.query(PoolFile).get(file_id)

__all__.append('get_poolfile_by_id')
def get_poolfile_like_name(filename, session=None):
    """
    Returns an array of PoolFile objects which are like the given name

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @rtype: array
    @return: array of PoolFile objects
    """

    # TODO: There must be a way of properly using bind parameters with %FOO%
    q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))

    # NOTE(review): the trailing "return q.all()" line was lost in
    # extraction -- confirm against upstream dak.

__all__.append('get_poolfile_like_name')
def add_poolfile(filename, datadict, location_id, session=None):
    """
    Add a new file to the pool

    @type filename: string
    @param filename: filename

    @type datadict: dict
    @param datadict: dict with needed data (size, md5sum, sha1sum, sha256sum)

    @type location_id: int
    @param location_id: database id of the location

    @rtype: PoolFile
    @return: the PoolFile object created
    """
    poolfile = PoolFile()
    poolfile.filename = filename
    poolfile.filesize = datadict["size"]
    poolfile.md5sum = datadict["md5sum"]
    poolfile.sha1sum = datadict["sha1sum"]
    poolfile.sha256sum = datadict["sha256sum"]
    poolfile.location_id = location_id

    session.add(poolfile)
    # Flush to get a file id (NB: This is not a commit)
    # NOTE(review): the session.flush() / return poolfile lines were lost
    # in extraction -- confirm against upstream dak.

__all__.append('add_poolfile')
1326 ################################################################################
class Fingerprint(ORMObject):
    # A GPG key fingerprint, linked to keyring/uid/ACL rows elsewhere.
    def __init__(self, fingerprint = None):
        self.fingerprint = fingerprint

    def properties(self):
        # NOTE(review): the continuation line of this list was lost in
        # extraction -- confirm against upstream dak.
        return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \

    def not_null_constraints(self):
        # The fingerprint string itself is the only mandatory column.
        return ['fingerprint']

__all__.append('Fingerprint')
def get_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr or None
    """
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    # NOTE(review): the try/"ret = q.one()" and both return paths were lost
    # in extraction -- confirm against upstream dak.
    except NoResultFound:

__all__.append('get_fingerprint')
def get_or_set_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    If no matching fpr is found, a row is inserted.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.
    A flush will be performed either way.

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr
    """
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    # NOTE(review): the try/"ret = q.one()" lines were lost in extraction.
    except NoResultFound:
        # Not known yet: insert a new row and make it visible to the caller.
        fingerprint = Fingerprint()
        fingerprint.fingerprint = fpr
        session.add(fingerprint)
        session.commit_or_flush()
    # NOTE(review): the "ret = fingerprint" / "return ret" lines were lost
    # in extraction -- confirm against upstream dak.

__all__.append('get_or_set_fingerprint')
1403 ################################################################################
# Helper routine for Keyring class
def get_ldap_name(entry):
    """Build a display name from the cn/mn/sn attributes of an LDAP entry."""
    # NOTE(review): the loop-body lines (fetching each attribute into 'ret'
    # and appending to 'name') were lost in extraction -- confirm against
    # upstream dak.
    for k in ["cn", "mn", "sn"]:
    if ret and ret[0] != "" and ret[0] != "-":
    return " ".join(name)
1414 ################################################################################
class Keyring(object):
    # Wraps a GPG keyring file plus key/fingerprint lookup tables built
    # from "gpg --with-colons" output.

    # gpg command template; %s is filled with the keyring file path.
    gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
        " --with-colons --fingerprint --fingerprint"

    # NOTE(review): the attribute initialisers (keys/fpr_lookup dicts) and
    # the "def __repr__(self):" line were lost in extraction; the return
    # statement below belongs to __repr__, not __init__ -- confirm.
    def __init__(self, *args, **kwargs):
        return '<Keyring %s>' % self.keyring_name
def de_escape_gpg_str(self, txt):
    """Decode GnuPG ``\\xNN`` escape sequences in *txt* to their characters."""
    # re.split with a capturing group keeps the matched escapes at the odd
    # indices of the resulting list; everything else is literal text.
    pieces = re.split(r'(\\x..)', txt)
    decoded = [
        "%c" % int(piece[2:], 16) if idx % 2 else piece
        for idx, piece in enumerate(pieces)
    ]
    return "".join(decoded)
def parse_address(self, uid):
    """parses uid and returns a tuple of real name and email address"""
    # email.Utils.parseaddr splits "Name <addr>" into its two parts.
    (name, address) = email.Utils.parseaddr(uid)
    # Strip any parenthesised comment from the name portion.
    name = re.sub(r"\s*[(].*[)]", "", name)
    name = self.de_escape_gpg_str(name)
    # NOTE(review): one or two lines were lost in extraction here
    # (presumably quoting of names containing "'") -- confirm.
    return (name, address)
def load_keys(self, keyring):
    """Populate self.keys / self.fpr_lookup from the given keyring file."""
    if not self.keyring_id:
        raise Exception('Must be initialized with database information')

    # Parse machine-readable "gpg --with-colons" output line by line.
    k = os.popen(self.gpg_invocation % keyring, "r")
    # NOTE(review): the dict resets and the key/signingkey initialisers
    # were lost in extraction -- confirm against upstream dak.
    for line in k.xreadlines():
        field = line.split(":")
        if field[0] == "pub":
            # New public key record; field[9] holds "Name <email>".
            # NOTE(review): the lines assigning 'key' and creating
            # self.keys[key] were lost in extraction.
            (name, addr) = self.parse_address(field[9])
            self.keys[key]["email"] = addr
            self.keys[key]["name"] = name
            self.keys[key]["fingerprints"] = []
        elif key and field[0] == "sub" and len(field) >= 12:
            # Subkey record: remember whether it is signing-capable ("s").
            signingkey = ("s" in field[11])
        elif key and field[0] == "uid":
            # Additional uid: prefer the first one carrying an email.
            (name, addr) = self.parse_address(field[9])
            if "email" not in self.keys[key] and "@" in addr:
                self.keys[key]["email"] = addr
                self.keys[key]["name"] = name
        elif signingkey and field[0] == "fpr":
            # Fingerprint of a signing-capable key: index it for lookup.
            self.keys[key]["fingerprints"].append(field[9])
            self.fpr_lookup[field[9]] = key
def import_users_from_ldap(self, session):
    """Attach uid information from the Debian LDAP directory to loaded keys."""
    # NOTE(review): the import/Config() setup lines were lost in extraction.
    LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
    LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]

    # Anonymous bind is sufficient for the public directory data we need.
    l = ldap.open(LDAPServer)
    l.simple_bind_s("","")
    Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
           "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
           ["uid", "keyfingerprint", "cn", "mn", "sn"])

    ldap_fin_uid_id = {}
    # NOTE(review): the byuid/byname initialisers and the loop header over
    # the LDAP results ('entry') were lost in extraction -- confirm.
        uid = entry["uid"][0]
        name = get_ldap_name(entry)
        fingerprints = entry["keyFingerPrint"]
        for f in fingerprints:
            key = self.fpr_lookup.get(f, None)
            if key not in self.keys:
            self.keys[key]["uid"] = uid
            # Map database uid id <-> (uid, name) in both directions.
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, name)
            byname[uid] = (keyid, name)

    return (byname, byuid)
def generate_users_from_keyring(self, format, session):
    """Create uid entries from keyring emails; returns (byname, byuid) maps."""
    # NOTE(review): the byuid/byname/any_invalid initialisers were lost in
    # extraction -- confirm against upstream dak.
    for x in self.keys.keys():
        if "email" not in self.keys[x]:
            # Key without a usable email address: give it the shared
            # placeholder uid.
            self.keys[x]["uid"] = format % "invalid-uid"
        # NOTE(review): the "else:" guard for the following lines was lost
        # in extraction.
            uid = format % self.keys[x]["email"]
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, self.keys[x]["name"])
            byname[uid] = (keyid, self.keys[x]["name"])
            self.keys[x]["uid"] = uid

    # NOTE(review): the guard (presumably "if any_invalid:") for this
    # shared placeholder entry was lost in extraction.
        uid = format % "invalid-uid"
        keyid = get_or_set_uid(uid, session).uid_id
        byuid[keyid] = (uid, "ungeneratable user id")
        byname[uid] = (keyid, "ungeneratable user id")

    return (byname, byuid)
1536 __all__.append('Keyring')
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @rtype: Keyring
    @return: the Keyring object for this keyring
    """
    q = session.query(Keyring).filter_by(keyring_name=keyring)

    # NOTE(review): the try/"return q.one()" and None-return lines were lost
    # in extraction -- confirm against upstream dak.
    except NoResultFound:

__all__.append('get_keyring')
1560 ################################################################################
class KeyringACLMap(object):
    """Mapping row between a keyring and the ACL applied to its uploads.

    Attributes are supplied by the SQLAlchemy mapper; the constructor
    intentionally accepts and ignores any arguments (dak mapper convention).
    """
    # NOTE(review): reconstructed from a truncated excerpt -- the original
    # __init__ body and the "def __repr__" line were lost; this follows the
    # uniform pattern of the sibling classes in this module.
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<KeyringACLMap %s>' % self.keyring_acl_map_id
1569 __all__.append('KeyringACLMap')
1571 ################################################################################
class DBChange(object):
    # Represents an uploaded .changes file tracked in the database.
    # NOTE(review): the __init__ body and the "def __repr__(self):" line
    # were lost in extraction; the return below belongs to __repr__.
    def __init__(self, *args, **kwargs):
        return '<DBChange %s>' % self.changesname

    def clean_from_queue(self):
        # Detach this upload from its policy queue and drop file references.
        session = DBConn().session().object_session(self)

        # Remove changes_pool_files entries
        # NOTE(review): the deletion statement(s) were lost in extraction.

        # Remove changes_pending_files references
        # NOTE(review): the deletion statement(s) were lost in extraction.

        # Clear out of queue
        self.in_queue = None
        self.approved_for_id = None

__all__.append('DBChange')
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return: DBChange object for the given filename (C{None} if not present)
    """
    q = session.query(DBChange).filter_by(changesname=filename)

    # NOTE(review): the try/"return q.one()" and None-return lines were lost
    # in extraction -- confirm against upstream dak.
    except NoResultFound:

__all__.append('get_dbchange')
1620 ################################################################################
class Location(ORMObject):
    # A pool location: filesystem path plus component and archive type.
    def __init__(self, path = None, component = None):
        # NOTE(review): the "self.path = path" line was lost in extraction.
        self.component = component
        # the column 'type' should go away, see comment at mapper
        self.archive_type = 'pool'

    def properties(self):
        # NOTE(review): the continuation line of this list was lost in
        # extraction -- confirm against upstream dak.
        return ['path', 'location_id', 'archive_type', 'component', \

    def not_null_constraints(self):
        return ['path', 'archive_type']

__all__.append('Location')
def get_location(location, component=None, archive=None, session=None):
    """
    Returns Location object for the given combination of location, component
    and archive

    @type location: string
    @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}

    @type component: string
    @param component: the component name (if None, no restriction applied)

    @type archive: string
    @param archive: the archive name (if None, no restriction applied)

    @rtype: Location / None
    @return: Either a Location object or None if one can't be found
    """

    q = session.query(Location).filter_by(path=location)

    if archive is not None:
        q = q.join(Archive).filter_by(archive_name=archive)

    if component is not None:
        q = q.join(Component).filter_by(component_name=component)

    # NOTE(review): the try/"return q.one()" and None-return lines were lost
    # in extraction -- confirm against upstream dak.
    except NoResultFound:

__all__.append('get_location')
1672 ################################################################################
class Maintainer(ORMObject):
    # A maintainer entry: the raw "Name <email>" string from a control file.
    def __init__(self, name = None):
        # NOTE(review): the "self.name = name" line was lost in extraction.

    def properties(self):
        return ['name', 'maintainer_id']

    def not_null_constraints(self):
        # NOTE(review): the return line (presumably ['name']) was lost in
        # extraction -- confirm against upstream dak.

    def get_split_maintainer(self):
        # Split the stored "Name <email>" into its component parts; return
        # four empty strings when no name is set.
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())

__all__.append('Maintainer')
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.
    A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """
    q = session.query(Maintainer).filter_by(name=name)

    # NOTE(review): the try/"ret = q.one()" lines were lost in extraction.
    except NoResultFound:
        # Not known yet: insert a new row and make it visible to the caller.
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()
    # NOTE(review): the "ret = maintainer" / "return ret" lines were lost
    # in extraction -- confirm against upstream dak.

__all__.append('get_or_set_maintainer')
def get_maintainer(maintainer_id, session=None):
    """
    Return the name of the maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer
    @return: the Maintainer with this C{maintainer_id}
    """

    return session.query(Maintainer).get(maintainer_id)

__all__.append('get_maintainer')
1743 ################################################################################
class NewComment(object):
    """An ftpmaster comment attached to a package/version sitting in NEW.

    Attributes are supplied by the SQLAlchemy mapper; the constructor
    intentionally accepts and ignores any arguments (dak mapper convention).
    """
    # NOTE(review): reconstructed from a truncated excerpt -- the original
    # __init__ body and the "def __repr__" line were lost; this follows the
    # uniform pattern of the sibling classes in this module.
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
1752 __all__.append('NewComment')
def has_new_comment(package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """
    # NOTE(review): 'session' is used unconditionally; a @session_wrapper-style
    # decorator likely preceded this def in the original -- confirm.

    q = session.query(NewComment)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    return bool(q.count() > 0)

__all__.append('has_new_comment')
def get_new_comments(package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """
    # Each filter is applied only when the corresponding argument was given.
    q = session.query(NewComment)
    if package is not None: q = q.filter_by(package=package)
    if version is not None: q = q.filter_by(version=version)
    if comment_id is not None: q = q.filter_by(comment_id=comment_id)
    # NOTE(review): the trailing "return q.all()" line was lost in
    # extraction -- confirm against upstream dak.

__all__.append('get_new_comments')
1813 ################################################################################
class Override(object):
    """An override entry: per-suite section/priority assignment of a package.

    Attributes are supplied by the SQLAlchemy mapper; the constructor
    intentionally accepts and ignores any arguments (dak mapper convention).
    """
    # NOTE(review): reconstructed from a truncated excerpt -- the original
    # __init__ body and the "def __repr__" line were lost; this follows the
    # uniform pattern of the sibling classes in this module.
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<Override %s (%s)>' % (self.package, self.suite_id)
1822 __all__.append('Override')
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to.  If
                  None, don't limit.  Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
                      limit to.  If None, don't limit.  Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
                         limit to.  If None, don't limit.  Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """
    q = session.query(Override)
    q = q.filter_by(package=package)

    # Each of the three optional filters accepts a scalar or a list; a
    # scalar is normalised to a one-element list before the IN filter.
    if suite is not None:
        if not isinstance(suite, list): suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list): component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list): overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

    # NOTE(review): the trailing "return q.all()" line was lost in
    # extraction -- confirm against upstream dak.

__all__.append('get_override')
1872 ################################################################################
class OverrideType(object):
    """A type of override entry (e.g. deb, udeb, dsc).

    Attributes are supplied by the SQLAlchemy mapper; the constructor
    intentionally accepts and ignores any arguments (dak mapper convention).
    """
    # NOTE(review): reconstructed from a truncated excerpt -- the original
    # __init__ body and the "def __repr__" line were lost; this follows the
    # uniform pattern of the sibling classes in this module.
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<OverrideType %s>' % self.overridetype
1881 __all__.append('OverrideType')
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: OverrideType
    @return: the database id for the given override type
    """
    q = session.query(OverrideType).filter_by(overridetype=override_type)

    # NOTE(review): the try/"return q.one()" and None-return lines were lost
    # in extraction -- confirm against upstream dak.
    except NoResultFound:

__all__.append('get_override_type')
1908 ################################################################################
class DebContents(object):
    """A deb contents entry: one file path shipped by a binary package.

    Attributes are supplied by the SQLAlchemy mapper; the constructor
    intentionally accepts and ignores any arguments (dak mapper convention).
    """
    # NOTE(review): reconstructed from a truncated excerpt. Also fixes the
    # misspelling 'DebConetnts' in the repr string.
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DebContents %s: %s>' % (self.package.package, self.file)
1917 __all__.append('DebContents')
class UdebContents(object):
    """A udeb contents entry: one file path shipped by a udeb package.

    Attributes are supplied by the SQLAlchemy mapper; the constructor
    intentionally accepts and ignores any arguments (dak mapper convention).
    """
    # NOTE(review): reconstructed from a truncated excerpt. Also fixes the
    # misspelling 'UdebConetnts' in the repr string.
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<UdebContents %s: %s>' % (self.package.package, self.file)
1927 __all__.append('UdebContents')
class PendingBinContents(object):
    """Temporary contents record for a binary package awaiting acceptance.

    Attributes are supplied by the SQLAlchemy mapper; the constructor
    intentionally accepts and ignores any arguments (dak mapper convention).
    """
    # NOTE(review): reconstructed from a truncated excerpt -- the original
    # __init__ body and the "def __repr__" line were lost; this follows the
    # uniform pattern of the sibling classes in this module.
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<PendingBinContents %s>' % self.contents_id
1936 __all__.append('PendingBinContents')
def insert_pending_content_paths(package,
    # NOTE(review): the remaining parameter lines (likely is_udeb, fullpaths,
    # session=None) were lost in extraction -- confirm against upstream dak.
    """
    Make sure given paths are temporarily associated with given
    package

    @type package: dict
    @param package: the package to associate with should have been read in from the binary control file
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session.  If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code.  If not passed, a commit
    will be performed at the end of the function

    @return: True upon success, False if there is a problem
    """

    privatetrans = False
    # NOTE(review): the "if session is None:" guard was lost in extraction;
    # a private session/transaction is created when none was passed in.
        session = DBConn().session()

    # NOTE(review): the "try:" opener was lost in extraction.
        arch = get_architecture(package['Architecture'], session)
        arch_id = arch.arch_id

        # Remove any already existing recorded files for this package
        q = session.query(PendingBinContents)
        q = q.filter_by(package=package['Package'])
        q = q.filter_by(version=package['Version'])
        q = q.filter_by(architecture=arch_id)
        # NOTE(review): the q.delete() call was lost in extraction.

        for fullpath in fullpaths:
            # Normalise "./usr/..." style paths to "usr/...".
            if fullpath.startswith( "./" ):
                fullpath = fullpath[2:]

            pca = PendingBinContents()
            pca.package = package['Package']
            pca.version = package['Version']
            pca.architecture = arch_id
            # NOTE(review): the udeb/deb conditional wrapping these two type
            # assignments was lost in extraction -- only one applies per file.
                pca.type = 8 # gross
                pca.type = 7 # also gross

        # Only commit if we set up the session ourself
        # NOTE(review): the commit / "return True" lines were lost here.
    except Exception, e:
        traceback.print_exc()

        # Only rollback if we set up the session ourself
        # NOTE(review): the rollback / "return False" lines were lost here.

__all__.append('insert_pending_content_paths')
2013 ################################################################################
class PolicyQueue(object):
    """A policy queue (e.g. NEW) in which uploads may be held for review.

    Attributes are supplied by the SQLAlchemy mapper; the constructor
    intentionally accepts and ignores any arguments (dak mapper convention).
    """
    # NOTE(review): reconstructed from a truncated excerpt -- the original
    # __init__ body and the "def __repr__" line were lost; this follows the
    # uniform pattern of the sibling classes in this module.
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<PolicyQueue %s>' % self.queue_name
2022 __all__.append('PolicyQueue')
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(queue_name=queuename)

    # NOTE(review): the try/"return q.one()" and None-return lines were lost
    # in extraction -- confirm against upstream dak.
    except NoResultFound:

__all__.append('get_policy_queue')
def get_policy_queue_from_path(pathname, session=None):
    """
    Returns PolicyQueue object for given C{path name}

    @type pathname: string
    @param pathname: The path

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(path=pathname)

    # NOTE(review): the try/"return q.one()" and None-return lines were lost
    # in extraction -- confirm against upstream dak.
    except NoResultFound:

__all__.append('get_policy_queue_from_path')
2074 ################################################################################
class Priority(object):
    """A package priority (e.g. required, optional, extra).

    Attributes are supplied by the SQLAlchemy mapper; the constructor
    intentionally accepts and ignores any arguments (dak mapper convention).
    Instances compare equal/unequal to plain priority-name strings.
    """
    # NOTE(review): reconstructed from a truncated excerpt -- the original
    # __init__ body and the "def __repr__" line were lost; this follows the
    # uniform pattern of the sibling classes in this module.
    def __init__(self, *args, **kwargs):
        pass

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __repr__(self):
        return '<Priority %s (%s)>' % (self.priority, self.priority_id)
2095 __all__.append('Priority')
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority
    """
    q = session.query(Priority).filter_by(priority=priority)

    # NOTE(review): the try/"return q.one()" and None-return lines were lost
    # in extraction -- confirm against upstream dak.
    except NoResultFound:

__all__.append('get_priority')
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """
    # NOTE(review): the "ret = {}" initialiser and the loop header over the
    # query results ('x') were lost in extraction -- confirm.
    q = session.query(Priority)
        ret[x.priority] = x.priority_id
    # NOTE(review): the "return ret" line was lost in extraction.

__all__.append('get_priorities')
2144 ################################################################################
class Section(object):
    """A package section (e.g. libs, non-free/admin).

    Attributes are supplied by the SQLAlchemy mapper; the constructor
    intentionally accepts and ignores any arguments (dak mapper convention).
    Instances compare equal/unequal to plain section-name strings.
    """
    # NOTE(review): reconstructed from a truncated excerpt -- the original
    # __init__ body and the "def __repr__" line were lost; this follows the
    # uniform pattern of the sibling classes in this module.
    def __init__(self, *args, **kwargs):
        pass

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __repr__(self):
        return '<Section %s>' % self.section
2165 __all__.append('Section')
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name
    """
    q = session.query(Section).filter_by(section=section)

    # NOTE(review): the try/"return q.one()" and None-return lines were lost
    # in extraction -- confirm against upstream dak.
    except NoResultFound:

__all__.append('get_section')
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """
    # NOTE(review): the "ret = {}" initialiser and the loop header over the
    # query results ('x') were lost in extraction -- confirm.
    q = session.query(Section)
        ret[x.section] = x.section_id
    # NOTE(review): the "return ret" line was lost in extraction.

__all__.append('get_sections')
2214 ################################################################################
class DBSource(ORMObject):
    """A source package version stored in the database.

    Links the source name/version to its maintainer, changed-by, .dsc pool
    file and the suites it is published in.
    """

    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date

    def properties(self):
        # Attribute names exposed via the ORMObject helpers (repr/JSON).
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \
            'install_date', 'binaries_count']

    def not_null_constraints(self):
        # Attributes that must be set before the row may be flushed.
        # Fix: 'install_date' was previously listed twice.
        return ['source', 'version', 'install_date', 'maintainer', \
            'changedby', 'poolfile']
2235 __all__.append('DBSource')
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
      1. exact match     => 1.0-3
      2. bin-only NMU    => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: list
    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: returns 1 if a source with expected version is found, otherwise 0
    """
    # NOTE(review): mutable default argument suites=["any"] -- harmless as
    # long as no caller mutates it, but worth changing to None upstream.
    # NOTE(review): the cnf/ret initialisation lines were lost in extraction.

    from daklib.regexes import re_bin_only_nmu
    # Strip a "+bN" binNMU suffix to recover the original source version.
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))
        # NOTE(review): the 'if suite != "any":' guard and the construction
        # of the suite set 's' were lost in extraction -- confirm.
            # source must exist in suite X, or in some other suite that's
            # mapped to X, recursively... silent-maps are counted too,
            # unreleased-maps aren't.
            maps = cnf.ValueList("SuiteMappings")[:]
            maps = [ m.split() for m in maps ]
            maps = [ (x[1], x[2]) for x in maps
                            if x[0] == "map" or x[0] == "silent-map" ]
            # NOTE(review): the fixed-point loop expanding 's' via the map
            # pairs was lost in extraction.
                if x[1] in s and x[0] not in s:

            q = q.filter(DBSource.suites.any(Suite.suite_name.in_(s)))
        # NOTE(review): the q.count() check and early-return lines were lost.

    # No source found so return not ok
    # NOTE(review): the final ret assignment / return lines were lost.

__all__.append('source_exists')
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """

    return session.query(Suite).filter(Suite.sources.any(source=source)).all()

__all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @type source: str
    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect.  If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBSource objects for the given name (may be empty)
    """
    q = session.query(DBSource).filter_by(source=source)

    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)

    # NOTE(review): the trailing "return q.all()" line was lost in
    # extraction -- confirm against upstream dak.

__all__.append('get_sources_from_name')
# FIXME: This function fails badly if it finds more than 1 source package and
# its implementation is trivial enough to be inlined.
def get_source_in_suite(source, suite, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite: string
    @param suite: the suite name

    @rtype: string
    @return: the version for I{source} in I{suite}
    """

    q = get_suite(suite, session).get_sources(source)
    # NOTE(review): the try/"return q.one()" and None-return lines were lost
    # in extraction -- confirm against upstream dak.
    except NoResultFound:

__all__.append('get_source_in_suite')
2378 ################################################################################
def add_dsc_to_db(u, filename, session=None):
    # Create the DBSource row and all supporting rows (pool files, dsc_files,
    # uploaders) for the .dsc of an accepted source upload.  Returns
    # (source, dsc_component, dsc_location_id, pfs).
    entry = u.pkg.files[filename]
    # NOTE(review): the "source = DBSource()" / "pfs = []" initialisers were
    # lost in extraction -- confirm against upstream dak.

    source.source = u.pkg.dsc["source"]
    source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
    source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
    source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    source.install_date = datetime.now().date()

    dsc_component = entry["component"]
    dsc_location_id = entry["location id"]

    source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")

    # Set up a new poolfile if necessary
    if not entry.has_key("files id") or not entry["files id"]:
        filename = entry["pool name"] + filename
        poolfile = add_poolfile(filename, entry, dsc_location_id, session)
        pfs.append(poolfile)
        entry["files id"] = poolfile.file_id

    source.poolfile_id = entry["files id"]
    # NOTE(review): the session.add(source)/flush lines were lost here.

    suite_names = u.pkg.changes["distribution"].keys()
    source.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Add the source files to the DB (files and dsc_files)
    # NOTE(review): the creation of the DSCFile row for the .dsc itself was
    # lost in extraction.
    dscfile.source_id = source.source_id
    dscfile.poolfile_id = entry["files id"]
    session.add(dscfile)

    for dsc_file, dentry in u.pkg.dsc_files.items():
        # NOTE(review): the "df = DSCFile()" line was lost in extraction.
        df.source_id = source.source_id

        # If the .orig tarball is already in the pool, it's
        # files id is stored in dsc_files by check_dsc().
        files_id = dentry.get("files id", None)

        # Find the entry in the files hash
        # TODO: Bail out here properly
        # NOTE(review): the dfentry initialiser was lost in extraction.
        for f, e in u.pkg.files.items():
            # NOTE(review): the filename match test and dfentry assignment
            # were lost in extraction.

        if files_id is None:
            filename = dfentry["pool name"] + dsc_file

            (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
            # FIXME: needs to check for -1/-2 and or handle exception
            if found and obj is not None:
                files_id = obj.file_id
                # NOTE(review): a pfs.append(obj) line was likely lost here.

            # If still not found, add it
            if files_id is None:
                # HACK: Force sha1sum etc into dentry
                dentry["sha1sum"] = dfentry["sha1sum"]
                dentry["sha256sum"] = dfentry["sha256sum"]
                poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
                pfs.append(poolfile)
                files_id = poolfile.file_id
        # NOTE(review): the "else:" guard for the lookup below was lost.
            poolfile = get_poolfile_by_id(files_id, session)
            if poolfile is None:
                utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
            pfs.append(poolfile)

        df.poolfile_id = files_id
        # NOTE(review): the session.add(df) line was lost in extraction.

    # Add the src_uploaders to the DB
    uploader_ids = [source.maintainer_id]
    if u.pkg.dsc.has_key("uploaders"):
        for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
            # NOTE(review): an "up = up.strip()" line was likely lost here.
            uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)

    # NOTE(review): the "added_ids = {}" initialiser was lost in extraction.
    for up_id in uploader_ids:
        if added_ids.has_key(up_id):
            # Skip duplicates; warn so the upload log shows them.
            utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))
        # NOTE(review): the continue/bookkeeping and "su = SrcUploader()"
        # lines were lost in extraction.
        su.maintainer_id = up_id
        su.source_id = source.source_id
        # NOTE(review): the session.add(su) line was lost in extraction.

    return source, dsc_component, dsc_location_id, pfs

__all__.append('add_dsc_to_db')
def add_deb_to_db(u, filename, session=None):
    """
    Contrary to what you might expect, this routine deals with both
    debs and udebs. That info is in 'dbtype', whilst 'type' is
    'deb' for both of them
    """
    # NOTE(review): the cnf/Config() and "bin = DBBinary()" initialisers were
    # lost in extraction -- confirm against upstream dak.
    entry = u.pkg.files[filename]

    bin.package = entry["package"]
    bin.version = entry["version"]
    bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
    bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    bin.arch_id = get_architecture(entry["architecture"], session).arch_id
    bin.binarytype = entry["dbtype"]

    # Find poolfile id
    filename = entry["pool name"] + filename
    fullpath = os.path.join(cnf["Dir::Pool"], filename)
    if not entry.get("location id", None):
        entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id

    if entry.get("files id", None):
        # NOTE(review): looks like a latent bug -- get_poolfile_by_id is
        # called with bin.poolfile_id BEFORE it is assigned on the next
        # line; confirm against upstream dak.
        poolfile = get_poolfile_by_id(bin.poolfile_id)
        bin.poolfile_id = entry["files id"]
    # NOTE(review): the "else:" line was lost in extraction.
        poolfile = add_poolfile(filename, entry, entry["location id"], session)
        bin.poolfile_id = entry["files id"] = poolfile.file_id

    # Find the matching source package; exactly one row is required.
    bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
    if len(bin_sources) != 1:
        raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
                                  (bin.package, bin.version, entry["architecture"],
                                   filename, bin.binarytype, u.pkg.changes["fingerprint"])

    bin.source_id = bin_sources[0].source_id

    # Add and flush object so it has an ID
    # NOTE(review): the session.add(bin)/flush lines were lost in extraction.

    suite_names = u.pkg.changes["distribution"].keys()
    bin.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Deal with contents - disabled for now
    #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
    # NOTE(review): further commented-out lines of the disabled contents
    # handling were lost in extraction.
    #    print "REJECT\nCould not determine contents of package %s" % bin.package
    #    session.rollback()
    #    raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)

__all__.append('add_deb_to_db')
2548 ################################################################################
# ORM object for one row of the source_acl table: an ACL entry used to
# decide who may upload a given source package.
# NOTE(review): this listing is elided; lines 2552-2554 (including the
# "def __repr__(self):" header for the return below) are missing.
2550 class SourceACL(object):
2551 def __init__(self, *args, **kwargs):
# __repr__ body: identify the row by its primary key.
2555 return '<SourceACL %s>' % self.source_acl_id
2557 __all__.append('SourceACL')
2559 ################################################################################
# ORM object for one row of the src_format table (a source package
# format name, e.g. "1.0" or "3.0 (quilt)" — presumably; confirm
# against the table contents).
# NOTE(review): elided listing; the "def __repr__(self):" header for
# the return below is missing between 2562 and 2566.
2561 class SrcFormat(object):
2562 def __init__(self, *args, **kwargs):
# __repr__ body: identify the row by its format name.
2566 return '<SrcFormat %s>' % (self.format_name)
2568 __all__.append('SrcFormat')
2570 ################################################################################
# ORM object for one row of the src_uploaders table: links a source
# package to a maintainer allowed to upload it (see the SrcUploader
# mapper further down, which relates source and maintainer).
# NOTE(review): elided listing; the "def __repr__(self):" header for
# the return below is missing between 2573 and 2577.
2572 class SrcUploader(object):
2573 def __init__(self, *args, **kwargs):
# __repr__ body: identify the row by its primary key.
2577 return '<SrcUploader %s>' % self.uploader_id
2579 __all__.append('SrcUploader')
2581 ################################################################################
# (display name, Suite attribute name) pairs, consumed by the Suite
# details-rendering loop below to print "Display: value" lines.
# NOTE(review): original line 2587 is elided from this listing, so one
# pair is missing between 'Origin' and 'Description'.
2583 SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
2584 ('SuiteID', 'suite_id'),
2585 ('Version', 'version'),
2586 ('Origin', 'origin'),
2588 ('Description', 'description'),
2589 ('Untouchable', 'untouchable'),
2590 ('Announce', 'announce'),
2591 ('Codename', 'codename'),
2592 ('OverrideCodename', 'overridecodename'),
2593 ('ValidTime', 'validtime'),
2594 ('Priority', 'priority'),
2595 ('NotAutomatic', 'notautomatic'),
2596 ('CopyChanges', 'copychanges'),
2597 ('OverrideSuite', 'overridesuite')]
2599 # Why the heck don't we have any UNIQUE constraints in table suite?
2600 # TODO: Add UNIQUE constraints for appropriate columns.
# ORM object for one row of the suite table (e.g. "unstable").
# Instances compare equal/unequal to a plain string naming the suite.
2601 class Suite(ORMObject):
2602 def __init__(self, suite_name = None, version = None):
2603 self.suite_name = suite_name
2604 self.version = version
# Attributes exposed through the ORMObject machinery (repr/json).
2606 def properties(self):
2607 return ['suite_name', 'version', 'sources_count', 'binaries_count']
# Columns ORMObject should treat as NOT NULL when validating.
2609 def not_null_constraints(self):
2610 return ['suite_name', 'version']
# String comparison convenience: suite == 'unstable' compares the
# name; non-string operands fall back to the default comparison.
2612 def __eq__(self, val):
2613 if isinstance(val, str):
2614 return (self.suite_name == val)
2615 # This signals to use the normal comparison operator
2616 return NotImplemented
2618 def __ne__(self, val):
2619 if isinstance(val, str):
2620 return (self.suite_name != val)
2621 # This signals to use the normal comparison operator
2622 return NotImplemented
# Details-rendering method (its "def" header, around original line
# 2624, is elided from this listing): formats every SUITE_FIELDS
# attribute as a "Display: value" line and joins them with newlines.
2626 for disp, field in SUITE_FIELDS:
2627 val = getattr(self, field, None)
2629 ret.append("%s: %s" % (disp, val))
2631 return "\n".join(ret)
2633 def get_architectures(self, skipsrc=False, skipall=False):
2635 Returns list of Architecture objects
2637 @type skipsrc: boolean
2638 @param skipsrc: Whether to skip returning the 'source' architecture entry
2641 @type skipall: boolean
2642 @param skipall: Whether to skip returning the 'all' architecture entry
2646 @return: list of Architecture objects for the given name (may be empty)
# Architectures associated with this suite; the elided guard lines
# (2650/2652) presumably make the two filters conditional on
# skipsrc/skipall — confirm against the full source.
2649 q = object_session(self).query(Architecture).with_parent(self)
2651 q = q.filter(Architecture.arch_string != 'source')
2653 q = q.filter(Architecture.arch_string != 'all')
2654 return q.order_by(Architecture.arch_string).all()
2656 def get_sources(self, source):
2658 Returns a query object representing DBSource that is part of C{suite}.
2660 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2662 @type source: string
2663 @param source: source package name
2665 @rtype: sqlalchemy.orm.query.Query
2666 @return: a query of DBSource
# Returns a lazy Query, not a materialised list; the continuation of
# this chained expression (original line 2672) is elided here.
2670 session = object_session(self)
2671 return session.query(DBSource).filter_by(source = source). \
2674 __all__.append('Suite')
2677 def get_suite(suite, session=None):
2679 Returns Suite object for given C{suite name}.
2682 @param suite: The name of the suite
2684 @type session: Session
2685 @param session: Optional SQLA session object (a temporary one will be
2686 generated if not supplied)
2689 @return: Suite object for the requested suite name (None if not present)
# Look the suite up by name.  The try/one() and the "return None"
# branch are elided from this listing; NoResultFound is presumably
# mapped to a None return — confirm against the full source.
2692 q = session.query(Suite).filter_by(suite_name=suite)
2696 except NoResultFound:
2699 __all__.append('get_suite')
2701 ################################################################################
2703 # TODO: should be removed because the implementation is too trivial
# Thin wrapper: delegates to Suite.get_architectures() (see the TODO
# above about removing it).
2705 def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
2707 Returns list of Architecture objects for given C{suite} name
2710 @param suite: Suite name to search for
2712 @type skipsrc: boolean
2713 @param skipsrc: Whether to skip returning the 'source' architecture entry
2716 @type skipall: boolean
2717 @param skipall: Whether to skip returning the 'all' architecture entry
2720 @type session: Session
2721 @param session: Optional SQL session object (a temporary one will be
2722 generated if not supplied)
2725 @return: list of Architecture objects for the given name (may be empty)
# NOTE(review): raises AttributeError if get_suite() returns None for
# an unknown suite name — no guard is visible here.
2728 return get_suite(suite, session).get_architectures(skipsrc, skipall)
2730 __all__.append('get_suite_architectures')
2732 ################################################################################
# ORM object for one row of the suite_src_formats association table
# (which source formats are allowed in which suite).
# NOTE(review): elided listing; the "def __repr__(self):" header for
# the return below is missing between 2735 and 2739.
2734 class SuiteSrcFormat(object):
2735 def __init__(self, *args, **kwargs):
# __repr__ body: identify the row by its (suite, format) key pair.
2739 return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
2741 __all__.append('SuiteSrcFormat')
2744 def get_suite_src_formats(suite, session=None):
2746 Returns list of allowed SrcFormat for C{suite}.
2749 @param suite: Suite name to search for
2751 @type session: Session
2752 @param session: Optional SQL session object (a temporary one will be
2753 generated if not supplied)
2756 @return: the list of allowed source formats for I{suite}
# Join SrcFormat -> SuiteSrcFormat -> Suite and filter by suite name;
# the final "return q.all()" (original line 2764) is elided here.
2759 q = session.query(SrcFormat)
2760 q = q.join(SuiteSrcFormat)
2761 q = q.join(Suite).filter_by(suite_name=suite)
2762 q = q.order_by('format_name')
2766 __all__.append('get_suite_src_formats')
2768 ################################################################################
# ORM object for one row of the uid table (an uploader identity).
# Instances compare equal/unequal to a plain uid string, mirroring the
# convenience comparison on Suite above.
2770 class Uid(ORMObject):
2771 def __init__(self, uid = None, name = None):
2775 def __eq__(self, val):
2776 if isinstance(val, str):
2777 return (self.uid == val)
2778 # This signals to use the normal comparison operator
2779 return NotImplemented
2781 def __ne__(self, val):
2782 if isinstance(val, str):
2783 return (self.uid != val)
2784 # This signals to use the normal comparison operator
2785 return NotImplemented
# Attributes exposed through the ORMObject machinery (repr/json).
2787 def properties(self):
2788 return ['uid', 'name', 'fingerprint']
# NOT NULL columns for ORMObject validation; the return line
# (original 2791) is elided from this listing.
2790 def not_null_constraints(self):
2793 __all__.append('Uid')
2796 def get_or_set_uid(uidname, session=None):
2798 Returns uid object for given uidname.
2800 If no matching uidname is found, a row is inserted.
2802 @type uidname: string
2803 @param uidname: The uid to add
2805 @type session: SQLAlchemy
2806 @param session: Optional SQL session object (a temporary one will be
2807 generated if not supplied). If not passed, a commit will be performed at
2808 the end of the function, otherwise the caller is responsible for commiting.
2811 @return: the uid object for the given uidname
# Get-or-create: query first; on NoResultFound the elided lines
# (2819-2821) presumably construct and add a new Uid row before the
# commit_or_flush below — confirm against the full source.
2814 q = session.query(Uid).filter_by(uid=uidname)
2818 except NoResultFound:
# commit if we own the session, flush if the caller passed one in.
2822 session.commit_or_flush()
2827 __all__.append('get_or_set_uid')
# Return the Uid associated with the given fingerprint string.
# The try/one() and the not-found branch (presumably returning None)
# are elided from this listing — confirm against the full source.
2830 def get_uid_from_fingerprint(fpr, session=None):
2831 q = session.query(Uid)
2832 q = q.join(Fingerprint).filter_by(fingerprint=fpr)
2836 except NoResultFound:
2839 __all__.append('get_uid_from_fingerprint')
2841 ################################################################################
# ORM object for one row of the upload_blocks table: blocks uploads of
# a source package (see the UploadBlock mapper below, which relates
# fingerprint and uid).
# NOTE(review): elided listing; the "def __repr__(self):" header for
# the return below is missing between 2844 and 2848.
2843 class UploadBlock(object):
2844 def __init__(self, *args, **kwargs):
# __repr__ body: identify the row by source name and primary key.
2848 return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
2850 __all__.append('UploadBlock')
2852 ################################################################################
# Singleton-ish database connection holder ("Borg" pattern: every
# instance shares __dict__ via __shared_state, whose definition is
# elided from this listing).  On first construction it creates the
# engine, reflects the tables and wires up all ORM mappers.
2854 class DBConn(object):
2856 database module init.
2860 def __init__(self, *args, **kwargs):
# Borg pattern: all instances share one state dictionary.
2861 self.__dict__ = self.__shared_state
# Only the first instantiation sets up the engine/tables/mappers;
# the call into __createconn (original line 2866) is elided here.
2863 if not getattr(self, 'initialised', False):
2864 self.initialised = True
2865 self.debug = kwargs.has_key('debug')
# Reflect database tables and views into Table objects, exposed as
# self.tbl_<name> / self.view_<name> attributes.
2868 def __setuptables(self):
# NOTE(review): many table/view name entries are elided from this
# listing (original lines 2870-2879, 2883-2891, etc.).
2869 tables_with_primary = (
2880 'changes_pending_binaries',
2881 'changes_pending_files',
2882 'changes_pending_source',
2892 'pending_bin_contents',
2904 # The following tables have primary keys but sqlalchemy
2905 # version 0.5 fails to reflect them correctly with database
2906 # versions before upgrade #41.
2908 #'build_queue_files',
2911 tables_no_primary = (
2913 'changes_pending_files_map',
2914 'changes_pending_source_files',
2915 'changes_pool_files',
2918 'suite_architectures',
2919 'suite_src_formats',
2920 'suite_build_queue_copy',
2922 # see the comment above
2924 'build_queue_files',
# Read-only views (the "views = (" opener is elided from this
# listing).
2928 'almost_obsolete_all_associations',
2929 'almost_obsolete_src_associations',
2930 'any_associations_source',
2931 'bin_assoc_by_arch',
2932 'bin_associations_binaries',
2933 'binaries_suite_arch',
2934 'binfiles_suite_component_arch',
2937 'newest_all_associations',
2938 'newest_any_associations',
2940 'newest_src_association',
2941 'obsolete_all_associations',
2942 'obsolete_any_associations',
2943 'obsolete_any_by_all_associations',
2944 'obsolete_src_associations',
2946 'src_associations_bin',
2947 'src_associations_src',
2948 'suite_arch_by_name',
2951 # Sqlalchemy version 0.5 fails to reflect the SERIAL type
2952 # correctly and that is why we have to use a workaround. It can
2953 # be removed as soon as we switch to version 0.6.
2954 for table_name in tables_with_primary:
2955 table = Table(table_name, self.db_meta, \
2956 Column('id', Integer, primary_key = True), \
2957 autoload=True, useexisting=True)
2958 setattr(self, 'tbl_%s' % table_name, table)
2960 for table_name in tables_no_primary:
2961 table = Table(table_name, self.db_meta, autoload=True)
2962 setattr(self, 'tbl_%s' % table_name, table)
2964 for view_name in views:
2965 view = Table(view_name, self.db_meta, autoload=True)
2966 setattr(self, 'view_%s' % view_name, view)
# Wire each ORM class defined in this module to its reflected table.
# Classic SQLAlchemy 0.5 mapper() configuration: renamed columns,
# relation()s with backrefs, and a validator MapperExtension on the
# classes that participate in validation.
2968 def __setupmappers(self):
2969 mapper(Architecture, self.tbl_architecture,
2970 properties = dict(arch_id = self.tbl_architecture.c.id,
2971 suites = relation(Suite, secondary=self.tbl_suite_architectures,
2972 order_by='suite_name',
2973 backref=backref('architectures', order_by='arch_string'))),
2974 extension = validator)
2976 mapper(Archive, self.tbl_archive,
2977 properties = dict(archive_id = self.tbl_archive.c.id,
2978 archive_name = self.tbl_archive.c.name))
2980 mapper(PendingBinContents, self.tbl_pending_bin_contents,
2981 properties = dict(contents_id =self.tbl_pending_bin_contents.c.id,
2982 filename = self.tbl_pending_bin_contents.c.filename,
2983 package = self.tbl_pending_bin_contents.c.package,
2984 version = self.tbl_pending_bin_contents.c.version,
2985 arch = self.tbl_pending_bin_contents.c.arch,
2986 otype = self.tbl_pending_bin_contents.c.type))
2988 mapper(DebContents, self.tbl_deb_contents,
2989 properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
2990 package=self.tbl_deb_contents.c.package,
2991 suite=self.tbl_deb_contents.c.suite,
2992 arch=self.tbl_deb_contents.c.arch,
2993 section=self.tbl_deb_contents.c.section,
2994 filename=self.tbl_deb_contents.c.filename))
2996 mapper(UdebContents, self.tbl_udeb_contents,
2997 properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
2998 package=self.tbl_udeb_contents.c.package,
2999 suite=self.tbl_udeb_contents.c.suite,
3000 arch=self.tbl_udeb_contents.c.arch,
3001 section=self.tbl_udeb_contents.c.section,
3002 filename=self.tbl_udeb_contents.c.filename))
3004 mapper(BuildQueue, self.tbl_build_queue,
3005 properties = dict(queue_id = self.tbl_build_queue.c.id))
3007 mapper(BuildQueueFile, self.tbl_build_queue_files,
3008 properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
3009 poolfile = relation(PoolFile, backref='buildqueueinstances')))
3011 mapper(DBBinary, self.tbl_binaries,
3012 properties = dict(binary_id = self.tbl_binaries.c.id,
3013 package = self.tbl_binaries.c.package,
3014 version = self.tbl_binaries.c.version,
3015 maintainer_id = self.tbl_binaries.c.maintainer,
3016 maintainer = relation(Maintainer),
3017 source_id = self.tbl_binaries.c.source,
3018 source = relation(DBSource, backref='binaries'),
3019 arch_id = self.tbl_binaries.c.architecture,
3020 architecture = relation(Architecture),
3021 poolfile_id = self.tbl_binaries.c.file,
3022 poolfile = relation(PoolFile, backref=backref('binary', uselist = False)),
3023 binarytype = self.tbl_binaries.c.type,
3024 fingerprint_id = self.tbl_binaries.c.sig_fpr,
3025 fingerprint = relation(Fingerprint),
3026 install_date = self.tbl_binaries.c.install_date,
3027 suites = relation(Suite, secondary=self.tbl_bin_associations,
3028 backref=backref('binaries', lazy='dynamic'))),
3029 extension = validator)
3031 mapper(BinaryACL, self.tbl_binary_acl,
3032 properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))
3034 mapper(BinaryACLMap, self.tbl_binary_acl_map,
3035 properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
3036 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
3037 architecture = relation(Architecture)))
3039 mapper(Component, self.tbl_component,
3040 properties = dict(component_id = self.tbl_component.c.id,
3041 component_name = self.tbl_component.c.name),
3042 extension = validator)
3044 mapper(DBConfig, self.tbl_config,
3045 properties = dict(config_id = self.tbl_config.c.id))
3047 mapper(DSCFile, self.tbl_dsc_files,
3048 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
3049 source_id = self.tbl_dsc_files.c.source,
3050 source = relation(DBSource),
3051 poolfile_id = self.tbl_dsc_files.c.file,
3052 poolfile = relation(PoolFile)))
3054 mapper(PoolFile, self.tbl_files,
3055 properties = dict(file_id = self.tbl_files.c.id,
3056 filesize = self.tbl_files.c.size,
3057 location_id = self.tbl_files.c.location,
3058 location = relation(Location,
3059 # using lazy='dynamic' in the back
3060 # reference because we have A LOT of
3061 # files in one location
3062 backref=backref('files', lazy='dynamic'))),
3063 extension = validator)
3065 mapper(Fingerprint, self.tbl_fingerprint,
3066 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
3067 uid_id = self.tbl_fingerprint.c.uid,
3068 uid = relation(Uid),
3069 keyring_id = self.tbl_fingerprint.c.keyring,
3070 keyring = relation(Keyring),
3071 source_acl = relation(SourceACL),
3072 binary_acl = relation(BinaryACL)),
3073 extension = validator)
3075 mapper(Keyring, self.tbl_keyrings,
3076 properties = dict(keyring_name = self.tbl_keyrings.c.name,
3077 keyring_id = self.tbl_keyrings.c.id))
3079 mapper(DBChange, self.tbl_changes,
3080 properties = dict(change_id = self.tbl_changes.c.id,
3081 poolfiles = relation(PoolFile,
3082 secondary=self.tbl_changes_pool_files,
3083 backref="changeslinks"),
3084 seen = self.tbl_changes.c.seen,
3085 source = self.tbl_changes.c.source,
3086 binaries = self.tbl_changes.c.binaries,
3087 architecture = self.tbl_changes.c.architecture,
3088 distribution = self.tbl_changes.c.distribution,
3089 urgency = self.tbl_changes.c.urgency,
3090 maintainer = self.tbl_changes.c.maintainer,
3091 changedby = self.tbl_changes.c.changedby,
3092 date = self.tbl_changes.c.date,
3093 version = self.tbl_changes.c.version,
3094 files = relation(ChangePendingFile,
3095 secondary=self.tbl_changes_pending_files_map,
3096 backref="changesfile"),
3097 in_queue_id = self.tbl_changes.c.in_queue,
3098 in_queue = relation(PolicyQueue,
3099 primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
3100 approved_for_id = self.tbl_changes.c.approved_for))
3102 mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
3103 properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))
3105 mapper(ChangePendingFile, self.tbl_changes_pending_files,
3106 properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
3107 filename = self.tbl_changes_pending_files.c.filename,
3108 size = self.tbl_changes_pending_files.c.size,
3109 md5sum = self.tbl_changes_pending_files.c.md5sum,
3110 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
3111 sha256sum = self.tbl_changes_pending_files.c.sha256sum))
3113 mapper(ChangePendingSource, self.tbl_changes_pending_source,
3114 properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
3115 change = relation(DBChange),
# Explicit primaryjoins: maintainer and changedby both point at
# the maintainer table, so SQLAlchemy cannot infer the joins.
3116 maintainer = relation(Maintainer,
3117 primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
3118 changedby = relation(Maintainer,
3119 primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
3120 fingerprint = relation(Fingerprint),
3121 source_files = relation(ChangePendingFile,
3122 secondary=self.tbl_changes_pending_source_files,
3123 backref="pending_sources")))
3126 mapper(KeyringACLMap, self.tbl_keyring_acl_map,
3127 properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
3128 keyring = relation(Keyring, backref="keyring_acl_map"),
3129 architecture = relation(Architecture)))
3131 mapper(Location, self.tbl_location,
3132 properties = dict(location_id = self.tbl_location.c.id,
3133 component_id = self.tbl_location.c.component,
3134 component = relation(Component, \
3135 backref=backref('location', uselist = False)),
3136 archive_id = self.tbl_location.c.archive,
3137 archive = relation(Archive),
3138 # FIXME: the 'type' column is old cruft and
3139 # should be removed in the future.
3140 archive_type = self.tbl_location.c.type),
3141 extension = validator)
3143 mapper(Maintainer, self.tbl_maintainer,
3144 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
3145 maintains_sources = relation(DBSource, backref='maintainer',
3146 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
3147 changed_sources = relation(DBSource, backref='changedby',
3148 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
3149 extension = validator)
3151 mapper(NewComment, self.tbl_new_comments,
3152 properties = dict(comment_id = self.tbl_new_comments.c.id))
3154 mapper(Override, self.tbl_override,
3155 properties = dict(suite_id = self.tbl_override.c.suite,
3156 suite = relation(Suite),
3157 package = self.tbl_override.c.package,
3158 component_id = self.tbl_override.c.component,
3159 component = relation(Component),
3160 priority_id = self.tbl_override.c.priority,
3161 priority = relation(Priority),
3162 section_id = self.tbl_override.c.section,
3163 section = relation(Section),
3164 overridetype_id = self.tbl_override.c.type,
3165 overridetype = relation(OverrideType)))
3167 mapper(OverrideType, self.tbl_override_type,
3168 properties = dict(overridetype = self.tbl_override_type.c.type,
3169 overridetype_id = self.tbl_override_type.c.id))
3171 mapper(PolicyQueue, self.tbl_policy_queue,
3172 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))
3174 mapper(Priority, self.tbl_priority,
3175 properties = dict(priority_id = self.tbl_priority.c.id))
3177 mapper(Section, self.tbl_section,
3178 properties = dict(section_id = self.tbl_section.c.id,
3179 section=self.tbl_section.c.section))
3181 mapper(DBSource, self.tbl_source,
3182 properties = dict(source_id = self.tbl_source.c.id,
3183 version = self.tbl_source.c.version,
3184 maintainer_id = self.tbl_source.c.maintainer,
3185 poolfile_id = self.tbl_source.c.file,
3186 poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
3187 fingerprint_id = self.tbl_source.c.sig_fpr,
3188 fingerprint = relation(Fingerprint),
3189 changedby_id = self.tbl_source.c.changedby,
3190 srcfiles = relation(DSCFile,
3191 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
3192 suites = relation(Suite, secondary=self.tbl_src_associations,
3193 backref=backref('sources', lazy='dynamic')),
3194 srcuploaders = relation(SrcUploader)),
3195 extension = validator)
3197 mapper(SourceACL, self.tbl_source_acl,
3198 properties = dict(source_acl_id = self.tbl_source_acl.c.id))
3200 mapper(SrcFormat, self.tbl_src_format,
3201 properties = dict(src_format_id = self.tbl_src_format.c.id,
3202 format_name = self.tbl_src_format.c.format_name))
3204 mapper(SrcUploader, self.tbl_src_uploaders,
3205 properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
3206 source_id = self.tbl_src_uploaders.c.source,
3207 source = relation(DBSource,
3208 primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
3209 maintainer_id = self.tbl_src_uploaders.c.maintainer,
3210 maintainer = relation(Maintainer,
3211 primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))
3213 mapper(Suite, self.tbl_suite,
3214 properties = dict(suite_id = self.tbl_suite.c.id,
3215 policy_queue = relation(PolicyQueue),
3216 copy_queues = relation(BuildQueue,
3217 secondary=self.tbl_suite_build_queue_copy)),
3218 extension = validator)
3220 mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
3221 properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
3222 suite = relation(Suite, backref='suitesrcformats'),
3223 src_format_id = self.tbl_suite_src_formats.c.src_format,
3224 src_format = relation(SrcFormat)))
3226 mapper(Uid, self.tbl_uid,
3227 properties = dict(uid_id = self.tbl_uid.c.id,
3228 fingerprint = relation(Fingerprint)),
3229 extension = validator)
3231 mapper(UploadBlock, self.tbl_upload_blocks,
3232 properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
3233 fingerprint = relation(Fingerprint, backref="uploadblocks"),
3234 uid = relation(Uid, backref="uploadblocks")))
3236 ## Connection functions
# Build the postgres connection string from dak's Config, create the
# engine, reflect tables and set up mappers.  The branch headers
# around lines 3239-3247 (e.g. the "if DB::Host" test choosing TCP vs
# unix-socket form) are elided from this listing.
3237 def __createconn(self):
3238 from config import Config
# TCP form: postgres://host[:port]/dbname
3242 connstr = "postgres://%s" % cnf["DB::Host"]
3243 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3244 connstr += ":%s" % cnf["DB::Port"]
3245 connstr += "/%s" % cnf["DB::Name"]
# Unix-socket form: postgres:///dbname[?port=...]
3248 connstr = "postgres:///%s" % cnf["DB::Name"]
3249 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3250 connstr += "?port=%s" % cnf["DB::Port"]
# echo=self.debug makes SQLAlchemy log every statement in debug mode.
3252 self.db_pg = create_engine(connstr, echo=self.debug)
3253 self.db_meta = MetaData()
3254 self.db_meta.bind = self.db_pg
3255 self.db_smaker = sessionmaker(bind=self.db_pg,
3259 self.__setuptables()
3260 self.__setupmappers()
# session() (its "def" header is elided here): hand out a new
# session from the shared sessionmaker.
3263 return self.db_smaker()
3265 __all__.append('DBConn')