5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
37 from os.path import normpath
48 import simplejson as json
50 from datetime import datetime, timedelta
51 from errno import ENOENT
52 from tempfile import mkstemp, mkdtemp
53 from subprocess import Popen, PIPE
54 from tarfile import TarFile
56 from inspect import getargspec
59 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
61 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
62 backref, MapperExtension, EXT_CONTINUE, object_mapper
63 from sqlalchemy import types as sqltypes
65 # Don't remove this, we re-export the exceptions to scripts which import us
66 from sqlalchemy.exc import *
67 from sqlalchemy.orm.exc import NoResultFound
69 # Only import Config until Queue stuff is changed to store its config
71 from config import Config
72 from textutils import fix_maintainer
73 from dak_exceptions import DBUpdateError, NoSourceFieldError
75 # suppress some deprecation warnings in squeeze related to sqlalchemy
77 warnings.filterwarnings('ignore', \
78 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
80 # TODO: sqlalchemy needs some extra configuration to correctly reflect
81 # the ind_deb_contents_* indexes - we ignore the warnings at the moment
82 warnings.filterwarnings("ignore", 'Predicate of partial index', SAWarning)
85 ################################################################################
87 # Patch in support for the debversion field type so that it works during
91 # that is for sqlalchemy 0.6
92 UserDefinedType = sqltypes.UserDefinedType
94 # this one for sqlalchemy 0.5
95 UserDefinedType = sqltypes.TypeEngine
class DebVersion(UserDefinedType):
    """Column type for PostgreSQL's 'debversion' type so SQLAlchemy can
    bind and reflect Debian version columns.

    NOTE(review): the method bodies below are missing from this excerpt —
    presumably get_col_spec() returns the literal type name 'debversion'
    and the processors pass values through unchanged; confirm against the
    full file.
    """
    def get_col_spec(self):
    def bind_processor(self, dialect):
    # ' = None' is needed for sqlalchemy 0.5:
    def result_processor(self, dialect, coltype = None):

# Register the type with the dialect so reflected tables map 'debversion'
# columns onto DebVersion. Only SQLAlchemy 0.5/0.6 are supported.
sa_major_version = sqlalchemy.__version__[0:3]
if sa_major_version in ["0.5", "0.6"]:
    from sqlalchemy.databases import postgres
    postgres.ischema_names['debversion'] = DebVersion
    # NOTE(review): the 'else:' header for this raise is missing from this
    # excerpt; the raise belongs to the unsupported-version branch.
    raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
115 ################################################################################
117 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
119 ################################################################################
def session_wrapper(fn):
    """
    Wrapper around common ".., session=None):" handling. If the wrapped
    function is called without passing 'session', we create a local one
    and destroy it when the function ends.

    Also attaches a commit_or_flush method to the session; if we created a
    local session, this is a synonym for session.commit(), otherwise it is a
    synonym for session.flush().
    """

    def wrapped(*args, **kwargs):
        private_transaction = False

        # Find the session object
        session = kwargs.get('session')

        # NOTE(review): the guard ('if session is None:') and the matching
        # else branch are missing from this excerpt; the visible statements
        # are kept unchanged below.
        if len(args) <= len(getargspec(fn)[0]) - 1:
            # No session specified as last argument or in kwargs
            private_transaction = True
            session = kwargs['session'] = DBConn().session()

        # Session is last argument in args
            session = args[-1] = DBConn().session()
            private_transaction = True

        # A locally created session commits for the caller; a caller-supplied
        # one is only flushed, leaving commit control with the caller.
        if private_transaction:
            session.commit_or_flush = session.commit
            session.commit_or_flush = session.flush

            return fn(*args, **kwargs)

        # NOTE(review): this is presumably inside a try/finally whose
        # surrounding lines are missing from this excerpt.
        if private_transaction:
            # We created a session; close it.

    # Preserve the wrapped function's metadata (Python 2 style).
    wrapped.__doc__ = fn.__doc__
    wrapped.func_name = fn.func_name
168 __all__.append('session_wrapper')
170 ################################################################################
class ORMObject(object):
    """
    ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
    derived classes must implement the properties() method.
    """

    def properties(self):
        """
        This method should be implemented by all derived classes and returns a
        list of the important properties. The properties 'created' and
        'modified' will be added automatically. A suffix '_count' should be
        added to properties that are lists or query objects. The most important
        property name should be returned as the first element in the list
        because it is used by repr().
        """

    # NOTE(review): the 'def json(self):' header is missing from this
    # excerpt; the docstring and body below belong to it.
        """
        Returns a JSON representation of the object based on the properties
        returned from the properties() method.
        """
        # add created and modified
        all_properties = self.properties() + ['created', 'modified']
        for property in all_properties:
            # check for list or query
            if property[-6:] == '_count':
                real_property = property[:-6]
                if not hasattr(self, real_property):
                value = getattr(self, real_property)
                # lists expose __len__, SQLAlchemy query objects expose count()
                if hasattr(value, '__len__'):
                elif hasattr(value, 'count'):
                    value = value.count()
                    raise KeyError('Do not understand property %s.' % property)
                if not hasattr(self, property):
                value = getattr(self, property)
            elif isinstance(value, ORMObject):
                # use repr() for ORMObject types
            # we want a string for all other types because json cannot
            data[property] = value
        return json.dumps(data)

    # NOTE(review): 'def classname(self):' header missing in this excerpt.
        """
        Returns the name of the class.
        """
        return type(self).__name__

    # NOTE(review): 'def __repr__(self):' header missing in this excerpt.
        """
        Returns a short string representation of the object using the first
        element from the properties() method.
        """
        primary_property = self.properties()[0]
        value = getattr(self, primary_property)
        return '<%s %s>' % (self.classname(), str(value))

    # NOTE(review): 'def __str__(self):' header missing in this excerpt.
        """
        Returns a human readable form of the object using the properties()
        method.
        """
        return '<%s %s>' % (self.classname(), self.json())

    def not_null_constraints(self):
        """
        Returns a list of properties that must be not NULL. Derived classes
        should override this method if needed.
        """

    # Placeholders: offending property name, str(self) of the object.
    validation_message = \
        "Validation failed because property '%s' must not be empty in object\n%s"

    # NOTE(review): 'def validate(self):' header missing in this excerpt.
        """
        This function validates the not NULL constraints as returned by
        not_null_constraints(). It raises the DBUpdateError exception if
        validation fails.
        """
        for property in self.not_null_constraints():
            # TODO: It is a bit awkward that the mapper configuration allow
            # directly setting the numeric _id columns. We should get rid of it
            if hasattr(self, property + '_id') and \
                getattr(self, property + '_id') is not None:
            if not hasattr(self, property) or getattr(self, property) is None:
                raise DBUpdateError(self.validation_message % \
                    (property, str(self)))

    # NOTE(review): the '@classmethod' decorator line is not visible in this
    # excerpt, but the 'cls' parameter indicates a classmethod.
    def get(cls, primary_key, session = None):
        """
        This is a support function that allows getting an object by its primary
        key, e.g.

        Architecture.get(3[, session])

        instead of the more verbose

        session.query(Architecture).get(3)
        """
        return session.query(cls).get(primary_key)

    def session(self, replace = False):
        """
        Returns the current session that is associated with the object. May
        return None if the object is in detached state.
        """
        return object_session(self)

    def clone(self, session = None):
        """
        Clones the current object in a new session and returns the new clone. A
        fresh session is created if the optional session parameter is not
        provided. The function will fail if a session is provided and has
        unflushed changes.

        RATIONALE: SQLAlchemy's session is not thread safe. This method clones
        an existing object to allow several threads to work with their own
        instances of an ORMObject.

        WARNING: Only persistent (committed) objects can be cloned. Changes
        made to the original object that are not committed yet will get lost.
        The session of the new object will always be rolled back to avoid
        unintended changes.
        """
        if self.session() is None:
            raise RuntimeError( \
                'Method clone() failed for detached object:\n%s' % self)
        self.session().flush()
        # Re-fetch the same row by primary key through the target session.
        mapper = object_mapper(self)
        primary_key = mapper.primary_key_from_instance(self)
        object_class = self.__class__
        # NOTE(review): the 'if session is None:' guard line is missing from
        # this excerpt.
            session = DBConn().session()
        elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
            raise RuntimeError( \
                'Method clone() failed due to unflushed changes in session.')
        new_object = session.query(object_class).get(primary_key)
        # NOTE(review): the session.rollback() and final 'return new_object'
        # lines are missing from this excerpt.
        if new_object is None:
            raise RuntimeError( \
                'Method clone() failed for non-persistent object:\n%s' % self)
338 __all__.append('ORMObject')
340 ################################################################################
class Validator(MapperExtension):
    """
    This class calls the validate() method for each instance for the
    'before_update' and 'before_insert' events. A global object validator is
    used for configuring the individual mappers.
    """

    def before_update(self, mapper, connection, instance):
        # NOTE(review): body missing in this excerpt — presumably calls
        # instance.validate() and returns EXT_CONTINUE; confirm against
        # the full file.

    def before_insert(self, mapper, connection, instance):

# Shared extension instance handed to the individual mapper() calls.
validator = Validator()
359 ################################################################################
class Architecture(ORMObject):
    """ORM class for a row of the 'architecture' table."""

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Support comparing an Architecture directly against a plain
        # architecture name; otherwise fall back to default comparison.
        if not isinstance(val, str):
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        if not isinstance(val, str):
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']
384 __all__.append('Architecture')
def get_architecture(architecture, session=None):
    """
    Returns the Architecture object for given C{architecture} name.

    @type architecture: string
    @param architecture: The name of the architecture

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Architecture
    @return: Architecture object for the given arch (None if not present)
    """
    q = session.query(Architecture).filter_by(arch_string=architecture)

    try:
        return q.one()
    except NoResultFound:
        return None
409 __all__.append('get_architecture')
411 # TODO: should be removed because the implementation is too trivial
def get_architecture_suites(architecture, session=None):
    """
    Returns list of Suite objects for given C{architecture} name

    @type architecture: str
    @param architecture: Architecture name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Suite objects for the given name (may be empty)
    """
    # NOTE(review): raises AttributeError when the architecture does not
    # exist, because get_architecture() then returns None.
    return get_architecture(architecture, session).suites
430 __all__.append('get_architecture_suites')
432 ################################################################################
class Archive(object):
    """Trivial ORM holder for a row of the 'archive' table."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # archive_name is filled in by the SQLAlchemy mapper.
        return '<Archive %s>' % self.archive_name
441 __all__.append('Archive')
def get_archive(archive, session=None):
    """
    Returns the Archive object for given C{archive} name.

    @type archive: string
    @param archive: the name of the archive

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Archive
    @return: Archive object for the given name (None if not present)
    """
    # Archive names are stored lowercase; normalise before querying.
    archive = archive.lower()

    q = session.query(Archive).filter_by(archive_name=archive)

    try:
        return q.one()
    except NoResultFound:
        return None
468 __all__.append('get_archive')
470 ################################################################################
class BinContents(ORMObject):
    """Association of a shipped filename with the binary package that
    contains it."""

    def __init__(self, file = None, binary = None):
        self.file = file
        self.binary = binary

    def properties(self):
        return ['file', 'binary']
480 __all__.append('BinContents')
482 ################################################################################
class DBBinary(ORMObject):
    """ORM class for a row of the 'binaries' table (one .deb or .udeb)."""

    def __init__(self, package = None, source = None, version = None, \
        maintainer = None, architecture = None, poolfile = None, \
        binarytype = 'deb'):
        self.package = package
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.architecture = architecture
        self.poolfile = poolfile
        self.binarytype = binarytype

    def properties(self):
        return ['package', 'version', 'maintainer', 'source', 'architecture', \
            'poolfile', 'binarytype', 'fingerprint', 'install_date', \
            'suites_count', 'binary_id', 'contents_count']

    def not_null_constraints(self):
        return ['package', 'version', 'maintainer', 'source', 'poolfile', \
            'binarytype']

    def get_component_name(self):
        """Returns the component name of the package's pool file."""
        return self.poolfile.location.component.component_name

    def scan_contents(self):
        """
        Yields the contents of the package. Only regular files are yielded and
        the path names are normalized after converting them from iso8859-1
        encoded strings.
        """
        fullpath = self.poolfile.fullpath
        # Let dpkg-deb unpack the data member and stream it as a tar file.
        debdata = Popen(['dpkg-deb', '--fsys-tarfile', fullpath],
            stdout = PIPE).stdout
        tar = TarFile.open(fileobj = debdata, mode = 'r|')
        for member in tar.getmembers():
            # Skip directories, symlinks, devices etc.
            if member.isfile():
                yield normpath(member.name.decode('iso8859-1'))
524 __all__.append('DBBinary')
def get_suites_binary_in(package, session=None):
    """
    Returns list of Suite objects which given C{package} name is in

    @type package: str
    @param package: DBBinary package name to search for

    @rtype: list
    @return: list of Suite objects for the given package
    """
    return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()
540 __all__.append('get_suites_binary_in')
def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
    """
    Returns the component name of the newest binary package in suite_list or
    None if no package is found. The result can be optionally filtered by a list
    of architecture names.

    @type package: str
    @param package: DBBinary package name to search for

    @type suite_list: list of str
    @param suite_list: list of suite_name items

    @type arch_list: list of str
    @param arch_list: optional list of arch_string items that defaults to []

    @rtype: str or NoneType
    @return: name of component or None
    """
    # NOTE: the mutable default for arch_list is safe here — it is only read.
    q = session.query(DBBinary).filter_by(package = package). \
        join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
    if len(arch_list) > 0:
        q = q.join(DBBinary.architecture). \
            filter(Architecture.arch_string.in_(arch_list))
    # Newest version wins.
    binary = q.order_by(desc(DBBinary.version)).first()
    if binary is None:
        return None
    return binary.get_component_name()
573 __all__.append('get_component_by_package_suite')
575 ################################################################################
class BinaryACL(object):
    """Trivial ORM holder for a row of the 'binary_acl' table."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BinaryACL %s>' % self.binary_acl_id
584 __all__.append('BinaryACL')
586 ################################################################################
class BinaryACLMap(object):
    """Trivial ORM holder for a row of the 'binary_acl_map' table."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BinaryACLMap %s>' % self.binary_acl_map_id
595 __all__.append('BinaryACLMap')
597 ################################################################################
602 ArchiveDir "%(archivepath)s";
603 OverrideDir "%(overridedir)s";
604 CacheDir "%(cachedir)s";
609 Packages::Compress ". bzip2 gzip";
610 Sources::Compress ". bzip2 gzip";
615 bindirectory "incoming"
620 BinOverride "override.sid.all3";
621 BinCacheDB "packages-accepted.db";
623 FileList "%(filelist)s";
626 Packages::Extensions ".deb .udeb";
629 bindirectory "incoming/"
632 BinOverride "override.sid.all3";
633 SrcOverride "override.sid.all3.src";
634 FileList "%(filelist)s";
class BuildQueue(object):
    """A build queue plus the logic that maintains its on-disk apt archive
    (metadata generation via apt-ftparchive, signing, cleanup)."""

    def __init__(self, *args, **kwargs):

    # NOTE(review): 'def __repr__(self):' header missing in this excerpt.
        return '<BuildQueue %s>' % self.queue_name

    def write_metadata(self, starttime, force=False):
        # Do we write out metafiles?
        if not (force or self.generate_metadata):

        session = DBConn().session().object_session(self)

        # Temp-file bookkeeping so cleanup can run unconditionally later.
        fl_fd = fl_name = ac_fd = ac_name = None

        # All known architectures except the pseudo-arch 'source'.
        arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
        startdir = os.getcwd()

        # Grab files we want to include
        newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
        # Write file list with newer files
        (fl_fd, fl_name) = mkstemp()
            os.write(fl_fd, '%s\n' % n.fullpath)

        # Write minimal apt.conf
        # TODO: Remove hardcoding from template
        (ac_fd, ac_name) = mkstemp()
        os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
            'cachedir': cnf["Dir::Cache"],
            'overridedir': cnf["Dir::Override"],

        # Run apt-ftparchive generate
        os.chdir(os.path.dirname(ac_name))
        os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))

        # Run apt-ftparchive release
        # TODO: Eww - fix this
        bname = os.path.basename(self.path)

        # We have to remove the Release file otherwise it'll be included in the
            os.unlink(os.path.join(bname, 'Release'))

        os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))

        # Crude hack with open and append, but this whole section is and should be redone.
        if self.notautomatic:
            release=open("Release", "a")
            release.write("NotAutomatic: yes")
            # NOTE(review): no trailing newline is written and the file is not
            # explicitly closed in the visible lines — confirm against the
            # full file.

            # NOTE(review): the enclosing signing guard (presumably
            # 'if self.signingkey:') is missing from this excerpt.
            keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
            if cnf.has_key("Dinstall::SigningPubKeyring"):
                keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]

            # NOTE(review): the trailing '""' after Release is an implicit
            # concatenation with an empty string literal — harmless, but
            # likely a leftover and worth cleaning up.
            os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))

        # Move the files if we got this far
        os.rename('Release', os.path.join(bname, 'Release'))
        os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))

        # Clean up any left behind files

    def clean_and_update(self, starttime, Logger, dryrun=False):
        """WARNING: This routine commits for you"""
        session = DBConn().session().object_session(self)

        if self.generate_metadata and not dryrun:
            self.write_metadata(starttime)

        # Grab files older than our execution time
        older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()

        # NOTE(review): the loop header and dryrun branch structure around
        # the following lines are missing from this excerpt.
            Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
            Logger.log(["I: Removing %s from the queue" % o.fullpath])
            os.unlink(o.fullpath)
            # If it wasn't there, don't worry
            if e.errno == ENOENT:
            # TODO: Replace with proper logging call
            Logger.log(["E: Could not remove %s" % o.fullpath])

        # Drop any apt index/advisory files in the directory that no longer
        # have a matching BuildQueueFile row.
        for f in os.listdir(self.path):
            if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):

                # NOTE(review): the 'try:' line for this lookup is missing
                # from this excerpt.
                r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
                except NoResultFound:
                fp = os.path.join(self.path, f)
                    Logger.log(["I: Would remove unused link %s" % fp])
                    Logger.log(["I: Removing unused link %s" % fp])
                Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])

    def add_file_from_pool(self, poolfile):
        """Copies a file into the pool. Assumes that the PoolFile object is
        attached to the same SQLAlchemy session as the Queue object is.

        The caller is responsible for committing after calling this function."""
        poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]

        # Check if we have a file of this name or this ID already
        for f in self.queuefiles:
            if f.fileid is not None and f.fileid == poolfile.file_id or \
               f.poolfile.filename == poolfile_basename:
                # In this case, update the BuildQueueFile entry so we
                # don't remove it too early
                f.lastused = datetime.now()
                DBConn().session().object_session(poolfile).add(f)

        # Prepare BuildQueueFile object
        qf = BuildQueueFile()
        qf.build_queue_id = self.queue_id
        qf.lastused = datetime.now()
        qf.filename = poolfile_basename

        targetpath = poolfile.fullpath
        queuepath = os.path.join(self.path, poolfile_basename)

        # NOTE(review): the copy-vs-symlink branch structure around the
        # following lines is missing from this excerpt.
            # We need to copy instead of symlink
                utils.copy(targetpath, queuepath)
                # NULL in the fileid field implies a copy
                os.symlink(targetpath, queuepath)
                qf.fileid = poolfile.file_id

        # Get the same session as the PoolFile is using and add the qf to it
        DBConn().session().object_session(poolfile).add(qf)
837 __all__.append('BuildQueue')
def get_build_queue(queuename, session=None):
    """
    Returns BuildQueue object for given C{queue name}.

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: BuildQueue
    @return: BuildQueue object for the given queue (None if not present)
    """
    q = session.query(BuildQueue).filter_by(queue_name=queuename)

    try:
        return q.one()
    except NoResultFound:
        return None
863 __all__.append('get_build_queue')
865 ################################################################################
class BuildQueueFile(object):
    """A file (copy or symlink) living in a build queue directory."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)

    @property
    def fullpath(self):
        # Absolute path of the file inside its queue directory.
        return os.path.join(self.buildqueue.path, self.filename)
879 __all__.append('BuildQueueFile')
881 ################################################################################
class ChangePendingBinary(object):
    """Trivial ORM holder for a row of the 'changes_pending_binaries' table."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ChangePendingBinary %s>' % self.change_pending_binary_id
890 __all__.append('ChangePendingBinary')
892 ################################################################################
class ChangePendingFile(object):
    """Trivial ORM holder for a row of the 'changes_pending_files' table."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ChangePendingFile %s>' % self.change_pending_file_id
901 __all__.append('ChangePendingFile')
903 ################################################################################
class ChangePendingSource(object):
    """Trivial ORM holder for a row of the 'changes_pending_source' table."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ChangePendingSource %s>' % self.change_pending_source_id
912 __all__.append('ChangePendingSource')
914 ################################################################################
class Component(ORMObject):
    """ORM class for a row of the 'component' table (main, contrib, ...)."""

    def __init__(self, component_name = None):
        self.component_name = component_name

    def __eq__(self, val):
        # Support comparing a Component directly against a plain component
        # name; otherwise fall back to default comparison.
        if not isinstance(val, str):
            return NotImplemented
        return self.component_name == val

    def __ne__(self, val):
        if not isinstance(val, str):
            return NotImplemented
        return self.component_name != val

    def properties(self):
        return ['component_name', 'component_id', 'description', \
            'location_count', 'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        return ['component_name']
940 __all__.append('Component')
def get_component(component, session=None):
    """
    Returns the Component object for given C{component} name.

    @type component: string
    @param component: The name of the component

    @rtype: Component
    @return: Component object for the given name (None if not present)
    """
    # Component names are stored lowercase; normalise before querying.
    component = component.lower()

    q = session.query(Component).filter_by(component_name=component)

    try:
        return q.one()
    except NoResultFound:
        return None
963 __all__.append('get_component')
965 ################################################################################
class DBConfig(object):
    """Trivial ORM holder for a row of the 'config' table."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DBConfig %s>' % self.name
974 __all__.append('DBConfig')
976 ################################################################################
def get_or_set_contents_file_id(filename, session=None):
    """
    Returns database id for given filename.

    If no matching file is found, a row is inserted.

    @type filename: string
    @param filename: The filename
    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: int
    @return: the database id for the given filename
    """
    q = session.query(ContentFilename).filter_by(filename=filename)

    try:
        ret = q.one().cafilename_id
    except NoResultFound:
        # Not there yet: insert and let commit_or_flush assign the id.
        cf = ContentFilename()
        cf.filename = filename
        session.add(cf)
        session.commit_or_flush()
        ret = cf.cafilename_id

    return ret
1009 __all__.append('get_or_set_contents_file_id')
def get_contents(suite, overridetype, section=None, session=None):
    """
    Returns contents for a suite / overridetype combination, limiting
    to a section if not None.

    @type suite: Suite
    @param suite: Suite object

    @type overridetype: OverrideType
    @param overridetype: OverrideType object

    @type section: Section
    @param section: Optional section object to limit results to

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: ResultsProxy
    @return: ResultsProxy object set up to return tuples of (filename, section,
    ...)
    """

    # find me all of the contents for a given suite
    # NOTE(review): the remaining columns of this SELECT list (after 'fn')
    # are missing from this excerpt — restore them from the full file.
    contents_q = """SELECT (p.path||'/'||n.file) AS fn,
                   FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
                   JOIN content_file_names n ON (c.filename=n.id)
                   JOIN binaries b ON (b.id=c.binary_pkg)
                   JOIN override o ON (o.package=b.package)
                   JOIN section s ON (s.id=o.section)
                   WHERE o.suite = :suiteid AND o.type = :overridetypeid
                   AND b.type=:overridetypename"""

    vals = {'suiteid': suite.suite_id,
            'overridetypeid': overridetype.overridetype_id,
            'overridetypename': overridetype.overridetype}

    # Optionally restrict to a single section.
    if section is not None:
        contents_q += " AND s.id = :sectionid"
        vals['sectionid'] = section.section_id

    contents_q += " ORDER BY fn"

    return session.execute(contents_q, vals)
1060 __all__.append('get_contents')
1062 ################################################################################
class ContentFilepath(object):
    """Trivial ORM holder for a row of the 'content_file_paths' table."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ContentFilepath %s>' % self.filepath
1071 __all__.append('ContentFilepath')
def get_or_set_contents_path_id(filepath, session=None):
    """
    Returns database id for given path.

    If no matching file is found, a row is inserted.

    @type filepath: string
    @param filepath: The filepath

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: int
    @return: the database id for the given path
    """
    q = session.query(ContentFilepath).filter_by(filepath=filepath)

    try:
        ret = q.one().cafilepath_id
    except NoResultFound:
        # Not there yet: insert and let commit_or_flush assign the id.
        cf = ContentFilepath()
        cf.filepath = filepath
        session.add(cf)
        session.commit_or_flush()
        ret = cf.cafilepath_id

    return ret
1105 __all__.append('get_or_set_contents_path_id')
1107 ################################################################################
class ContentAssociation(object):
    """Trivial ORM holder for a row of the 'content_associations' table."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ContentAssociation %s>' % self.ca_id
1116 __all__.append('ContentAssociation')
def insert_content_paths(binary_id, fullpaths, session=None):
    """
    Make sure given path is associated with given binary id

    @type binary_id: int
    @param binary_id: the id of the binary
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session. If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code. If not passed, a commit
    will be performed at the end of the function, otherwise the caller is
    responsible for commiting.

    @return: True upon success
    """
    privatetrans = False
    # NOTE(review): the 'if session is None:' guard is missing from this
    # excerpt; a private session is created when none was supplied.
        session = DBConn().session()

    # Strip any leading './' so paths are stored in canonical form.
    def generate_path_dicts():
        for fullpath in fullpaths:
            if fullpath.startswith( './' ):
                fullpath = fullpath[2:]

            yield {'filename':fullpath, 'id': binary_id }

    for d in generate_path_dicts():
        session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
        # NOTE(review): the bind-parameter argument and the surrounding
        # try/except/commit lines are missing from this excerpt.
        traceback.print_exc()

        # Only rollback if we set up the session ourself
1169 __all__.append('insert_content_paths')
1171 ################################################################################
class DSCFile(object):
    """Trivial ORM holder for a row of the 'dsc_files' table."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DSCFile %s>' % self.dscfile_id
1180 __all__.append('DSCFile')
def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
    """
    Returns a list of DSCFiles which may be empty

    @type dscfile_id: int (optional)
    @param dscfile_id: the dscfile_id of the DSCFiles to find

    @type source_id: int (optional)
    @param source_id: the source id related to the DSCFiles to find

    @type poolfile_id: int (optional)
    @param poolfile_id: the poolfile id related to the DSCFiles to find

    @rtype: list
    @return: Possibly empty list of DSCFiles
    """
    q = session.query(DSCFile)

    # Apply each optional filter only when the caller supplied it.
    if dscfile_id is not None:
        q = q.filter_by(dscfile_id=dscfile_id)

    if source_id is not None:
        q = q.filter_by(source_id=source_id)

    if poolfile_id is not None:
        q = q.filter_by(poolfile_id=poolfile_id)

    return q.all()
1213 __all__.append('get_dscfiles')
1215 ################################################################################
class PoolFile(ORMObject):
    """ORM class for a row of the 'files' table (a file in the pool)."""

    def __init__(self, filename = None, location = None, filesize = -1, \
        md5sum = None):
        self.filename = filename
        self.location = location
        self.filesize = filesize
        self.md5sum = md5sum

    @property
    def fullpath(self):
        # Absolute path: location path plus pool-relative filename.
        return os.path.join(self.location.path, self.filename)

    def is_valid(self, filesize = -1, md5sum = None):
        # 'long' keeps this Python 2 compatible, as in the rest of the module.
        return self.filesize == long(filesize) and self.md5sum == md5sum

    def properties(self):
        return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
            'sha256sum', 'location', 'source', 'binary', 'last_used']

    def not_null_constraints(self):
        return ['filename', 'md5sum', 'location']
1239 __all__.append('PoolFile')
def check_poolfile(filename, filesize, md5sum, location_id, session=None):
    """
    Returns a tuple:
    (ValidFileFound [boolean], PoolFile object or None)

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @type filesize: int
    @param filesize: the size of the file to check against the DB

    @type md5sum: string
    @param md5sum: the md5sum of the file to check against the DB

    @type location_id: int
    @param location_id: the id of the location to look in

    @rtype: tuple
    @return: Tuple of length 2.
        - If valid pool file found: (C{True}, C{PoolFile object})
        - If valid pool file not found:
            - (C{False}, C{None}) if no file found
            - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
    """
    poolfile = session.query(Location).get(location_id). \
        files.filter_by(filename=filename).first()
    valid = False
    if poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum):
        valid = True

    return (valid, poolfile)
1275 __all__.append('check_poolfile')
1277 # TODO: the implementation can trivially be inlined at the place where the
1278 # function is called
1280 def get_poolfile_by_id(file_id, session=None):
1282 Returns a PoolFile objects or None for the given id
1285 @param file_id: the id of the file to look for
1287 @rtype: PoolFile or None
1288 @return: either the PoolFile object or None
1291 return session.query(PoolFile).get(file_id)
1293 __all__.append('get_poolfile_by_id')
1296 def get_poolfile_like_name(filename, session=None):
1298 Returns an array of PoolFile objects which are like the given name
1300 @type filename: string
1301 @param filename: the filename of the file to check against the DB
1304 @return: array of PoolFile objects
1307 # TODO: There must be a way of properly using bind parameters with %FOO%
1308 q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1312 __all__.append('get_poolfile_like_name')
def add_poolfile(filename, datadict, location_id, session=None):
    """
    Add a new file to the pool

    @type filename: string
    @param filename: filename

    @type datadict: dict
    @param datadict: dict with needed data (keys used: size, md5sum,
    sha1sum, sha256sum)

    @type location_id: int
    @param location_id: database id of the location

    @rtype: PoolFile
    @return: the PoolFile object created
    """
    poolfile = PoolFile()
    poolfile.filename = filename
    poolfile.filesize = datadict["size"]
    poolfile.md5sum = datadict["md5sum"]
    poolfile.sha1sum = datadict["sha1sum"]
    poolfile.sha256sum = datadict["sha256sum"]
    poolfile.location_id = location_id

    session.add(poolfile)
    # Flush to get a file id (NB: This is not a commit)
    # NOTE(review): the flush and return statements are elided in this
    # excerpt -- confirm against the full file.

__all__.append('add_poolfile')
1347 ################################################################################
1349 class Fingerprint(ORMObject):
1350 def __init__(self, fingerprint = None):
1351 self.fingerprint = fingerprint
1353 def properties(self):
1354 return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
1357 def not_null_constraints(self):
1358 return ['fingerprint']
1360 __all__.append('Fingerprint')
1363 def get_fingerprint(fpr, session=None):
1365 Returns Fingerprint object for given fpr.
1368 @param fpr: The fpr to find / add
1370 @type session: SQLAlchemy
1371 @param session: Optional SQL session object (a temporary one will be
1372 generated if not supplied).
1375 @return: the Fingerprint object for the given fpr or None
1378 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1382 except NoResultFound:
1387 __all__.append('get_fingerprint')
def get_or_set_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    If no matching fpr is found, a row is inserted.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.
    A flush will be performed either way.

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr
    """
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    # NOTE(review): the try/q.one() lookup and both return statements are
    # elided in this excerpt; only the insert-on-miss path is visible.
    except NoResultFound:
        fingerprint = Fingerprint()
        fingerprint.fingerprint = fpr
        session.add(fingerprint)
        session.commit_or_flush()

__all__.append('get_or_set_fingerprint')
1424 ################################################################################
# Helper routine for Keyring class
def get_ldap_name(entry):
    # Build a space-joined display name from the "cn", "mn" and "sn" LDAP
    # attributes, skipping values that are empty or "-".
    # NOTE(review): the lines fetching each attribute into 'ret' and
    # accumulating into 'name' are elided in this excerpt.
    for k in ["cn", "mn", "sn"]:
        if ret and ret[0] != "" and ret[0] != "-":
    return " ".join(name)
1435 ################################################################################
class Keyring(object):
    """A GPG keyring known to dak, with helpers to parse its keys and uids."""

    # gpg command template; '%s' is replaced with the keyring path.
    # '--fingerprint' is given twice so subkey fingerprints are listed too.
    gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
        " --with-colons --fingerprint --fingerprint"

    # NOTE(review): the attribute initialisation in __init__ and the
    # __repr__ method header are elided in this excerpt.
    def __init__(self, *args, **kwargs):
        return '<Keyring %s>' % self.keyring_name
1450 def de_escape_gpg_str(self, txt):
1451 esclist = re.split(r'(\\x..)', txt)
1452 for x in range(1,len(esclist),2):
1453 esclist[x] = "%c" % (int(esclist[x][2:],16))
1454 return "".join(esclist)
    def parse_address(self, uid):
        """parses uid and returns a tuple of real name and email address"""
        # NOTE(review): the import of email.Utils and a fallback for an
        # empty parsed name appear to be elided in this excerpt.
        (name, address) = email.Utils.parseaddr(uid)
        # Strip any parenthesised comment from the name part.
        name = re.sub(r"\s*[(].*[)]", "", name)
        # Undo GnuPG \xNN escaping in the name.
        name = self.de_escape_gpg_str(name)
        return (name, address)
    def load_keys(self, keyring):
        """Parse C{gpg --with-colons} output for C{keyring} and populate
        self.keys (keyed by key id) and self.fpr_lookup (fingerprint -> key).

        NOTE(review): several lines are elided in this excerpt (the
        initialisation of 'key'/'signingkey', creation of the per-key dict,
        and some branch conditions); indentation below is reconstructed.
        """
        if not self.keyring_id:
            raise Exception('Must be initialized with database information')

        # Stream the colon-delimited key listing from gpg.
        k = os.popen(self.gpg_invocation % keyring, "r")

        for line in k.xreadlines():
            field = line.split(":")
            if field[0] == "pub":
                (name, addr) = self.parse_address(field[9])
                    self.keys[key]["email"] = addr
                    self.keys[key]["name"] = name
                self.keys[key]["fingerprints"] = []
            elif key and field[0] == "sub" and len(field) >= 12:
                # Subkey record: remember whether it can sign ('s' capability).
                signingkey = ("s" in field[11])
            elif key and field[0] == "uid":
                (name, addr) = self.parse_address(field[9])
                # Prefer the first uid that actually carries an email address.
                if "email" not in self.keys[key] and "@" in addr:
                    self.keys[key]["email"] = addr
                    self.keys[key]["name"] = name
            elif signingkey and field[0] == "fpr":
                # Fingerprint record for a signing-capable key.
                self.keys[key]["fingerprints"].append(field[9])
                self.fpr_lookup[field[9]] = key
    def import_users_from_ldap(self, session):
        """Query the configured LDAP server for developer entries and attach
        their uids to the keys loaded by load_keys().

        @return: tuple (byname, byuid): uid -> (keyid, name) and
        keyid -> (uid, name) respectively.

        NOTE(review): the 'import ldap', byuid/byname initialisation and the
        loop header over Attrs are elided in this excerpt; indentation below
        is reconstructed.
        """
        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]

        l = ldap.open(LDAPServer)
        # Anonymous bind: the queried attributes are public.
        l.simple_bind_s("","")
        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
               ["uid", "keyfingerprint", "cn", "mn", "sn"])

        ldap_fin_uid_id = {}

            uid = entry["uid"][0]
            name = get_ldap_name(entry)
            fingerprints = entry["keyFingerPrint"]
            for f in fingerprints:
                # Only act on fingerprints we actually loaded from the keyring.
                key = self.fpr_lookup.get(f, None)
                if key not in self.keys:
                self.keys[key]["uid"] = uid

                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, name)
                byname[uid] = (keyid, name)

        return (byname, byuid)
    def generate_users_from_keyring(self, format, session):
        """Create uid entries for keys in this keyring from their key data.

        @param format: a format string with one '%s' placeholder used to
        build the uid (an 'invalid-uid' entry is generated for keys without
        a usable email address).

        @return: tuple (byname, byuid), as in import_users_from_ldap().

        NOTE(review): the byuid/byname initialisation, the else branch and
        the guard around the trailing invalid-uid block are elided in this
        excerpt; indentation below is reconstructed.
        """
        for x in self.keys.keys():
            if "email" not in self.keys[x]:
                self.keys[x]["uid"] = format % "invalid-uid"
                uid = format % self.keys[x]["email"]
                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, self.keys[x]["name"])
                byname[uid] = (keyid, self.keys[x]["name"])
                self.keys[x]["uid"] = uid

            uid = format % "invalid-uid"
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, "ungeneratable user id")
            byname[uid] = (keyid, "ungeneratable user id")

        return (byname, byuid)
1557 __all__.append('Keyring')
1560 def get_keyring(keyring, session=None):
1562 If C{keyring} does not have an entry in the C{keyrings} table yet, return None
1563 If C{keyring} already has an entry, simply return the existing Keyring
1565 @type keyring: string
1566 @param keyring: the keyring name
1569 @return: the Keyring object for this keyring
1572 q = session.query(Keyring).filter_by(keyring_name=keyring)
1576 except NoResultFound:
1579 __all__.append('get_keyring')
1581 ################################################################################
1583 class KeyringACLMap(object):
1584 def __init__(self, *args, **kwargs):
1588 return '<KeyringACLMap %s>' % self.keyring_acl_map_id
1590 __all__.append('KeyringACLMap')
1592 ################################################################################
1594 class DBChange(object):
1595 def __init__(self, *args, **kwargs):
1599 return '<DBChange %s>' % self.changesname
1601 def clean_from_queue(self):
1602 session = DBConn().session().object_session(self)
1604 # Remove changes_pool_files entries
1607 # Remove changes_pending_files references
1610 # Clear out of queue
1611 self.in_queue = None
1612 self.approved_for_id = None
1614 __all__.append('DBChange')
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return: DBChange object for the given filename (C{None} if not present)
    """
    q = session.query(DBChange).filter_by(changesname=filename)

    # NOTE(review): the try/q.one()/return and the None fallback are elided
    # in this excerpt.
    except NoResultFound:

__all__.append('get_dbchange')
1641 ################################################################################
1643 class Location(ORMObject):
1644 def __init__(self, path = None, component = None):
1646 self.component = component
1647 # the column 'type' should go away, see comment at mapper
1648 self.archive_type = 'pool'
1650 def properties(self):
1651 return ['path', 'location_id', 'archive_type', 'component', \
1654 def not_null_constraints(self):
1655 return ['path', 'archive_type']
1657 __all__.append('Location')
1660 def get_location(location, component=None, archive=None, session=None):
1662 Returns Location object for the given combination of location, component
1665 @type location: string
1666 @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}
1668 @type component: string
1669 @param component: the component name (if None, no restriction applied)
1671 @type archive: string
1672 @param archive: the archive name (if None, no restriction applied)
1674 @rtype: Location / None
1675 @return: Either a Location object or None if one can't be found
1678 q = session.query(Location).filter_by(path=location)
1680 if archive is not None:
1681 q = q.join(Archive).filter_by(archive_name=archive)
1683 if component is not None:
1684 q = q.join(Component).filter_by(component_name=component)
1688 except NoResultFound:
1691 __all__.append('get_location')
1693 ################################################################################
1695 class Maintainer(ORMObject):
1696 def __init__(self, name = None):
1699 def properties(self):
1700 return ['name', 'maintainer_id']
1702 def not_null_constraints(self):
1705 def get_split_maintainer(self):
1706 if not hasattr(self, 'name') or self.name is None:
1707 return ('', '', '', '')
1709 return fix_maintainer(self.name.strip())
1711 __all__.append('Maintainer')
1714 def get_or_set_maintainer(name, session=None):
1716 Returns Maintainer object for given maintainer name.
1718 If no matching maintainer name is found, a row is inserted.
1721 @param name: The maintainer name to add
1723 @type session: SQLAlchemy
1724 @param session: Optional SQL session object (a temporary one will be
1725 generated if not supplied). If not passed, a commit will be performed at
1726 the end of the function, otherwise the caller is responsible for commiting.
1727 A flush will be performed either way.
1730 @return: the Maintainer object for the given maintainer
1733 q = session.query(Maintainer).filter_by(name=name)
1736 except NoResultFound:
1737 maintainer = Maintainer()
1738 maintainer.name = name
1739 session.add(maintainer)
1740 session.commit_or_flush()
1745 __all__.append('get_or_set_maintainer')
1748 def get_maintainer(maintainer_id, session=None):
1750 Return the name of the maintainer behind C{maintainer_id} or None if that
1751 maintainer_id is invalid.
1753 @type maintainer_id: int
1754 @param maintainer_id: the id of the maintainer
1757 @return: the Maintainer with this C{maintainer_id}
1760 return session.query(Maintainer).get(maintainer_id)
1762 __all__.append('get_maintainer')
1764 ################################################################################
1766 class NewComment(object):
1767 def __init__(self, *args, **kwargs):
1771 return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
1773 __all__.append('NewComment')
def has_new_comment(package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """

    q = session.query(NewComment)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    # bool() is redundant around a comparison, but harmless.
    return bool(q.count() > 0)

__all__.append('has_new_comment')
1803 def get_new_comments(package=None, version=None, comment_id=None, session=None):
1805 Returns (possibly empty) list of NewComment objects for the given
1808 @type package: string (optional)
1809 @param package: name of the package
1811 @type version: string (optional)
1812 @param version: package version
1814 @type comment_id: int (optional)
1815 @param comment_id: An id of a comment
1817 @type session: Session
1818 @param session: Optional SQLA session object (a temporary one will be
1819 generated if not supplied)
1822 @return: A (possibly empty) list of NewComment objects will be returned
1825 q = session.query(NewComment)
1826 if package is not None: q = q.filter_by(package=package)
1827 if version is not None: q = q.filter_by(version=version)
1828 if comment_id is not None: q = q.filter_by(comment_id=comment_id)
1832 __all__.append('get_new_comments')
1834 ################################################################################
1836 class Override(ORMObject):
1837 def __init__(self, package = None, suite = None, component = None, overridetype = None, \
1838 section = None, priority = None):
1839 self.package = package
1841 self.component = component
1842 self.overridetype = overridetype
1843 self.section = section
1844 self.priority = priority
1846 def properties(self):
1847 return ['package', 'suite', 'component', 'overridetype', 'section', \
1850 def not_null_constraints(self):
1851 return ['package', 'suite', 'component', 'overridetype', 'section']
1853 __all__.append('Override')
1856 def get_override(package, suite=None, component=None, overridetype=None, session=None):
1858 Returns Override object for the given parameters
1860 @type package: string
1861 @param package: The name of the package
1863 @type suite: string, list or None
1864 @param suite: The name of the suite (or suites if a list) to limit to. If
1865 None, don't limit. Defaults to None.
1867 @type component: string, list or None
1868 @param component: The name of the component (or components if a list) to
1869 limit to. If None, don't limit. Defaults to None.
1871 @type overridetype: string, list or None
1872 @param overridetype: The name of the overridetype (or overridetypes if a list) to
1873 limit to. If None, don't limit. Defaults to None.
1875 @type session: Session
1876 @param session: Optional SQLA session object (a temporary one will be
1877 generated if not supplied)
1880 @return: A (possibly empty) list of Override objects will be returned
1883 q = session.query(Override)
1884 q = q.filter_by(package=package)
1886 if suite is not None:
1887 if not isinstance(suite, list): suite = [suite]
1888 q = q.join(Suite).filter(Suite.suite_name.in_(suite))
1890 if component is not None:
1891 if not isinstance(component, list): component = [component]
1892 q = q.join(Component).filter(Component.component_name.in_(component))
1894 if overridetype is not None:
1895 if not isinstance(overridetype, list): overridetype = [overridetype]
1896 q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))
1900 __all__.append('get_override')
1903 ################################################################################
class OverrideType(ORMObject):
    """The type of an override entry (e.g. deb, udeb, dsc)."""

    def __init__(self, overridetype = None):
        self.overridetype = overridetype

    def properties(self):
        """Attribute names exposed for ORMObject introspection."""
        return ['overridetype', 'overridetype_id', 'overrides_count']

    def not_null_constraints(self):
        """Columns the database requires to be non-NULL."""
        return ['overridetype']
1918 def get_override_type(override_type, session=None):
1920 Returns OverrideType object for given C{override type}.
1922 @type override_type: string
1923 @param override_type: The name of the override type
1925 @type session: Session
1926 @param session: Optional SQLA session object (a temporary one will be
1927 generated if not supplied)
1930 @return: the database id for the given override type
1933 q = session.query(OverrideType).filter_by(overridetype=override_type)
1937 except NoResultFound:
1940 __all__.append('get_override_type')
1942 ################################################################################
class DebContents(object):
    """A single path shipped by a .deb package (contents table row)."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __init__ body and the __repr__ method header are
        # elided in this excerpt. Also, "DebConetnts" below is misspelled in
        # the original; left unchanged since repr output may be relied upon.
        return '<DebConetnts %s: %s>' % (self.package.package,self.file)

__all__.append('DebContents')
class UdebContents(object):
    """A single path shipped by a .udeb package (contents table row)."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __init__ body and the __repr__ method header are
        # elided in this excerpt. "UdebConetnts" below is misspelled in the
        # original; left unchanged since repr output may be relied upon.
        return '<UdebConetnts %s: %s>' % (self.package.package,self.file)

__all__.append('UdebContents')
1963 class PendingBinContents(object):
1964 def __init__(self, *args, **kwargs):
1968 return '<PendingBinContents %s>' % self.contents_id
1970 __all__.append('PendingBinContents')
def insert_pending_content_paths(package,
    """
    Make sure given paths are temporarily associated with given
    package.

    @type package: dict
    @param package: the package to associate with should have been read in from the binary control file
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session. If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code. If not passed, a commit
    will be performed at the end of the function

    @return: True upon success, False if there is a problem

    NOTE(review): this excerpt is heavily elided -- the signature
    continuation (is_udeb/fullpaths/session parameters), the try framing,
    the delete of old rows, the udeb/deb branch around pca.type, and the
    commit/rollback/return logic are all missing here.
    """

    privatetrans = False
        session = DBConn().session()

        arch = get_architecture(package['Architecture'], session)
        arch_id = arch.arch_id

        # Remove any already existing recorded files for this package
        q = session.query(PendingBinContents)
        q = q.filter_by(package=package['Package'])
        q = q.filter_by(version=package['Version'])
        q = q.filter_by(architecture=arch_id)

        for fullpath in fullpaths:
            # Normalise a leading './' as produced by dpkg/tar listings.
            if fullpath.startswith( "./" ):
                fullpath = fullpath[2:]

            pca = PendingBinContents()
            pca.package = package['Package']
            pca.version = package['Version']
            pca.architecture = arch_id
                pca.type = 8 # gross
                pca.type = 7 # also gross

        # Only commit if we set up the session ourself
    except Exception, e:
        traceback.print_exc()
        # Only rollback if we set up the session ourself

__all__.append('insert_pending_content_paths')
2047 ################################################################################
2049 class PolicyQueue(object):
2050 def __init__(self, *args, **kwargs):
2054 return '<PolicyQueue %s>' % self.queue_name
2056 __all__.append('PolicyQueue')
2059 def get_policy_queue(queuename, session=None):
2061 Returns PolicyQueue object for given C{queue name}
2063 @type queuename: string
2064 @param queuename: The name of the queue
2066 @type session: Session
2067 @param session: Optional SQLA session object (a temporary one will be
2068 generated if not supplied)
2071 @return: PolicyQueue object for the given queue
2074 q = session.query(PolicyQueue).filter_by(queue_name=queuename)
2078 except NoResultFound:
2081 __all__.append('get_policy_queue')
def get_policy_queue_from_path(pathname, session=None):
    """
    Returns PolicyQueue object for given C{path name}

    @type pathname: string
    @param pathname: The path

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(path=pathname)

    # NOTE(review): the try/q.one()/return and None fallback are elided in
    # this excerpt.
    except NoResultFound:

__all__.append('get_policy_queue_from_path')
2108 ################################################################################
class Priority(ORMObject):
    """A package priority (e.g. required, optional) with its sort level."""

    def __init__(self, priority = None, level = None):
        self.priority = priority
        # NOTE(review): the assignment of self.level is elided in this
        # excerpt.

    def properties(self):
        # Attribute names exposed for ORMObject introspection.
        return ['priority', 'priority_id', 'level', 'overrides_count']

    def not_null_constraints(self):
        return ['priority', 'level']

    def __eq__(self, val):
        # Allow direct comparison against the priority name string.
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented

__all__.append('Priority')
2136 def get_priority(priority, session=None):
2138 Returns Priority object for given C{priority name}.
2140 @type priority: string
2141 @param priority: The name of the priority
2143 @type session: Session
2144 @param session: Optional SQLA session object (a temporary one will be
2145 generated if not supplied)
2148 @return: Priority object for the given priority
2151 q = session.query(Priority).filter_by(priority=priority)
2155 except NoResultFound:
2158 __all__.append('get_priority')
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """

    q = session.query(Priority)
        # NOTE(review): the 'ret' initialisation, the loop header over the
        # query results and the return are elided in this excerpt.
        ret[x.priority] = x.priority_id

__all__.append('get_priorities')
2182 ################################################################################
2184 class Section(ORMObject):
2185 def __init__(self, section = None):
2186 self.section = section
2188 def properties(self):
2189 return ['section', 'section_id', 'overrides_count']
2191 def not_null_constraints(self):
2194 def __eq__(self, val):
2195 if isinstance(val, str):
2196 return (self.section == val)
2197 # This signals to use the normal comparison operator
2198 return NotImplemented
2200 def __ne__(self, val):
2201 if isinstance(val, str):
2202 return (self.section != val)
2203 # This signals to use the normal comparison operator
2204 return NotImplemented
2206 __all__.append('Section')
2209 def get_section(section, session=None):
2211 Returns Section object for given C{section name}.
2213 @type section: string
2214 @param section: The name of the section
2216 @type session: Session
2217 @param session: Optional SQLA session object (a temporary one will be
2218 generated if not supplied)
2221 @return: Section object for the given section name
2224 q = session.query(Section).filter_by(section=section)
2228 except NoResultFound:
2231 __all__.append('get_section')
2234 def get_sections(session=None):
2236 Returns dictionary of section names -> id mappings
2238 @type session: Session
2239 @param session: Optional SQL session object (a temporary one will be
2240 generated if not supplied)
2243 @return: dictionary of section names -> id mappings
2247 q = session.query(Section)
2249 ret[x.section] = x.section_id
2253 __all__.append('get_sections')
2255 ################################################################################
class DBSource(ORMObject):
    """A source package as stored in the database."""

    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date

    def properties(self):
        # Attribute names exposed for ORMObject introspection.
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \
            'install_date', 'binaries_count']

    def not_null_constraints(self):
        # Columns the database requires to be non-NULL.
        # Fix: 'install_date' was previously listed twice; the duplicate
        # entry has been removed (the constraint was checked redundantly).
        return ['source', 'version', 'install_date', 'maintainer', \
            'changedby', 'poolfile']

__all__.append('DBSource')
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
    1. exact match => 1.0-3
    2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: list
    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: returns 1 if a source with expected version is found, otherwise 0

    NOTE(review): mutable default argument C{suites = ["any"]} is shared
    across calls -- harmless only while no caller mutates it. Several lines
    are elided in this excerpt (the set-up of 's', the per-suite append in
    the mapping loop, and the ret handling); indentation is reconstructed.
    """

    from daklib.regexes import re_bin_only_nmu
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))
            # source must exist in suite X, or in some other suite that's
            # mapped to X, recursively... silent-maps are counted too,
            # unreleased-maps aren't.
            maps = cnf.ValueList("SuiteMappings")[:]
            maps = [ m.split() for m in maps ]
            maps = [ (x[1], x[2]) for x in maps
                            if x[0] == "map" or x[0] == "silent-map" ]
            for (from_, to) in maps:
                if from_ in s and to not in s:
            q = q.filter(DBSource.suites.any(Suite.suite_name.in_(s)))

    # No source found so return not ok

__all__.append('source_exists')
2340 def get_suites_source_in(source, session=None):
2342 Returns list of Suite objects which given C{source} name is in
2345 @param source: DBSource package name to search for
2348 @return: list of Suite objects for the given source
2351 return session.query(Suite).filter(Suite.sources.any(source=source)).all()
2353 __all__.append('get_suites_source_in')
2356 def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
2358 Returns list of DBSource objects for given C{source} name and other parameters
2361 @param source: DBSource package name to search for
2363 @type version: str or None
2364 @param version: DBSource version name to search for or None if not applicable
2366 @type dm_upload_allowed: bool
2367 @param dm_upload_allowed: If None, no effect. If True or False, only
2368 return packages with that dm_upload_allowed setting
2370 @type session: Session
2371 @param session: Optional SQL session object (a temporary one will be
2372 generated if not supplied)
2375 @return: list of DBSource objects for the given name (may be empty)
2378 q = session.query(DBSource).filter_by(source=source)
2380 if version is not None:
2381 q = q.filter_by(version=version)
2383 if dm_upload_allowed is not None:
2384 q = q.filter_by(dm_upload_allowed=dm_upload_allowed)
2388 __all__.append('get_sources_from_name')
2390 # FIXME: This function fails badly if it finds more than 1 source package and
2391 # its implementation is trivial enough to be inlined.
2393 def get_source_in_suite(source, suite, session=None):
2395 Returns a DBSource object for a combination of C{source} and C{suite}.
2397 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2398 - B{suite} - a suite name, eg. I{unstable}
2400 @type source: string
2401 @param source: source package name
2404 @param suite: the suite name
2407 @return: the version for I{source} in I{suite}
2411 q = get_suite(suite, session).get_sources(source)
2414 except NoResultFound:
2417 __all__.append('get_source_in_suite')
2419 ################################################################################
def add_dsc_to_db(u, filename, session=None):
    """Record a .dsc upload in the database: the source row, its pool
    files, the dsc_files rows and the src_uploaders entries.

    @return: tuple (source, dsc_component, dsc_location_id, pfs) where pfs
    is the list of PoolFile objects touched.

    NOTE(review): this excerpt is elided in several places -- the
    construction of 'source', 'dscfile', 'df' and 'su' objects, the 'pfs'
    initialisation, session flushes and an else branch around the
    get_poolfile_by_id call are all missing here; indentation is
    reconstructed.
    """
    entry = u.pkg.files[filename]

    source.source = u.pkg.dsc["source"]
    source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
    source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
    source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    source.install_date = datetime.now().date()

    dsc_component = entry["component"]
    dsc_location_id = entry["location id"]

    source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")

    # Set up a new poolfile if necessary
    if not entry.has_key("files id") or not entry["files id"]:
        filename = entry["pool name"] + filename
        poolfile = add_poolfile(filename, entry, dsc_location_id, session)
        pfs.append(poolfile)
        entry["files id"] = poolfile.file_id

    source.poolfile_id = entry["files id"]

    suite_names = u.pkg.changes["distribution"].keys()
    source.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Add the source files to the DB (files and dsc_files)
    dscfile.source_id = source.source_id
    dscfile.poolfile_id = entry["files id"]
    session.add(dscfile)

    for dsc_file, dentry in u.pkg.dsc_files.items():
        df.source_id = source.source_id

        # If the .orig tarball is already in the pool, it's
        # files id is stored in dsc_files by check_dsc().
        files_id = dentry.get("files id", None)

        # Find the entry in the files hash
        # TODO: Bail out here properly
        for f, e in u.pkg.files.items():

        if files_id is None:
            filename = dfentry["pool name"] + dsc_file

            (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
            # FIXME: needs to check for -1/-2 and or handle exception
            if found and obj is not None:
                files_id = obj.file_id

        # If still not found, add it
        if files_id is None:
            # HACK: Force sha1sum etc into dentry
            dentry["sha1sum"] = dfentry["sha1sum"]
            dentry["sha256sum"] = dfentry["sha256sum"]
            poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
            pfs.append(poolfile)
            files_id = poolfile.file_id

            poolfile = get_poolfile_by_id(files_id, session)
            if poolfile is None:
                utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
            pfs.append(poolfile)

        df.poolfile_id = files_id

    # Add the src_uploaders to the DB
    uploader_ids = [source.maintainer_id]
    if u.pkg.dsc.has_key("uploaders"):
        for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
            uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)

    for up_id in uploader_ids:
        if added_ids.has_key(up_id):
            # Duplicate uploader entries in the .dsc are warned about, not fatal.
            utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))

        su.maintainer_id = up_id
        su.source_id = source.source_id

    return source, dsc_component, dsc_location_id, pfs

__all__.append('add_dsc_to_db')
def add_deb_to_db(u, filename, session=None):
    """
    Contrary to what you might expect, this routine deals with both
    debs and udebs. That info is in 'dbtype', whilst 'type' is
    'deb' for both of them

    NOTE(review): the construction of 'bin' (presumably a DBBinary), an
    else branch in the poolfile handling and the final add/flush are elided
    in this excerpt; indentation is reconstructed.
    """
    entry = u.pkg.files[filename]

    bin.package = entry["package"]
    bin.version = entry["version"]
    bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
    bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    bin.arch_id = get_architecture(entry["architecture"], session).arch_id
    bin.binarytype = entry["dbtype"]

    filename = entry["pool name"] + filename
    fullpath = os.path.join(cnf["Dir::Pool"], filename)
    if not entry.get("location id", None):
        entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id

    if entry.get("files id", None):
        # NOTE(review): get_poolfile_by_id is called with bin.poolfile_id
        # *before* it is assigned from entry["files id"] on the next line --
        # looks suspicious; confirm intent against the full file.
        poolfile = get_poolfile_by_id(bin.poolfile_id)
        bin.poolfile_id = entry["files id"]
        poolfile = add_poolfile(filename, entry, entry["location id"], session)
        bin.poolfile_id = entry["files id"] = poolfile.file_id

    bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
    if len(bin_sources) != 1:
        raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
                                  (bin.package, bin.version, entry["architecture"],
                                   filename, bin.binarytype, u.pkg.changes["fingerprint"])

    bin.source_id = bin_sources[0].source_id

    # Add and flush object so it has an ID

    suite_names = u.pkg.changes["distribution"].keys()
    bin.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Deal with contents - disabled for now
    #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
    # print "REJECT\nCould not determine contents of package %s" % bin.package
    # session.rollback()
    # raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)

__all__.append('add_deb_to_db')
2589 ################################################################################
# Thin ORM-mapped class for the source_acl table; attributes such as
# source_acl_id are attached by the mapper in DBConn.__setupmappers.
2591 class SourceACL(object):
2592 def __init__(self, *args, **kwargs):
# repr shows the primary key only.
2596 return '<SourceACL %s>' % self.source_acl_id
2598 __all__.append('SourceACL')
2600 ################################################################################
# ORM-mapped class for the src_format table (source package format names,
# e.g. "1.0", "3.0 (quilt)"); columns bound by DBConn.__setupmappers.
2602 class SrcFormat(object):
2603 def __init__(self, *args, **kwargs):
2607 return '<SrcFormat %s>' % (self.format_name)
2609 __all__.append('SrcFormat')
2611 ################################################################################
# ORM-mapped class for the src_uploaders table linking a source package to
# its uploader maintainers; columns bound by DBConn.__setupmappers.
2613 class SrcUploader(object):
2614 def __init__(self, *args, **kwargs):
2618 return '<SrcUploader %s>' % self.uploader_id
2620 __all__.append('SrcUploader')
2622 ################################################################################
# (display label, Suite attribute name) pairs, iterated by Suite's details
# rendering loop below to produce "Label: value" lines.
2624 SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
2625 ('SuiteID', 'suite_id'),
2626 ('Version', 'version'),
2627 ('Origin', 'origin'),
2629 ('Description', 'description'),
2630 ('Untouchable', 'untouchable'),
2631 ('Announce', 'announce'),
2632 ('Codename', 'codename'),
2633 ('OverrideCodename', 'overridecodename'),
2634 ('ValidTime', 'validtime'),
2635 ('Priority', 'priority'),
2636 ('NotAutomatic', 'notautomatic'),
2637 ('CopyChanges', 'copychanges'),
2638 ('OverrideSuite', 'overridesuite')]
2640 # Why the heck don't we have any UNIQUE constraints in table suite?
2641 # TODO: Add UNIQUE constraints for appropriate columns.
# ORM class for the suite table. Supports comparison against a plain suite
# name string and provides architecture/source lookups for the suite.
2642 class Suite(ORMObject):
2643 def __init__(self, suite_name = None, version = None):
2644 self.suite_name = suite_name
2645 self.version = version
# Attributes shown by ORMObject-based introspection (list is truncated in this view).
2647 def properties(self):
2648 return ['suite_name', 'version', 'sources_count', 'binaries_count', \
2651 def not_null_constraints(self):
2652 return ['suite_name', 'version']
# Equality against a str compares the suite name; other types defer to SQLAlchemy.
2654 def __eq__(self, val):
2655 if isinstance(val, str):
2656 return (self.suite_name == val)
2657 # This signals to use the normal comparison operator
2658 return NotImplemented
2660 def __ne__(self, val):
2661 if isinstance(val, str):
2662 return (self.suite_name != val)
2663 # This signals to use the normal comparison operator
2664 return NotImplemented
# Renders one "Label: value" line per SUITE_FIELDS entry
# (the enclosing method's def line is not visible in this view).
2668 for disp, field in SUITE_FIELDS:
2669 val = getattr(self, field, None)
2671 ret.append("%s: %s" % (disp, val))
2673 return "\n".join(ret)
2675 def get_architectures(self, skipsrc=False, skipall=False):
2677 Returns list of Architecture objects
2679 @type skipsrc: boolean
2680 @param skipsrc: Whether to skip returning the 'source' architecture entry
2683 @type skipall: boolean
2684 @param skipall: Whether to skip returning the 'all' architecture entry
2688 @return: list of Architecture objects for the given name (may be empty)
# Query this suite's architectures, optionally filtering the pseudo-arches.
2691 q = object_session(self).query(Architecture).with_parent(self)
2693 q = q.filter(Architecture.arch_string != 'source')
2695 q = q.filter(Architecture.arch_string != 'all')
2696 return q.order_by(Architecture.arch_string).all()
2698 def get_sources(self, source):
2700 Returns a query object representing DBSource that is part of C{suite}.
2702 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2704 @type source: string
2705 @param source: source package name
2707 @rtype: sqlalchemy.orm.query.Query
2708 @return: a query of DBSource
2712 session = object_session(self)
2713 return session.query(DBSource).filter_by(source = source). \
2716 __all__.append('Suite')
# Look up a single Suite row by name; returns None when the suite does not
# exist (the try/return statements around q are not visible in this view).
2719 def get_suite(suite, session=None):
2721 Returns Suite object for given C{suite name}.
2724 @param suite: The name of the suite
2726 @type session: Session
2727 @param session: Optional SQLA session object (a temporary one will be
2728 generated if not supplied)
2731 @return: Suite object for the requested suite name (None if not present)
2734 q = session.query(Suite).filter_by(suite_name=suite)
2738 except NoResultFound:
2741 __all__.append('get_suite')
2743 ################################################################################
2745 # TODO: should be removed because the implementation is too trivial
# Convenience wrapper: delegates to Suite.get_architectures for the named
# suite (flagged above as trivial enough to remove eventually).
2747 def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
2749 Returns list of Architecture objects for given C{suite} name
2752 @param suite: Suite name to search for
2754 @type skipsrc: boolean
2755 @param skipsrc: Whether to skip returning the 'source' architecture entry
2758 @type skipall: boolean
2759 @param skipall: Whether to skip returning the 'all' architecture entry
2762 @type session: Session
2763 @param session: Optional SQL session object (a temporary one will be
2764 generated if not supplied)
2767 @return: list of Architecture objects for the given name (may be empty)
# NOTE(review): raises AttributeError if the suite is unknown (get_suite
# returns None) — confirm callers guarantee a valid suite name.
2770 return get_suite(suite, session).get_architectures(skipsrc, skipall)
2772 __all__.append('get_suite_architectures')
2774 ################################################################################
# ORM-mapped association class between a suite and an allowed source format;
# columns bound by DBConn.__setupmappers.
2776 class SuiteSrcFormat(object):
2777 def __init__(self, *args, **kwargs):
2781 return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
2783 __all__.append('SuiteSrcFormat')
# Return the SrcFormat rows permitted in the named suite, ordered by format
# name (the final return statement is not visible in this view).
2786 def get_suite_src_formats(suite, session=None):
2788 Returns list of allowed SrcFormat for C{suite}.
2791 @param suite: Suite name to search for
2793 @type session: Session
2794 @param session: Optional SQL session object (a temporary one will be
2795 generated if not supplied)
2798 @return: the list of allowed source formats for I{suite}
# Join through the suite_src_formats association table to the named suite.
2801 q = session.query(SrcFormat)
2802 q = q.join(SuiteSrcFormat)
2803 q = q.join(Suite).filter_by(suite_name=suite)
2804 q = q.order_by('format_name')
2808 __all__.append('get_suite_src_formats')
2810 ################################################################################
# ORM class for the uid table (GPG key uids). Like Suite, compares equal to
# a plain string holding the uid value.
2812 class Uid(ORMObject):
2813 def __init__(self, uid = None, name = None):
# String comparison matches against the uid column; other types defer to SQLAlchemy.
2817 def __eq__(self, val):
2818 if isinstance(val, str):
2819 return (self.uid == val)
2820 # This signals to use the normal comparison operator
2821 return NotImplemented
2823 def __ne__(self, val):
2824 if isinstance(val, str):
2825 return (self.uid != val)
2826 # This signals to use the normal comparison operator
2827 return NotImplemented
2829 def properties(self):
2830 return ['uid', 'name', 'fingerprint']
2832 def not_null_constraints(self):
2835 __all__.append('Uid')
# Fetch the Uid row for uidname, inserting it first if absent
# (the try/insert statements around the query are not visible in this view).
2838 def get_or_set_uid(uidname, session=None):
2840 Returns uid object for given uidname.
2842 If no matching uidname is found, a row is inserted.
2844 @type uidname: string
2845 @param uidname: The uid to add
2847 @type session: SQLAlchemy
2848 @param session: Optional SQL session object (a temporary one will be
2849 generated if not supplied). If not passed, a commit will be performed at
2850 the end of the function, otherwise the caller is responsible for commiting.
2853 @return: the uid object for the given uidname
2856 q = session.query(Uid).filter_by(uid=uidname)
2860 except NoResultFound:
# Commits only for internally-created sessions, flushes otherwise (see docstring).
2864 session.commit_or_flush()
2869 __all__.append('get_or_set_uid')
# Resolve the Uid row owning the given fingerprint string; returns None on
# no match (the try/return lines are not visible in this view).
2872 def get_uid_from_fingerprint(fpr, session=None):
2873 q = session.query(Uid)
2874 q = q.join(Fingerprint).filter_by(fingerprint=fpr)
2878 except NoResultFound:
2881 __all__.append('get_uid_from_fingerprint')
2883 ################################################################################
# ORM-mapped class for the upload_blocks table; columns and the
# fingerprint/uid relations are bound by DBConn.__setupmappers.
2885 class UploadBlock(object):
2886 def __init__(self, *args, **kwargs):
2890 return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
2892 __all__.append('UploadBlock')
2894 ################################################################################
# Singleton database connection object (Borg pattern: all instances share
# __shared_state). Reflects the dak schema tables, binds ORM mappers for
# every model class in this module, and hands out sessions.
2896 class DBConn(object):
2898 database module init.
2902 def __init__(self, *args, **kwargs):
# Borg: every instance shares one __dict__, so setup runs only once.
2903 self.__dict__ = self.__shared_state
2905 if not getattr(self, 'initialised', False):
2906 self.initialised = True
2907 self.debug = kwargs.has_key('debug')
# Reflect database tables into self.tbl_* / self.view_* attributes.
2910 def __setuptables(self):
# Tables with a SERIAL 'id' primary key (list truncated in this view).
2911 tables_with_primary = (
2919 'build_queue_files',
2924 'changes_pending_binaries',
2925 'changes_pending_files',
2926 'changes_pending_source',
2936 'pending_bin_contents',
# Association/mapping tables without their own surrogate key.
2950 tables_no_primary = (
2951 'changes_pending_files_map',
2952 'changes_pending_source_files',
2953 'changes_pool_files',
2955 # TODO: the maintainer column in table override should be removed.
2957 'suite_architectures',
2958 'suite_src_formats',
2959 'suite_build_queue_copy',
# Database views, reflected read-only below (list truncated in this view).
2964 'almost_obsolete_all_associations',
2965 'almost_obsolete_src_associations',
2966 'any_associations_source',
2967 'bin_assoc_by_arch',
2968 'bin_associations_binaries',
2969 'binaries_suite_arch',
2970 'binfiles_suite_component_arch',
2973 'newest_all_associations',
2974 'newest_any_associations',
2976 'newest_src_association',
2977 'obsolete_all_associations',
2978 'obsolete_any_associations',
2979 'obsolete_any_by_all_associations',
2980 'obsolete_src_associations',
2982 'src_associations_bin',
2983 'src_associations_src',
2984 'suite_arch_by_name',
2987 # Sqlalchemy version 0.5 fails to reflect the SERIAL type
2988 # correctly and that is why we have to use a workaround. It can
2989 # be removed as soon as we switch to version 0.6.
2990 for table_name in tables_with_primary:
2991 table = Table(table_name, self.db_meta, \
2992 Column('id', Integer, primary_key = True), \
2993 autoload=True, useexisting=True)
2994 setattr(self, 'tbl_%s' % table_name, table)
2996 for table_name in tables_no_primary:
2997 table = Table(table_name, self.db_meta, autoload=True)
2998 setattr(self, 'tbl_%s' % table_name, table)
3000 # bin_contents needs special attention until update #41 has been
# Composite primary key (file, binary_id) declared explicitly for reflection.
3002 self.tbl_bin_contents = Table('bin_contents', self.db_meta, \
3003 Column('file', Text, primary_key = True),
3004 Column('binary_id', Integer, ForeignKey('binaries.id'), \
3005 primary_key = True),
3006 autoload=True, useexisting=True)
3008 for view_name in views:
3009 view = Table(view_name, self.db_meta, autoload=True)
3010 setattr(self, 'view_%s' % view_name, view)
# Bind every model class in this module to its reflected table.
3012 def __setupmappers(self):
3013 mapper(Architecture, self.tbl_architecture,
3014 properties = dict(arch_id = self.tbl_architecture.c.id,
3015 suites = relation(Suite, secondary=self.tbl_suite_architectures,
3016 order_by='suite_name',
3017 backref=backref('architectures', order_by='arch_string'))),
3018 extension = validator)
3020 mapper(Archive, self.tbl_archive,
3021 properties = dict(archive_id = self.tbl_archive.c.id,
3022 archive_name = self.tbl_archive.c.name))
3024 mapper(PendingBinContents, self.tbl_pending_bin_contents,
3025 properties = dict(contents_id =self.tbl_pending_bin_contents.c.id,
3026 filename = self.tbl_pending_bin_contents.c.filename,
3027 package = self.tbl_pending_bin_contents.c.package,
3028 version = self.tbl_pending_bin_contents.c.version,
3029 arch = self.tbl_pending_bin_contents.c.arch,
3030 otype = self.tbl_pending_bin_contents.c.type))
3032 mapper(DebContents, self.tbl_deb_contents,
3033 properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
3034 package=self.tbl_deb_contents.c.package,
3035 suite=self.tbl_deb_contents.c.suite,
3036 arch=self.tbl_deb_contents.c.arch,
3037 section=self.tbl_deb_contents.c.section,
3038 filename=self.tbl_deb_contents.c.filename))
3040 mapper(UdebContents, self.tbl_udeb_contents,
3041 properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
3042 package=self.tbl_udeb_contents.c.package,
3043 suite=self.tbl_udeb_contents.c.suite,
3044 arch=self.tbl_udeb_contents.c.arch,
3045 section=self.tbl_udeb_contents.c.section,
3046 filename=self.tbl_udeb_contents.c.filename))
3048 mapper(BuildQueue, self.tbl_build_queue,
3049 properties = dict(queue_id = self.tbl_build_queue.c.id))
3051 mapper(BuildQueueFile, self.tbl_build_queue_files,
3052 properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
3053 poolfile = relation(PoolFile, backref='buildqueueinstances')))
3055 mapper(DBBinary, self.tbl_binaries,
3056 properties = dict(binary_id = self.tbl_binaries.c.id,
3057 package = self.tbl_binaries.c.package,
3058 version = self.tbl_binaries.c.version,
3059 maintainer_id = self.tbl_binaries.c.maintainer,
3060 maintainer = relation(Maintainer),
3061 source_id = self.tbl_binaries.c.source,
3062 source = relation(DBSource, backref='binaries'),
3063 arch_id = self.tbl_binaries.c.architecture,
3064 architecture = relation(Architecture),
3065 poolfile_id = self.tbl_binaries.c.file,
3066 poolfile = relation(PoolFile, backref=backref('binary', uselist = False)),
3067 binarytype = self.tbl_binaries.c.type,
3068 fingerprint_id = self.tbl_binaries.c.sig_fpr,
3069 fingerprint = relation(Fingerprint),
3070 install_date = self.tbl_binaries.c.install_date,
3071 suites = relation(Suite, secondary=self.tbl_bin_associations,
3072 backref=backref('binaries', lazy='dynamic'))),
3073 extension = validator)
3075 mapper(BinaryACL, self.tbl_binary_acl,
3076 properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))
3078 mapper(BinaryACLMap, self.tbl_binary_acl_map,
3079 properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
3080 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
3081 architecture = relation(Architecture)))
3083 mapper(Component, self.tbl_component,
3084 properties = dict(component_id = self.tbl_component.c.id,
3085 component_name = self.tbl_component.c.name),
3086 extension = validator)
3088 mapper(DBConfig, self.tbl_config,
3089 properties = dict(config_id = self.tbl_config.c.id))
3091 mapper(DSCFile, self.tbl_dsc_files,
3092 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
3093 source_id = self.tbl_dsc_files.c.source,
3094 source = relation(DBSource),
3095 poolfile_id = self.tbl_dsc_files.c.file,
3096 poolfile = relation(PoolFile)))
3098 mapper(PoolFile, self.tbl_files,
3099 properties = dict(file_id = self.tbl_files.c.id,
3100 filesize = self.tbl_files.c.size,
3101 location_id = self.tbl_files.c.location,
3102 location = relation(Location,
3103 # using lazy='dynamic' in the back
3104 # reference because we have A LOT of
3105 # files in one location
3106 backref=backref('files', lazy='dynamic'))),
3107 extension = validator)
3109 mapper(Fingerprint, self.tbl_fingerprint,
3110 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
3111 uid_id = self.tbl_fingerprint.c.uid,
3112 uid = relation(Uid),
3113 keyring_id = self.tbl_fingerprint.c.keyring,
3114 keyring = relation(Keyring),
3115 source_acl = relation(SourceACL),
3116 binary_acl = relation(BinaryACL)),
3117 extension = validator)
3119 mapper(Keyring, self.tbl_keyrings,
3120 properties = dict(keyring_name = self.tbl_keyrings.c.name,
3121 keyring_id = self.tbl_keyrings.c.id))
3123 mapper(DBChange, self.tbl_changes,
3124 properties = dict(change_id = self.tbl_changes.c.id,
3125 poolfiles = relation(PoolFile,
3126 secondary=self.tbl_changes_pool_files,
3127 backref="changeslinks"),
3128 seen = self.tbl_changes.c.seen,
3129 source = self.tbl_changes.c.source,
3130 binaries = self.tbl_changes.c.binaries,
3131 architecture = self.tbl_changes.c.architecture,
3132 distribution = self.tbl_changes.c.distribution,
3133 urgency = self.tbl_changes.c.urgency,
3134 maintainer = self.tbl_changes.c.maintainer,
3135 changedby = self.tbl_changes.c.changedby,
3136 date = self.tbl_changes.c.date,
3137 version = self.tbl_changes.c.version,
3138 files = relation(ChangePendingFile,
3139 secondary=self.tbl_changes_pending_files_map,
3140 backref="changesfile"),
3141 in_queue_id = self.tbl_changes.c.in_queue,
3142 in_queue = relation(PolicyQueue,
3143 primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
3144 approved_for_id = self.tbl_changes.c.approved_for))
3146 mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
3147 properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))
3149 mapper(ChangePendingFile, self.tbl_changes_pending_files,
3150 properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
3151 filename = self.tbl_changes_pending_files.c.filename,
3152 size = self.tbl_changes_pending_files.c.size,
3153 md5sum = self.tbl_changes_pending_files.c.md5sum,
3154 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
3155 sha256sum = self.tbl_changes_pending_files.c.sha256sum))
3157 mapper(ChangePendingSource, self.tbl_changes_pending_source,
3158 properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
3159 change = relation(DBChange),
# Two relations to Maintainer need explicit primaryjoins to disambiguate
# the maintainer_id vs changedby_id foreign keys.
3160 maintainer = relation(Maintainer,
3161 primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
3162 changedby = relation(Maintainer,
3163 primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
3164 fingerprint = relation(Fingerprint),
3165 source_files = relation(ChangePendingFile,
3166 secondary=self.tbl_changes_pending_source_files,
3167 backref="pending_sources")))
3170 mapper(KeyringACLMap, self.tbl_keyring_acl_map,
3171 properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
3172 keyring = relation(Keyring, backref="keyring_acl_map"),
3173 architecture = relation(Architecture)))
3175 mapper(Location, self.tbl_location,
3176 properties = dict(location_id = self.tbl_location.c.id,
3177 component_id = self.tbl_location.c.component,
3178 component = relation(Component, backref='location'),
3179 archive_id = self.tbl_location.c.archive,
3180 archive = relation(Archive),
3181 # FIXME: the 'type' column is old cruft and
3182 # should be removed in the future.
3183 archive_type = self.tbl_location.c.type),
3184 extension = validator)
3186 mapper(Maintainer, self.tbl_maintainer,
3187 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
3188 maintains_sources = relation(DBSource, backref='maintainer',
3189 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
3190 changed_sources = relation(DBSource, backref='changedby',
3191 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
3192 extension = validator)
3194 mapper(NewComment, self.tbl_new_comments,
3195 properties = dict(comment_id = self.tbl_new_comments.c.id))
3197 mapper(Override, self.tbl_override,
3198 properties = dict(suite_id = self.tbl_override.c.suite,
3199 suite = relation(Suite, \
3200 backref=backref('overrides', lazy='dynamic')),
3201 package = self.tbl_override.c.package,
3202 component_id = self.tbl_override.c.component,
3203 component = relation(Component, \
3204 backref=backref('overrides', lazy='dynamic')),
3205 priority_id = self.tbl_override.c.priority,
3206 priority = relation(Priority, \
3207 backref=backref('overrides', lazy='dynamic')),
3208 section_id = self.tbl_override.c.section,
3209 section = relation(Section, \
3210 backref=backref('overrides', lazy='dynamic')),
3211 overridetype_id = self.tbl_override.c.type,
3212 overridetype = relation(OverrideType, \
3213 backref=backref('overrides', lazy='dynamic'))))
3215 mapper(OverrideType, self.tbl_override_type,
3216 properties = dict(overridetype = self.tbl_override_type.c.type,
3217 overridetype_id = self.tbl_override_type.c.id))
3219 mapper(PolicyQueue, self.tbl_policy_queue,
3220 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))
3222 mapper(Priority, self.tbl_priority,
3223 properties = dict(priority_id = self.tbl_priority.c.id))
3225 mapper(Section, self.tbl_section,
3226 properties = dict(section_id = self.tbl_section.c.id,
3227 section=self.tbl_section.c.section))
3229 mapper(DBSource, self.tbl_source,
3230 properties = dict(source_id = self.tbl_source.c.id,
3231 version = self.tbl_source.c.version,
3232 maintainer_id = self.tbl_source.c.maintainer,
3233 poolfile_id = self.tbl_source.c.file,
3234 poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
3235 fingerprint_id = self.tbl_source.c.sig_fpr,
3236 fingerprint = relation(Fingerprint),
3237 changedby_id = self.tbl_source.c.changedby,
3238 srcfiles = relation(DSCFile,
3239 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
3240 suites = relation(Suite, secondary=self.tbl_src_associations,
3241 backref=backref('sources', lazy='dynamic')),
3242 srcuploaders = relation(SrcUploader)),
3243 extension = validator)
3245 mapper(SourceACL, self.tbl_source_acl,
3246 properties = dict(source_acl_id = self.tbl_source_acl.c.id))
3248 mapper(SrcFormat, self.tbl_src_format,
3249 properties = dict(src_format_id = self.tbl_src_format.c.id,
3250 format_name = self.tbl_src_format.c.format_name))
3252 mapper(SrcUploader, self.tbl_src_uploaders,
3253 properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
3254 source_id = self.tbl_src_uploaders.c.source,
3255 source = relation(DBSource,
3256 primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
3257 maintainer_id = self.tbl_src_uploaders.c.maintainer,
3258 maintainer = relation(Maintainer,
3259 primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))
3261 mapper(Suite, self.tbl_suite,
3262 properties = dict(suite_id = self.tbl_suite.c.id,
3263 policy_queue = relation(PolicyQueue),
3264 copy_queues = relation(BuildQueue,
3265 secondary=self.tbl_suite_build_queue_copy)),
3266 extension = validator)
3268 mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
3269 properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
3270 suite = relation(Suite, backref='suitesrcformats'),
3271 src_format_id = self.tbl_suite_src_formats.c.src_format,
3272 src_format = relation(SrcFormat)))
3274 mapper(Uid, self.tbl_uid,
3275 properties = dict(uid_id = self.tbl_uid.c.id,
3276 fingerprint = relation(Fingerprint)),
3277 extension = validator)
3279 mapper(UploadBlock, self.tbl_upload_blocks,
3280 properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
3281 fingerprint = relation(Fingerprint, backref="uploadblocks"),
3282 uid = relation(Uid, backref="uploadblocks")))
3284 mapper(BinContents, self.tbl_bin_contents,
3286 binary = relation(DBBinary,
3287 backref=backref('contents', lazy='dynamic')),
3288 file = self.tbl_bin_contents.c.file))
3290 ## Connection functions
# Build the postgres connection string from DB::* config, create the engine,
# reflect tables and set up mappers.
3291 def __createconn(self):
3292 from config import Config
# TCP connection when DB::Host is set; port appended unless it is unset/-1.
3296 connstr = "postgres://%s" % cnf["DB::Host"]
3297 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3298 connstr += ":%s" % cnf["DB::Port"]
3299 connstr += "/%s" % cnf["DB::Name"]
# Unix-socket connection otherwise; port passed as a query parameter.
3302 connstr = "postgres:///%s" % cnf["DB::Name"]
3303 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3304 connstr += "?port=%s" % cnf["DB::Port"]
3306 self.db_pg = create_engine(connstr, echo=self.debug)
3307 self.db_meta = MetaData()
3308 self.db_meta.bind = self.db_pg
3309 self.db_smaker = sessionmaker(bind=self.db_pg,
3313 self.__setuptables()
3314 self.__setupmappers()
# Session factory: returns a fresh SQLAlchemy session bound to the engine.
3317 return self.db_smaker()
3319 __all__.append('DBConn')