5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
47 import simplejson as json
49 from datetime import datetime, timedelta
50 from errno import ENOENT
51 from tempfile import mkstemp, mkdtemp
53 from inspect import getargspec
56 from sqlalchemy import create_engine, Table, MetaData, Column, Integer
57 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
58 backref, MapperExtension, EXT_CONTINUE
59 from sqlalchemy import types as sqltypes
61 # Don't remove this, we re-export the exceptions to scripts which import us
62 from sqlalchemy.exc import *
63 from sqlalchemy.orm.exc import NoResultFound
65 # Only import Config until Queue stuff is changed to store its config
67 from config import Config
68 from textutils import fix_maintainer
69 from dak_exceptions import DBUpdateError, NoSourceFieldError
71 # suppress some deprecation warnings in squeeze related to sqlalchemy
# NOTE(review): this filterwarnings() call appears truncated in this copy —
# its category argument (presumably SADeprecationWarning) is missing; confirm
# against upstream before relying on this file.
73 warnings.filterwarnings('ignore', \
74 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
76 # TODO: sqlalchemy needs some extra configuration to correctly reflect
77 # the ind_deb_contents_* indexes - we ignore the warnings at the moment
# Silence reflection warnings for the partial indexes we cannot model.
78 warnings.filterwarnings("ignore", 'Predicate of partial index', SAWarning)
81 ################################################################################
83 # Patch in support for the debversion field type so that it works during
87 # that is for sqlalchemy 0.6
88 UserDefinedType = sqltypes.UserDefinedType
90 # this one for sqlalchemy 0.5
# NOTE(review): these two assignments look like the two arms of a
# try/except version probe whose surrounding lines were lost in this copy —
# as written the second assignment would always win; confirm upstream.
91 UserDefinedType = sqltypes.TypeEngine
# Custom column type exposing PostgreSQL's 'debversion' to the ORM.
# Method bodies are elided in this copy; signatures only.
93 class DebVersion(UserDefinedType):
94 def get_col_spec(self):
97 def bind_processor(self, dialect):
100 # ' = None' is needed for sqlalchemy 0.5:
101 def result_processor(self, dialect, coltype = None):
# Register 'debversion' with the postgres dialect for the SQLAlchemy
# versions dak supports; refuse to run on anything else.
104 sa_major_version = sqlalchemy.__version__[0:3]
105 if sa_major_version in ["0.5", "0.6"]:
106 from sqlalchemy.databases import postgres
107 postgres.ischema_names['debversion'] = DebVersion
109 raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
111 ################################################################################
113 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
115 ################################################################################
# Decorator: supplies an implicit SQLAlchemy session when the caller did not
# pass one, and attaches session.commit_or_flush accordingly.
117 def session_wrapper(fn):
119 Wrapper around common ".., session=None):" handling. If the wrapped
120 function is called without passing 'session', we create a local one
121 and destroy it when the function ends.
123 Also attaches a commit_or_flush method to the session; if we created a
124 local session, this is a synonym for session.commit(), otherwise it is a
125 synonym for session.flush().
128 def wrapped(*args, **kwargs):
129 private_transaction = False
131 # Find the session object
132 session = kwargs.get('session')
# No session passed: decide whether it could have been the last positional
# argument; otherwise create a private session we own.
135 if len(args) <= len(getargspec(fn)[0]) - 1:
136 # No session specified as last argument or in kwargs
137 private_transaction = True
138 session = kwargs['session'] = DBConn().session()
140 # Session is last argument in args
# NOTE(review): the branch below looks like it originally checked whether
# args[-1] is None before replacing it — surrounding lines are missing here.
144 session = args[-1] = DBConn().session()
145 private_transaction = True
# commit_or_flush: commit a session we own, only flush a caller's session.
147 if private_transaction:
148 session.commit_or_flush = session.commit
150 session.commit_or_flush = session.flush
153 return fn(*args, **kwargs)
155 if private_transaction:
156 # We created a session; close it.
# Preserve the wrapped function's metadata (Python 2 spelling).
159 wrapped.__doc__ = fn.__doc__
160 wrapped.func_name = fn.func_name
164 __all__.append('session_wrapper')
166 ################################################################################
168 class ORMObject(object):
170 ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
171 derived classes must implement the properties() method.
174 def properties(self):
176 This method should be implemented by all derived classes and returns a
177 list of the important properties. The properties 'created' and
178 'modified' will be added automatically. A suffix '_count' should be
179 added to properties that are lists or query objects. The most important
180 property name should be returned as the first element in the list
181 because it is used by repr().
# json(): serialize the object's declared properties to a JSON string.
187 Returns a JSON representation of the object based on the properties
188 returned from the properties() method.
191 # add created and modified
192 all_properties = self.properties() + ['created', 'modified']
193 for property in all_properties:
194 # check for list or query
# '_count' suffix: report the length of the underlying collection/query
# instead of serializing its members.
195 if property[-6:] == '_count':
196 real_property = property[:-6]
197 if not hasattr(self, real_property):
199 value = getattr(self, real_property)
200 if hasattr(value, '__len__'):
203 elif hasattr(value, 'count'):
205 value = value.count()
207 raise KeyError('Do not understand property %s.' % property)
209 if not hasattr(self, property):
212 value = getattr(self, property)
216 elif isinstance(value, ORMObject):
217 # use repr() for ORMObject types
220 # we want a string for all other types because json cannot
223 data[property] = value
224 return json.dumps(data)
# classname(): convenience helper used by __repr__/__str__.
228 Returns the name of the class.
230 return type(self).__name__
234 Returns a short string representation of the object using the first
235 element from the properties() method.
237 primary_property = self.properties()[0]
238 value = getattr(self, primary_property)
239 return '<%s %s>' % (self.classname(), str(value))
243 Returns a human readable form of the object using the properties()
246 return '<%s %s>' % (self.classname(), self.json())
248 def not_null_constraints(self):
250 Returns a list of properties that must be not NULL. Derived classes
251 should override this method if needed.
# Template for the DBUpdateError raised by validate() below.
255 validation_message = \
256 "Validation failed because property '%s' must not be empty in object\n%s"
260 This function validates the not NULL constraints as returned by
261 not_null_constraints(). It raises the DBUpdateError exception if
264 for property in self.not_null_constraints():
265 # TODO: It is a bit awkward that the mapper configuration allow
266 # directly setting the numeric _id columns. We should get rid of it
# A set _id column satisfies the constraint even if the relation
# attribute itself is not populated yet.
268 if hasattr(self, property + '_id') and \
269 getattr(self, property + '_id') is not None:
271 if not hasattr(self, property) or getattr(self, property) is None:
272 raise DBUpdateError(self.validation_message % \
273 (property, str(self)))
# get(): classmethod-style shorthand for primary-key lookup.
277 def get(cls, primary_key, session = None):
279 This is a support function that allows getting an object by its primary
282 Architecture.get(3[, session])
284 instead of the more verbose
286 session.query(Architecture).get(3)
288 return session.query(cls).get(primary_key)
290 __all__.append('ORMObject')
292 ################################################################################
# MapperExtension hook: runs ORMObject.validate() before INSERT/UPDATE so
# not-null constraints fail early with a DBUpdateError instead of at the DB.
294 class Validator(MapperExtension):
296 This class calls the validate() method for each instance for the
297 'before_update' and 'before_insert' events. A global object validator is
298 used for configuring the individual mappers.
301 def before_update(self, mapper, connection, instance):
305 def before_insert(self, mapper, connection, instance):
# Shared singleton passed to the individual mapper() configurations.
309 validator = Validator()
311 ################################################################################
class Architecture(ORMObject):
    """ORM class for a Debian architecture (e.g. 'amd64', 'source')."""

    def __init__(self, arch_string=None, description=None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Support comparing an Architecture directly against its name.
        if isinstance(val, str):
            return self.arch_string == val
        # Defer to the default comparison machinery for anything else.
        return NotImplemented

    def __ne__(self, val):
        # Mirror image of __eq__ for string inequality.
        if isinstance(val, str):
            return self.arch_string != val
        return NotImplemented

    def properties(self):
        # 'arch_string' comes first: ORMObject.__repr__() uses element 0.
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']
336 __all__.append('Architecture')
# Look up an Architecture row by its name.
339 def get_architecture(architecture, session=None):
341 Returns Architecture object for given C{architecture} name.
343 @type architecture: string
344 @param architecture: The name of the architecture
346 @type session: Session
347 @param session: Optional SQLA session object (a temporary one will be
348 generated if not supplied)
351 @return: Architecture object for the given arch (None if not present)
354 q = session.query(Architecture).filter_by(arch_string=architecture)
# NOTE(review): the try/return lines are elided in this copy; presumably
# `return q.one()` with `return None` in the handler — confirm upstream.
358 except NoResultFound:
361 __all__.append('get_architecture')
363 # TODO: should be removed because the implementation is too trivial
365 def get_architecture_suites(architecture, session=None):
367 Returns list of Suite objects for given C{architecture} name
369 @type architecture: str
370 @param architecture: Architecture name to search for
372 @type session: Session
373 @param session: Optional SQL session object (a temporary one will be
374 generated if not supplied)
377 @return: list of Suite objects for the given name (may be empty)
# NOTE(review): get_architecture() can return None for an unknown name, in
# which case this raises AttributeError rather than returning [] — confirm
# whether callers rely on that.
380 return get_architecture(architecture, session).suites
382 __all__.append('get_architecture_suites')
384 ################################################################################
# Plain mapped class for an archive row; attributes come from the mapper.
386 class Archive(object):
387 def __init__(self, *args, **kwargs):
391 return '<Archive %s>' % self.archive_name
393 __all__.append('Archive')
# Look up an Archive row by (case-insensitive) name.
396 def get_archive(archive, session=None):
398 Returns Archive object for given C{archive} name.
400 @type archive: string
401 @param archive: the name of the archive
403 @type session: Session
404 @param session: Optional SQLA session object (a temporary one will be
405 generated if not supplied)
408 @return: Archive object for the given name (None if not present)
411 archive = archive.lower()
413 q = session.query(Archive).filter_by(archive_name=archive)
# NOTE(review): try/return lines elided in this copy — confirm upstream.
417 except NoResultFound:
420 __all__.append('get_archive')
422 ################################################################################
# Association row linking a binary package to a suite.
424 class BinAssociation(object):
425 def __init__(self, *args, **kwargs):
429 return '<BinAssociation %s (%s, %s)>' % (self.ba_id, self.binary, self.suite)
431 __all__.append('BinAssociation')
433 ################################################################################
# One (binary, filename) contents entry.
435 class BinContents(object):
436 def __init__(self, *args, **kwargs):
440 return '<BinContents (%s, %s)>' % (self.binary, self.filename)
442 __all__.append('BinContents')
444 ################################################################################
# ORM class for a row in the binaries table.
446 class DBBinary(ORMObject):
447 def __init__(self, package = None, source = None, version = None, \
448 maintainer = None, architecture = None, poolfile = None, \
450 self.package = package
# NOTE(review): the `self.source = source` assignment appears to be missing
# from this copy of the constructor — confirm against upstream.
452 self.version = version
453 self.maintainer = maintainer
454 self.architecture = architecture
455 self.poolfile = poolfile
456 self.binarytype = binarytype
458 def properties(self):
459 return ['package', 'version', 'maintainer', 'source', 'architecture', \
460 'poolfile', 'binarytype', 'fingerprint', 'install_date', \
463 def not_null_constraints(self):
464 return ['package', 'version', 'maintainer', 'source', 'architecture', \
465 'poolfile', 'binarytype']
467 __all__.append('DBBinary')
# All suites that contain any version of the named binary package.
470 def get_suites_binary_in(package, session=None):
472 Returns list of Suite objects which given C{package} name is in
475 @param package: DBBinary package name to search for
478 @return: list of Suite objects for the given package
481 return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()
483 __all__.append('get_suites_binary_in')
# Fetch a single DBBinary by its numeric id.
486 def get_binary_from_id(binary_id, session=None):
488 Returns DBBinary object for given C{id}
491 @param binary_id: Id of the required binary
493 @type session: Session
494 @param session: Optional SQLA session object (a temporary one will be
495 generated if not supplied)
498 @return: DBBinary object for the given binary (None if not present)
501 q = session.query(DBBinary).filter_by(binary_id=binary_id)
# NOTE(review): try/return lines elided in this copy — confirm upstream.
505 except NoResultFound:
508 __all__.append('get_binary_from_id')
# Query binaries by package name with optional version/architecture filters.
511 def get_binaries_from_name(package, version=None, architecture=None, session=None):
513 Returns list of DBBinary objects for given C{package} name
516 @param package: DBBinary package name to search for
518 @type version: str or None
519 @param version: Version to search for (or None)
521 @type architecture: str, list or None
522 @param architecture: Architectures to limit to (or None if no limit)
524 @type session: Session
525 @param session: Optional SQL session object (a temporary one will be
526 generated if not supplied)
529 @return: list of DBBinary objects for the given name (may be empty)
532 q = session.query(DBBinary).filter_by(package=package)
534 if version is not None:
535 q = q.filter_by(version=version)
537 if architecture is not None:
# Normalise a single architecture name into a one-element list so that a
# single IN clause covers both call styles.
538 if not isinstance(architecture, list):
539 architecture = [architecture]
540 q = q.join(Architecture).filter(Architecture.arch_string.in_(architecture))
546 __all__.append('get_binaries_from_name')
# All binaries built from a given source id.
549 def get_binaries_from_source_id(source_id, session=None):
551 Returns list of DBBinary objects for given C{source_id}
554 @param source_id: source_id to search for
556 @type session: Session
557 @param session: Optional SQL session object (a temporary one will be
558 generated if not supplied)
561 @return: list of DBBinary objects for the given name (may be empty)
564 return session.query(DBBinary).filter_by(source_id=source_id).all()
566 __all__.append('get_binaries_from_source_id')
569 def get_binary_from_name_suite(package, suitename, session=None):
570 ### For dak examine-package
571 ### XXX: Doesn't use object API yet
# SECURITY NOTE(review): this builds SQL via %-interpolation of 'package'
# and splices 'suitename' in as a raw operator+value fragment. Unlike
# get_binary_components() below it uses no bound parameters, so it is only
# safe for trusted input — consider parameterizing at least 'package'.
573 sql = """SELECT DISTINCT(b.package), b.version, c.name, su.suite_name
574 FROM binaries b, files fi, location l, component c, bin_associations ba, suite su
575 WHERE b.package='%(package)s'
577 AND fi.location = l.id
578 AND l.component = c.id
581 AND su.suite_name %(suitename)s
582 ORDER BY b.version DESC"""
584 return session.execute(sql % {'package': package, 'suitename': suitename})
586 __all__.append('get_binary_from_name_suite')
# Components a binary is found in for a suite/arch — detects component moves.
# Uses bound parameters (:package etc.) rather than string interpolation.
589 def get_binary_components(package, suitename, arch, session=None):
590 # Check for packages that have moved from one component to another
591 query = """SELECT c.name FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f
592 WHERE b.package=:package AND s.suite_name=:suitename
593 AND (a.arch_string = :arch OR a.arch_string = 'all')
594 AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
595 AND f.location = l.id
596 AND l.component = c.id
599 vals = {'package': package, 'suitename': suitename, 'arch': arch}
601 return session.execute(query, vals)
603 __all__.append('get_binary_components')
605 ################################################################################
# Mapped class for a binary upload ACL row.
607 class BinaryACL(object):
608 def __init__(self, *args, **kwargs):
612 return '<BinaryACL %s>' % self.binary_acl_id
614 __all__.append('BinaryACL')
616 ################################################################################
# Mapped class linking a fingerprint to a BinaryACL.
618 class BinaryACLMap(object):
619 def __init__(self, *args, **kwargs):
623 return '<BinaryACLMap %s>' % self.binary_acl_map_id
625 __all__.append('BinaryACLMap')
627 ################################################################################
632 ArchiveDir "%(archivepath)s";
633 OverrideDir "%(overridedir)s";
634 CacheDir "%(cachedir)s";
639 Packages::Compress ". bzip2 gzip";
640 Sources::Compress ". bzip2 gzip";
645 bindirectory "incoming"
650 BinOverride "override.sid.all3";
651 BinCacheDB "packages-accepted.db";
653 FileList "%(filelist)s";
656 Packages::Extensions ".deb .udeb";
659 bindirectory "incoming/"
662 BinOverride "override.sid.all3";
663 SrcOverride "override.sid.all3.src";
664 FileList "%(filelist)s";
# A build queue directory (e.g. buildd incoming): manages its files on disk
# and the apt metadata (Packages/Sources/Release) published from them.
668 class BuildQueue(object):
669 def __init__(self, *args, **kwargs):
673 return '<BuildQueue %s>' % self.queue_name
# write_metadata(): regenerate Packages/Sources/Release for this queue by
# shelling out to apt-ftparchive and gpg.
675 def write_metadata(self, starttime, force=False):
676 # Do we write out metafiles?
677 if not (force or self.generate_metadata):
680 session = DBConn().session().object_session(self)
682 fl_fd = fl_name = ac_fd = ac_name = None
# Every known architecture except 'source' goes into the Release header.
684 arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
685 startdir = os.getcwd()
688 # Grab files we want to include
689 newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
690 # Write file list with newer files
691 (fl_fd, fl_name) = mkstemp()
693 os.write(fl_fd, '%s\n' % n.fullpath)
698 # Write minimal apt.conf
699 # TODO: Remove hardcoding from template
700 (ac_fd, ac_name) = mkstemp()
701 os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
703 'cachedir': cnf["Dir::Cache"],
704 'overridedir': cnf["Dir::Override"],
708 # Run apt-ftparchive generate
709 os.chdir(os.path.dirname(ac_name))
710 os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
712 # Run apt-ftparchive release
713 # TODO: Eww - fix this
714 bname = os.path.basename(self.path)
718 # We have to remove the Release file otherwise it'll be included in the
721 os.unlink(os.path.join(bname, 'Release'))
# SECURITY NOTE(review): os.system with %-interpolated config values
# (origin/label/description) — fine for trusted dak.conf, but a shell-unsafe
# character in any of them breaks/abuses the command line.
725 os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
727 # Crude hack with open and append, but this whole section is and should be redone.
728 if self.notautomatic:
729 release=open("Release", "a")
730 release.write("NotAutomatic: yes")
# NOTE(review): no close() for 'release' is visible in this copy — confirm
# the handle is closed before the file is signed/renamed below.
# Build the gpg keyring arguments and detach-sign Release -> Release.gpg.
735 keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
736 if cnf.has_key("Dinstall::SigningPubKeyring"):
737 keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
739 os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
741 # Move the files if we got this far
742 os.rename('Release', os.path.join(bname, 'Release'))
744 os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
746 # Clean up any left behind files
# clean_and_update(): expire queue files older than stay_of_execution and
# remove stale metadata links. Commits via the session it obtains itself.
773 def clean_and_update(self, starttime, Logger, dryrun=False):
774 """WARNING: This routine commits for you"""
775 session = DBConn().session().object_session(self)
777 if self.generate_metadata and not dryrun:
778 self.write_metadata(starttime)
780 # Grab files older than our execution time
781 older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
787 Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
789 Logger.log(["I: Removing %s from the queue" % o.fullpath])
790 os.unlink(o.fullpath)
793 # If it wasn't there, don't worry
794 if e.errno == ENOENT:
797 # TODO: Replace with proper logging call
798 Logger.log(["E: Could not remove %s" % o.fullpath])
# Second pass: delete metadata-ish files in the directory that no longer
# have a BuildQueueFile row backing them.
805 for f in os.listdir(self.path):
806 if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
810 r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
811 except NoResultFound:
812 fp = os.path.join(self.path, f)
814 Logger.log(["I: Would remove unused link %s" % fp])
816 Logger.log(["I: Removing unused link %s" % fp])
820 Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
822 def add_file_from_pool(self, poolfile):
823 """Copies a file into the pool. Assumes that the PoolFile object is
824 attached to the same SQLAlchemy session as the Queue object is.
826 The caller is responsible for committing after calling this function."""
827 poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
829 # Check if we have a file of this name or this ID already
830 for f in self.queuefiles:
831 if f.fileid is not None and f.fileid == poolfile.file_id or \
832 f.poolfile.filename == poolfile_basename:
833 # In this case, update the BuildQueueFile entry so we
834 # don't remove it too early
835 f.lastused = datetime.now()
836 DBConn().session().object_session(poolfile).add(f)
839 # Prepare BuildQueueFile object
840 qf = BuildQueueFile()
841 qf.build_queue_id = self.queue_id
842 qf.lastused = datetime.now()
843 qf.filename = poolfile_basename
845 targetpath = poolfile.fullpath
846 queuepath = os.path.join(self.path, poolfile_basename)
# Either copy the pool file or symlink it; fileid stays NULL for a copy so
# cleanup knows the queue owns the bytes.
850 # We need to copy instead of symlink
852 utils.copy(targetpath, queuepath)
853 # NULL in the fileid field implies a copy
856 os.symlink(targetpath, queuepath)
857 qf.fileid = poolfile.file_id
861 # Get the same session as the PoolFile is using and add the qf to it
862 DBConn().session().object_session(poolfile).add(qf)
867 __all__.append('BuildQueue')
# Look up a BuildQueue by name.
870 def get_build_queue(queuename, session=None):
872 Returns BuildQueue object for given C{queue name}, creating it if it does not
875 @type queuename: string
876 @param queuename: The name of the queue
878 @type session: Session
879 @param session: Optional SQLA session object (a temporary one will be
880 generated if not supplied)
883 @return: BuildQueue object for the given queue
886 q = session.query(BuildQueue).filter_by(queue_name=queuename)
# NOTE(review): try/return lines elided in this copy — confirm upstream.
890 except NoResultFound:
893 __all__.append('get_build_queue')
895 ################################################################################
# One file tracked in a build queue directory; fullpath joins the owning
# queue's path with this row's filename.
897 class BuildQueueFile(object):
898 def __init__(self, *args, **kwargs):
902 return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
906 return os.path.join(self.buildqueue.path, self.filename)
909 __all__.append('BuildQueueFile')
911 ################################################################################
# Pending-changes bookkeeping rows: plain mapped classes with repr only.
913 class ChangePendingBinary(object):
914 def __init__(self, *args, **kwargs):
918 return '<ChangePendingBinary %s>' % self.change_pending_binary_id
920 __all__.append('ChangePendingBinary')
922 ################################################################################
924 class ChangePendingFile(object):
925 def __init__(self, *args, **kwargs):
929 return '<ChangePendingFile %s>' % self.change_pending_file_id
931 __all__.append('ChangePendingFile')
933 ################################################################################
935 class ChangePendingSource(object):
936 def __init__(self, *args, **kwargs):
940 return '<ChangePendingSource %s>' % self.change_pending_source_id
942 __all__.append('ChangePendingSource')
944 ################################################################################
# Mapped class for an archive component (main/contrib/non-free); compares
# directly against plain strings like Architecture does.
946 class Component(object):
947 def __init__(self, *args, **kwargs):
950 def __eq__(self, val):
951 if isinstance(val, str):
952 return (self.component_name == val)
953 # This signals to use the normal comparison operator
954 return NotImplemented
956 def __ne__(self, val):
957 if isinstance(val, str):
958 return (self.component_name != val)
959 # This signals to use the normal comparison operator
960 return NotImplemented
963 return '<Component %s>' % self.component_name
966 __all__.append('Component')
# Look up a Component row by (case-insensitive) name.
969 def get_component(component, session=None):
971 Returns database id for given C{component}.
973 @type component: string
974 @param component: The name of the component
# NOTE(review): the @return text likely misstates the result — the elided
# body presumably returns a Component object via q.one(); confirm upstream.
977 @return: the database id for the given component
980 component = component.lower()
982 q = session.query(Component).filter_by(component_name=component)
986 except NoResultFound:
989 __all__.append('get_component')
991 ################################################################################
# Mapped class for a key/value configuration row stored in the database.
993 class DBConfig(object):
994 def __init__(self, *args, **kwargs):
998 return '<DBConfig %s>' % self.name
1000 __all__.append('DBConfig')
1002 ################################################################################
# Get-or-create pattern: look up a ContentFilename row, inserting it when
# absent, and return its id either way.
1005 def get_or_set_contents_file_id(filename, session=None):
1007 Returns database id for given filename.
1009 If no matching file is found, a row is inserted.
1011 @type filename: string
1012 @param filename: The filename
1013 @type session: SQLAlchemy
1014 @param session: Optional SQL session object (a temporary one will be
1015 generated if not supplied). If not passed, a commit will be performed at
1016 the end of the function, otherwise the caller is responsible for committing.
1019 @return: the database id for the given filename
1022 q = session.query(ContentFilename).filter_by(filename=filename)
1025 ret = q.one().cafilename_id
1026 except NoResultFound:
1027 cf = ContentFilename()
1028 cf.filename = filename
# commit_or_flush is attached by the session_wrapper decorator.
1030 session.commit_or_flush()
1031 ret = cf.cafilename_id
1035 __all__.append('get_or_set_contents_file_id')
# Raw-SQL query joining contents, binaries, override and section to list the
# files shipped in a suite for one override type, optionally one section.
1038 def get_contents(suite, overridetype, section=None, session=None):
1040 Returns contents for a suite / overridetype combination, limiting
1041 to a section if not None.
1044 @param suite: Suite object
1046 @type overridetype: OverrideType
1047 @param overridetype: OverrideType object
1049 @type section: Section
1050 @param section: Optional section object to limit results to
1052 @type session: SQLAlchemy
1053 @param session: Optional SQL session object (a temporary one will be
1054 generated if not supplied)
1056 @rtype: ResultsProxy
1057 @return: ResultsProxy object set up to return tuples of (filename, section,
1061 # find me all of the contents for a given suite
1062 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
1066 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
1067 JOIN content_file_names n ON (c.filename=n.id)
1068 JOIN binaries b ON (b.id=c.binary_pkg)
1069 JOIN override o ON (o.package=b.package)
1070 JOIN section s ON (s.id=o.section)
1071 WHERE o.suite = :suiteid AND o.type = :overridetypeid
1072 AND b.type=:overridetypename"""
# All user-supplied values travel as bound parameters — no interpolation.
1074 vals = {'suiteid': suite.suite_id,
1075 'overridetypeid': overridetype.overridetype_id,
1076 'overridetypename': overridetype.overridetype}
1078 if section is not None:
1079 contents_q += " AND s.id = :sectionid"
1080 vals['sectionid'] = section.section_id
1082 contents_q += " ORDER BY fn"
1084 return session.execute(contents_q, vals)
1086 __all__.append('get_contents')
1088 ################################################################################
# Mapped class for a directory path referenced by contents entries.
1090 class ContentFilepath(object):
1091 def __init__(self, *args, **kwargs):
1095 return '<ContentFilepath %s>' % self.filepath
1097 __all__.append('ContentFilepath')
# Get-or-create: mirror of get_or_set_contents_file_id() for paths.
1100 def get_or_set_contents_path_id(filepath, session=None):
1102 Returns database id for given path.
1104 If no matching file is found, a row is inserted.
1106 @type filepath: string
1107 @param filepath: The filepath
1109 @type session: SQLAlchemy
1110 @param session: Optional SQL session object (a temporary one will be
1111 generated if not supplied). If not passed, a commit will be performed at
1112 the end of the function, otherwise the caller is responsible for committing.
1115 @return: the database id for the given path
1118 q = session.query(ContentFilepath).filter_by(filepath=filepath)
1121 ret = q.one().cafilepath_id
1122 except NoResultFound:
1123 cf = ContentFilepath()
1124 cf.filepath = filepath
1126 session.commit_or_flush()
1127 ret = cf.cafilepath_id
1131 __all__.append('get_or_set_contents_path_id')
1133 ################################################################################
# Mapped class for a contents association row.
1135 class ContentAssociation(object):
1136 def __init__(self, *args, **kwargs):
1140 return '<ContentAssociation %s>' % self.ca_id
1142 __all__.append('ContentAssociation')
# Bulk-insert contents paths for one binary via raw parameterized SQL.
1144 def insert_content_paths(binary_id, fullpaths, session=None):
1146 Make sure given path is associated with given binary id
1148 @type binary_id: int
1149 @param binary_id: the id of the binary
1150 @type fullpaths: list
1151 @param fullpaths: the list of paths of the file being associated with the binary
1152 @type session: SQLAlchemy session
1153 @param session: Optional SQLAlchemy session. If this is passed, the caller
1154 is responsible for ensuring a transaction has begun and committing the
1155 results or rolling back based on the result code. If not passed, a commit
1156 will be performed at the end of the function, otherwise the caller is
1157 responsible for committing.
1159 @return: True upon success
1162 privatetrans = False
1164 session = DBConn().session()
# Generator producing one bind-parameter dict per path, normalising away a
# leading './'.
1169 def generate_path_dicts():
1170 for fullpath in fullpaths:
1171 if fullpath.startswith( './' ):
1172 fullpath = fullpath[2:]
1174 yield {'filename':fullpath, 'id': binary_id }
1176 for d in generate_path_dicts():
1177 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
# Failure path: dump the traceback for diagnosis before rolling back.
1186 traceback.print_exc()
1188 # Only rollback if we set up the session ourself
1195 __all__.append('insert_content_paths')
1197 ################################################################################
# Mapped class for a .dsc file association row.
1199 class DSCFile(object):
1200 def __init__(self, *args, **kwargs):
1204 return '<DSCFile %s>' % self.dscfile_id
1206 __all__.append('DSCFile')
# Query DSCFiles, narrowing by whichever optional ids were supplied.
1209 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
1211 Returns a list of DSCFiles which may be empty
1213 @type dscfile_id: int (optional)
1214 @param dscfile_id: the dscfile_id of the DSCFiles to find
1216 @type source_id: int (optional)
1217 @param source_id: the source id related to the DSCFiles to find
1219 @type poolfile_id: int (optional)
1220 @param poolfile_id: the poolfile id related to the DSCFiles to find
1223 @return: Possibly empty list of DSCFiles
1226 q = session.query(DSCFile)
1228 if dscfile_id is not None:
1229 q = q.filter_by(dscfile_id=dscfile_id)
1231 if source_id is not None:
1232 q = q.filter_by(source_id=source_id)
1234 if poolfile_id is not None:
1235 q = q.filter_by(poolfile_id=poolfile_id)
1239 __all__.append('get_dscfiles')
1241 ################################################################################
# ORM class for a file in the archive pool.
1243 class PoolFile(ORMObject):
1244 def __init__(self, filename = None, location = None, filesize = -1, \
1246 self.filename = filename
1247 self.location = location
1248 self.filesize = filesize
1249 self.md5sum = md5sum
# fullpath: absolute on-disk location of this pool file.
1253 return os.path.join(self.location.path, self.filename)
# NOTE(review): the trailing backslash on the next line looks like a copy
# artifact (it would continue the def line) — confirm against upstream.
1255 def is_valid(self, filesize = -1, md5sum = None):\
1256 return self.filesize == filesize and self.md5sum == md5sum
1258 def properties(self):
1259 return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
1260 'sha256sum', 'location', 'source', 'last_used']
1262 def not_null_constraints(self):
1263 return ['filename', 'md5sum', 'location']
1265 __all__.append('PoolFile')
# Verify that a pool file exists at a location with the expected size/md5sum.
1268 def check_poolfile(filename, filesize, md5sum, location_id, session=None):
1271 (ValidFileFound [boolean], PoolFile object or None)
1273 @type filename: string
1274 @param filename: the filename of the file to check against the DB
1277 @param filesize: the size of the file to check against the DB
1279 @type md5sum: string
1280 @param md5sum: the md5sum of the file to check against the DB
1282 @type location_id: int
1283 @param location_id: the id of the location to look in
1286 @return: Tuple of length 2.
1287 - If valid pool file found: (C{True}, C{PoolFile object})
1288 - If valid pool file not found:
1289 - (C{False}, C{None}) if no file found
1290 - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
1293 poolfile = session.query(Location).get(location_id). \
1294 files.filter_by(filename=filename).first()
# 'valid' is presumably initialised to False above; this copy elides it.
1296 if poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum):
1299 return (valid, poolfile)
1301 __all__.append('check_poolfile')
1303 # TODO: the implementation can trivially be inlined at the place where the
1304 # function is called
# Thin wrapper over Query.get(): primary-key lookup, None if absent.
1306 def get_poolfile_by_id(file_id, session=None):
1308     Returns a PoolFile objects or None for the given id
1311     @param file_id: the id of the file to look for
1313     @rtype: PoolFile or None
1314     @return: either the PoolFile object or None
1317     return session.query(PoolFile).get(file_id)
1319 __all__.append('get_poolfile_by_id')
# Suffix match: finds pool files whose path ends in '/<filename>'.
1322 def get_poolfile_like_name(filename, session=None):
1324     Returns an array of PoolFile objects which are like the given name
1326     @type filename: string
1327     @param filename: the filename of the file to check against the DB
1330     @return: array of PoolFile objects
# NOTE(review): filename is interpolated into the LIKE pattern; a name
# containing % or _ would act as a wildcard here.
1333     # TODO: There must be a way of properly using bind parameters with %FOO%
1334     q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1338 __all__.append('get_poolfile_like_name')
# Inserts a new PoolFile populated from a dict of checksums/size, flushing
# (not committing) so the caller gets a file_id immediately.
1341 def add_poolfile(filename, datadict, location_id, session=None):
1343     Add a new file to the pool
1345     @type filename: string
1346     @param filename: filename
1348     @type datadict: dict
1349     @param datadict: dict with needed data
1351     @type location_id: int
1352     @param location_id: database id of the location
1355     @return: the PoolFile object created
1357     poolfile = PoolFile()
1358     poolfile.filename = filename
# datadict must supply 'size', 'md5sum', 'sha1sum', 'sha256sum' keys.
1359     poolfile.filesize = datadict["size"]
1360     poolfile.md5sum = datadict["md5sum"]
1361     poolfile.sha1sum = datadict["sha1sum"]
1362     poolfile.sha256sum = datadict["sha256sum"]
1363     poolfile.location_id = location_id
1365     session.add(poolfile)
1366     # Flush to get a file id (NB: This is not a commit)
1371 __all__.append('add_poolfile')
1373 ################################################################################
# ORM class for a GPG key fingerprint row; links to keyring and uid.
1375 class Fingerprint(ORMObject):
1376     def __init__(self, fingerprint = None):
1377         self.fingerprint = fingerprint
1379     def properties(self):
1380         return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
1383     def not_null_constraints(self):
1384         return ['fingerprint']
1386 __all__.append('Fingerprint')
# Read-only lookup of a Fingerprint by its fpr string; returns None when
# not found (NoResultFound is caught below, in a sampled-out line).
1389 def get_fingerprint(fpr, session=None):
1391     Returns Fingerprint object for given fpr.
1394     @param fpr: The fpr to find / add
1396     @type session: SQLAlchemy
1397     @param session: Optional SQL session object (a temporary one will be
1398     generated if not supplied).
1401     @return: the Fingerprint object for the given fpr or None
1404     q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1408     except NoResultFound:
1413 __all__.append('get_fingerprint')
# Find-or-create variant: inserts a Fingerprint row when the fpr is new,
# then commit-or-flushes depending on session ownership.
1416 def get_or_set_fingerprint(fpr, session=None):
1418     Returns Fingerprint object for given fpr.
1420     If no matching fpr is found, a row is inserted.
1423     @param fpr: The fpr to find / add
1425     @type session: SQLAlchemy
1426     @param session: Optional SQL session object (a temporary one will be
1427     generated if not supplied). If not passed, a commit will be performed at
1428     the end of the function, otherwise the caller is responsible for commiting.
1429     A flush will be performed either way.
1432     @return: the Fingerprint object for the given fpr
1435     q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1439     except NoResultFound:
1440         fingerprint = Fingerprint()
1441         fingerprint.fingerprint = fpr
1442         session.add(fingerprint)
# commit_or_flush: commits for private sessions, flushes for caller-owned.
1443         session.commit_or_flush()
1448 __all__.append('get_or_set_fingerprint')
1450 ################################################################################
1452 # Helper routine for Keyring class
# Builds a display name from the cn/mn/sn LDAP attributes, skipping empty
# or "-" placeholder values.
1453 def get_ldap_name(entry):
1455     for k in ["cn", "mn", "sn"]:
1457         if ret and ret[0] != "" and ret[0] != "-":
1459     return " ".join(name)
1461 ################################################################################
1463 class Keyring(object):
1464 gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
1465 " --with-colons --fingerprint --fingerprint"
1470 def __init__(self, *args, **kwargs):
1474 return '<Keyring %s>' % self.keyring_name
1476 def de_escape_gpg_str(self, txt):
1477 esclist = re.split(r'(\\x..)', txt)
1478 for x in range(1,len(esclist),2):
1479 esclist[x] = "%c" % (int(esclist[x][2:],16))
1480 return "".join(esclist)
1482 def parse_address(self, uid):
1483 """parses uid and returns a tuple of real name and email address"""
1485 (name, address) = email.Utils.parseaddr(uid)
1486 name = re.sub(r"\s*[(].*[)]", "", name)
1487 name = self.de_escape_gpg_str(name)
1490 return (name, address)
1492 def load_keys(self, keyring):
1493 if not self.keyring_id:
1494 raise Exception('Must be initialized with database information')
1496 k = os.popen(self.gpg_invocation % keyring, "r")
1500 for line in k.xreadlines():
1501 field = line.split(":")
1502 if field[0] == "pub":
1505 (name, addr) = self.parse_address(field[9])
1507 self.keys[key]["email"] = addr
1508 self.keys[key]["name"] = name
1509 self.keys[key]["fingerprints"] = []
1511 elif key and field[0] == "sub" and len(field) >= 12:
1512 signingkey = ("s" in field[11])
1513 elif key and field[0] == "uid":
1514 (name, addr) = self.parse_address(field[9])
1515 if "email" not in self.keys[key] and "@" in addr:
1516 self.keys[key]["email"] = addr
1517 self.keys[key]["name"] = name
1518 elif signingkey and field[0] == "fpr":
1519 self.keys[key]["fingerprints"].append(field[9])
1520 self.fpr_lookup[field[9]] = key
1522 def import_users_from_ldap(self, session):
1526 LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
1527 LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
1529 l = ldap.open(LDAPServer)
1530 l.simple_bind_s("","")
1531 Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
1532 "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
1533 ["uid", "keyfingerprint", "cn", "mn", "sn"])
1535 ldap_fin_uid_id = {}
1542 uid = entry["uid"][0]
1543 name = get_ldap_name(entry)
1544 fingerprints = entry["keyFingerPrint"]
1546 for f in fingerprints:
1547 key = self.fpr_lookup.get(f, None)
1548 if key not in self.keys:
1550 self.keys[key]["uid"] = uid
1554 keyid = get_or_set_uid(uid, session).uid_id
1555 byuid[keyid] = (uid, name)
1556 byname[uid] = (keyid, name)
1558 return (byname, byuid)
1560 def generate_users_from_keyring(self, format, session):
1564 for x in self.keys.keys():
1565 if "email" not in self.keys[x]:
1567 self.keys[x]["uid"] = format % "invalid-uid"
1569 uid = format % self.keys[x]["email"]
1570 keyid = get_or_set_uid(uid, session).uid_id
1571 byuid[keyid] = (uid, self.keys[x]["name"])
1572 byname[uid] = (keyid, self.keys[x]["name"])
1573 self.keys[x]["uid"] = uid
1576 uid = format % "invalid-uid"
1577 keyid = get_or_set_uid(uid, session).uid_id
1578 byuid[keyid] = (uid, "ungeneratable user id")
1579 byname[uid] = (keyid, "ungeneratable user id")
1581 return (byname, byuid)
1583 __all__.append('Keyring')
# Lookup of a Keyring row by name; returns None when absent (the
# NoResultFound handler's body is sampled out).
1586 def get_keyring(keyring, session=None):
1588     If C{keyring} does not have an entry in the C{keyrings} table yet, return None
1589     If C{keyring} already has an entry, simply return the existing Keyring
1591     @type keyring: string
1592     @param keyring: the keyring name
1595     @return: the Keyring object for this keyring
1598     q = session.query(Keyring).filter_by(keyring_name=keyring)
1602     except NoResultFound:
1605 __all__.append('get_keyring')
1607 ################################################################################
# ORM class mapping keyrings to ACLs (plain mapped row holder).
1609 class KeyringACLMap(object):
1610     def __init__(self, *args, **kwargs):
1614         return '<KeyringACLMap %s>' % self.keyring_acl_map_id
1616 __all__.append('KeyringACLMap')
1618 ################################################################################
# ORM class for a .changes upload record, with helper to detach it from a
# policy queue and drop its file references.
1620 class DBChange(object):
1621     def __init__(self, *args, **kwargs):
1625         return '<DBChange %s>' % self.changesname
1627     def clean_from_queue(self):
1628         session = DBConn().session().object_session(self)
1630         # Remove changes_pool_files entries
1633         # Remove changes_pending_files references
1636         # Clear out of queue
1637         self.in_queue = None
1638         self.approved_for_id = None
1640 __all__.append('DBChange')
# Lookup of a DBChange by its .changes filename; None when not present.
1643 def get_dbchange(filename, session=None):
1645     returns DBChange object for given C{filename}.
1647     @type filename: string
1648     @param filename: the name of the file
1650     @type session: Session
1651     @param session: Optional SQLA session object (a temporary one will be
1652     generated if not supplied)
1655     @return: DBChange object for the given filename (C{None} if not present)
1658     q = session.query(DBChange).filter_by(changesname=filename)
1662     except NoResultFound:
1665 __all__.append('get_dbchange')
1667 ################################################################################
# ORM class for an archive location (pool directory + component/archive).
1669 class Location(ORMObject):
1670     def __init__(self, path = None):
1672         # the column 'type' should go away, see comment at mapper
1673         self.archive_type = 'pool'
1675     def properties(self):
1676         return ['path', 'archive_type', 'component', 'files_count']
1678     def not_null_constraints(self):
1679         return ['path', 'archive_type']
1681 __all__.append('Location')
# Lookup of a Location by path, optionally narrowed by archive and/or
# component name via joins; None when no unique match.
1684 def get_location(location, component=None, archive=None, session=None):
1686     Returns Location object for the given combination of location, component
1689     @type location: string
1690     @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}
1692     @type component: string
1693     @param component: the component name (if None, no restriction applied)
1695     @type archive: string
1696     @param archive: the archive name (if None, no restriction applied)
1698     @rtype: Location / None
1699     @return: Either a Location object or None if one can't be found
1702     q = session.query(Location).filter_by(path=location)
1704     if archive is not None:
1705         q = q.join(Archive).filter_by(archive_name=archive)
1707     if component is not None:
1708         q = q.join(Component).filter_by(component_name=component)
1712     except NoResultFound:
1715 __all__.append('get_location')
1717 ################################################################################
# ORM class for a maintainer name/email string as stored in the DB.
1719 class Maintainer(ORMObject):
1720     def __init__(self, name = None):
1723     def properties(self):
1724         return ['name', 'maintainer_id']
1726     def not_null_constraints(self):
# Split "Name <email>" into its parts via daklib's fix_maintainer.
1729     def get_split_maintainer(self):
1730         if not hasattr(self, 'name') or self.name is None:
1731             return ('', '', '', '')
1733         return fix_maintainer(self.name.strip())
1735 __all__.append('Maintainer')
# Find-or-create a Maintainer row; mirrors get_or_set_fingerprint's
# session/commit semantics.
1738 def get_or_set_maintainer(name, session=None):
1740     Returns Maintainer object for given maintainer name.
1742     If no matching maintainer name is found, a row is inserted.
1745     @param name: The maintainer name to add
1747     @type session: SQLAlchemy
1748     @param session: Optional SQL session object (a temporary one will be
1749     generated if not supplied). If not passed, a commit will be performed at
1750     the end of the function, otherwise the caller is responsible for commiting.
1751     A flush will be performed either way.
1754     @return: the Maintainer object for the given maintainer
1757     q = session.query(Maintainer).filter_by(name=name)
1760     except NoResultFound:
1761         maintainer = Maintainer()
1762         maintainer.name = name
1763         session.add(maintainer)
1764         session.commit_or_flush()
1769 __all__.append('get_or_set_maintainer')
# Primary-key lookup; returns the Maintainer row or None.
1772 def get_maintainer(maintainer_id, session=None):
1774     Return the name of the maintainer behind C{maintainer_id} or None if that
1775     maintainer_id is invalid.
1777     @type maintainer_id: int
1778     @param maintainer_id: the id of the maintainer
1781     @return: the Maintainer with this C{maintainer_id}
1784     return session.query(Maintainer).get(maintainer_id)
1786 __all__.append('get_maintainer')
1788 ################################################################################
# ORM class for a ftpmaster comment on a package in the NEW queue.
1790 class NewComment(object):
1791     def __init__(self, *args, **kwargs):
1795         return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
1797 __all__.append('NewComment')
# Existence check for a NEW-queue comment on (package, version).
1800 def has_new_comment(package, version, session=None):
1802     Returns true if the given combination of C{package}, C{version} has a comment.
1804     @type package: string
1805     @param package: name of the package
1807     @type version: string
1808     @param version: package version
1810     @type session: Session
1811     @param session: Optional SQLA session object (a temporary one will be
1812     generated if not supplied)
1818     q = session.query(NewComment)
1819     q = q.filter_by(package=package)
1820     q = q.filter_by(version=version)
# NOTE(review): the bool() wrapper is redundant -- the > comparison is
# already boolean.
1822     return bool(q.count() > 0)
1824 __all__.append('has_new_comment')
# Filtered listing of NEW-queue comments; all filters optional.
1827 def get_new_comments(package=None, version=None, comment_id=None, session=None):
1829     Returns (possibly empty) list of NewComment objects for the given
1832     @type package: string (optional)
1833     @param package: name of the package
1835     @type version: string (optional)
1836     @param version: package version
1838     @type comment_id: int (optional)
1839     @param comment_id: An id of a comment
1841     @type session: Session
1842     @param session: Optional SQLA session object (a temporary one will be
1843     generated if not supplied)
1846     @return: A (possibly empty) list of NewComment objects will be returned
1849     q = session.query(NewComment)
1850     if package is not None: q = q.filter_by(package=package)
1851     if version is not None: q = q.filter_by(version=version)
1852     if comment_id is not None: q = q.filter_by(comment_id=comment_id)
1856 __all__.append('get_new_comments')
1858 ################################################################################
# ORM class for an override entry (package -> section/priority per suite).
1860 class Override(object):
1861     def __init__(self, *args, **kwargs):
1865         return '<Override %s (%s)>' % (self.package, self.suite_id)
1867 __all__.append('Override')
# Override lookup for a package, optionally restricted by suite(s),
# component(s) and override type(s); scalar arguments are normalised to
# one-element lists before the IN filters.
1870 def get_override(package, suite=None, component=None, overridetype=None, session=None):
1872     Returns Override object for the given parameters
1874     @type package: string
1875     @param package: The name of the package
1877     @type suite: string, list or None
1878     @param suite: The name of the suite (or suites if a list) to limit to.  If
1879                   None, don't limit.  Defaults to None.
1881     @type component: string, list or None
1882     @param component: The name of the component (or components if a list) to
1883                       limit to.  If None, don't limit.  Defaults to None.
1885     @type overridetype: string, list or None
1886     @param overridetype: The name of the overridetype (or overridetypes if a list) to
1887                          limit to.  If None, don't limit.  Defaults to None.
1889     @type session: Session
1890     @param session: Optional SQLA session object (a temporary one will be
1891     generated if not supplied)
1894     @return: A (possibly empty) list of Override objects will be returned
1897     q = session.query(Override)
1898     q = q.filter_by(package=package)
1900     if suite is not None:
1901         if not isinstance(suite, list): suite = [suite]
1902         q = q.join(Suite).filter(Suite.suite_name.in_(suite))
1904     if component is not None:
1905         if not isinstance(component, list): component = [component]
1906         q = q.join(Component).filter(Component.component_name.in_(component))
1908     if overridetype is not None:
1909         if not isinstance(overridetype, list): overridetype = [overridetype]
1910         q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))
1914 __all__.append('get_override')
1917 ################################################################################
# ORM class for an override type (e.g. deb, udeb, dsc).
1919 class OverrideType(object):
1920     def __init__(self, *args, **kwargs):
1924         return '<OverrideType %s>' % self.overridetype
1926 __all__.append('OverrideType')
# Lookup of an OverrideType by name; None when absent.
1929 def get_override_type(override_type, session=None):
1931     Returns OverrideType object for given C{override type}.
1933     @type override_type: string
1934     @param override_type: The name of the override type
1936     @type session: Session
1937     @param session: Optional SQLA session object (a temporary one will be
1938     generated if not supplied)
1941     @return: the database id for the given override type
1944     q = session.query(OverrideType).filter_by(overridetype=override_type)
1948     except NoResultFound:
1951 __all__.append('get_override_type')
1953 ################################################################################
# ORM class for a deb contents entry (package -> shipped file path).
1955 class DebContents(object):
1956     def __init__(self, *args, **kwargs):
# NOTE(review): repr spells 'DebConetnts' -- typo for 'DebContents'.
# Left untouched here since it is a runtime string; fix in a code change.
1960         return '<DebConetnts %s: %s>' % (self.package.package,self.file)
1962 __all__.append('DebContents')
# ORM class for a udeb contents entry; mirrors DebContents.
1965 class UdebContents(object):
1966     def __init__(self, *args, **kwargs):
# NOTE(review): repr spells 'UdebConetnts' -- typo for 'UdebContents'.
1970         return '<UdebConetnts %s: %s>' % (self.package.package,self.file)
1972 __all__.append('UdebContents')
# ORM class for contents of a binary still waiting in a queue.
1974 class PendingBinContents(object):
1975     def __init__(self, *args, **kwargs):
1979         return '<PendingBinContents %s>' % self.contents_id
1981 __all__.append('PendingBinContents')
# Records the file paths shipped by a not-yet-accepted binary package,
# replacing any previous record for the same (package, version, arch).
# Commits only when it created the session itself; rolls back on error.
1983 def insert_pending_content_paths(package,
1988     Make sure given paths are temporarily associated with given
1992     @param package: the package to associate with should have been read in from the binary control file
1993     @type fullpaths: list
1994     @param fullpaths: the list of paths of the file being associated with the binary
1995     @type session: SQLAlchemy session
1996     @param session: Optional SQLAlchemy session.  If this is passed, the caller
1997     is responsible for ensuring a transaction has begun and committing the
1998     results or rolling back based on the result code.  If not passed, a commit
1999     will be performed at the end of the function
2001     @return: True upon success, False if there is a problem
2004     privatetrans = False
2007         session = DBConn().session()
2011         arch = get_architecture(package['Architecture'], session)
2012         arch_id = arch.arch_id
2014         # Remove any already existing recorded files for this package
2015         q = session.query(PendingBinContents)
2016         q = q.filter_by(package=package['Package'])
2017         q = q.filter_by(version=package['Version'])
2018         q = q.filter_by(architecture=arch_id)
2021         for fullpath in fullpaths:
# Normalise paths recorded relative to the package root ("./usr/bin/x").
2023             if fullpath.startswith( "./" ):
2024                 fullpath = fullpath[2:]
2026             pca = PendingBinContents()
2027             pca.package = package['Package']
2028             pca.version = package['Version']
2030             pca.architecture = arch_id
# 'type' discriminates udeb (8) from deb (7) -- magic values, see schema.
2033                 pca.type = 8 # gross
2035                 pca.type = 7 # also gross
2038         # Only commit if we set up the session ourself
2046     except Exception, e:
2047         traceback.print_exc()
2049         # Only rollback if we set up the session ourself
2056 __all__.append('insert_pending_content_paths')
2058 ################################################################################
# ORM class for a policy queue (e.g. NEW, byhand).
2060 class PolicyQueue(object):
2061     def __init__(self, *args, **kwargs):
2065         return '<PolicyQueue %s>' % self.queue_name
2067 __all__.append('PolicyQueue')
# Lookup of a PolicyQueue by its name; None when absent.
2070 def get_policy_queue(queuename, session=None):
2072     Returns PolicyQueue object for given C{queue name}
2074     @type queuename: string
2075     @param queuename: The name of the queue
2077     @type session: Session
2078     @param session: Optional SQLA session object (a temporary one will be
2079     generated if not supplied)
2082     @return: PolicyQueue object for the given queue
2085     q = session.query(PolicyQueue).filter_by(queue_name=queuename)
2089     except NoResultFound:
2092 __all__.append('get_policy_queue')
# Reverse lookup: find the PolicyQueue whose configured path matches.
2095 def get_policy_queue_from_path(pathname, session=None):
2097     Returns PolicyQueue object for given C{path name}
2099     @type pathname: string
2100     @param pathname: The path
2102     @type session: Session
2103     @param session: Optional SQLA session object (a temporary one will be
2104     generated if not supplied)
2107     @return: PolicyQueue object for the given queue
2110     q = session.query(PolicyQueue).filter_by(path=pathname)
2114     except NoResultFound:
2117 __all__.append('get_policy_queue_from_path')
2119 ################################################################################
# ORM class for a package priority; compares equal to its name string so
# callers can write `pkg.priority == "optional"`.
2121 class Priority(object):
2122     def __init__(self, *args, **kwargs):
2125     def __eq__(self, val):
2126         if isinstance(val, str):
2127             return (self.priority == val)
2128         # This signals to use the normal comparison operator
2129         return NotImplemented
2131     def __ne__(self, val):
2132         if isinstance(val, str):
2133             return (self.priority != val)
2134         # This signals to use the normal comparison operator
2135         return NotImplemented
2138         return '<Priority %s (%s)>' % (self.priority, self.priority_id)
2140 __all__.append('Priority')
# Lookup of a Priority by name; None when absent.
2143 def get_priority(priority, session=None):
2145     Returns Priority object for given C{priority name}.
2147     @type priority: string
2148     @param priority: The name of the priority
2150     @type session: Session
2151     @param session: Optional SQLA session object (a temporary one will be
2152     generated if not supplied)
2155     @return: Priority object for the given priority
2158     q = session.query(Priority).filter_by(priority=priority)
2162     except NoResultFound:
2165 __all__.append('get_priority')
# Builds a {priority name: priority_id} dict over all Priority rows.
2168 def get_priorities(session=None):
2170     Returns dictionary of priority names -> id mappings
2172     @type session: Session
2173     @param session: Optional SQL session object (a temporary one will be
2174     generated if not supplied)
2177     @return: dictionary of priority names -> id mappings
2181     q = session.query(Priority)
2183         ret[x.priority] = x.priority_id
2187 __all__.append('get_priorities')
2189 ################################################################################
# ORM class for an archive section; like Priority, compares equal to its
# name string for convenience.
2191 class Section(object):
2192     def __init__(self, *args, **kwargs):
2195     def __eq__(self, val):
2196         if isinstance(val, str):
2197             return (self.section == val)
2198         # This signals to use the normal comparison operator
2199         return NotImplemented
2201     def __ne__(self, val):
2202         if isinstance(val, str):
2203             return (self.section != val)
2204         # This signals to use the normal comparison operator
2205         return NotImplemented
2208         return '<Section %s>' % self.section
2210 __all__.append('Section')
# Lookup of a Section by name; None when absent.
2213 def get_section(section, session=None):
2215     Returns Section object for given C{section name}.
2217     @type section: string
2218     @param section: The name of the section
2220     @type session: Session
2221     @param session: Optional SQLA session object (a temporary one will be
2222     generated if not supplied)
2225     @return: Section object for the given section name
2228     q = session.query(Section).filter_by(section=section)
2232     except NoResultFound:
2235 __all__.append('get_section')
# Builds a {section name: section_id} dict over all Section rows.
2238 def get_sections(session=None):
2240     Returns dictionary of section names -> id mappings
2242     @type session: Session
2243     @param session: Optional SQL session object (a temporary one will be
2244     generated if not supplied)
2247     @return: dictionary of section names -> id mappings
2251     q = session.query(Section)
2253         ret[x.section] = x.section_id
2257 __all__.append('get_sections')
2259 ################################################################################
# ORM class for a source package row (named DBSource to avoid clashing
# with the many plain 'source' identifiers in this codebase).
2261 class DBSource(ORMObject):
2262     def __init__(self, source = None, version = None, maintainer = None, \
2263         changedby = None, poolfile = None, install_date = None):
2264         self.source = source
2265         self.version = version
2266         self.maintainer = maintainer
2267         self.changedby = changedby
2268         self.poolfile = poolfile
2269         self.install_date = install_date
2271     def properties(self):
2272         return ['source', 'source_id', 'maintainer', 'changedby', \
2273             'fingerprint', 'poolfile', 'version', 'suites_count', \
2274             'install_date', 'binaries_count']
2276     def not_null_constraints(self):
# NOTE(review): 'install_date' is listed twice in this list.
2277         return ['source', 'version', 'install_date', 'maintainer', \
2278             'changedby', 'poolfile', 'install_date']
2280 __all__.append('DBSource')
# Verifies the source for a binary-only upload exists in one of the given
# suites (or any suite mapped onto them via SuiteMappings), accepting
# either the exact version or the binNMU-stripped original version.
2283 def source_exists(source, source_version, suites = ["any"], session=None):
2285     Ensure that source exists somewhere in the archive for the binary
2286     upload being processed.
2287     1. exact match     => 1.0-3
2288     2. bin-only NMU    => 1.0-3+b1 , 1.0-3.1+b1
2290     @type source: string
2291     @param source: source name
2293     @type source_version: string
2294     @param source_version: expected source version
2297     @param suites: list of suites to check in, default I{any}
2299     @type session: Session
2300     @param session: Optional SQLA session object (a temporary one will be
2301     generated if not supplied)
2304     @return: returns 1 if a source with expected version is found, otherwise 0
# Strip a "+bN" binNMU suffix to get the underlying source version.
2311     from daklib.regexes import re_bin_only_nmu
2312     orig_source_version = re_bin_only_nmu.sub('', source_version)
2314     for suite in suites:
2315         q = session.query(DBSource).filter_by(source=source). \
2316             filter(DBSource.version.in_([source_version, orig_source_version]))
2318             # source must exist in suite X, or in some other suite that's
2319             # mapped to X, recursively... silent-maps are counted too,
2320             # unreleased-maps aren't.
2321             maps = cnf.ValueList("SuiteMappings")[:]
2323             maps = [ m.split() for m in maps ]
2324             maps = [ (x[1], x[2]) for x in maps
2325                             if x[0] == "map" or x[0] == "silent-map" ]
# Transitively expand the suite set along the mapping edges.
2328                 if x[1] in s and x[0] not in s:
2331             q = q.filter(DBSource.suites.any(Suite.suite_name.in_(s)))
2336         # No source found so return not ok
2341 __all__.append('source_exists')
# All suites containing a source package of the given name.
2344 def get_suites_source_in(source, session=None):
2346     Returns list of Suite objects which given C{source} name is in
2349     @param source: DBSource package name to search for
2352     @return: list of Suite objects for the given source
2355     return session.query(Suite).filter(Suite.sources.any(source=source)).all()
2357 __all__.append('get_suites_source_in')
# DBSource rows by name, optionally restricted by version and the
# dm_upload_allowed flag.
2360 def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
2362     Returns list of DBSource objects for given C{source} name and other parameters
2365     @param source: DBSource package name to search for
2367     @type version: str or None
2368     @param version: DBSource version name to search for or None if not applicable
2370     @type dm_upload_allowed: bool
2371     @param dm_upload_allowed: If None, no effect.  If True or False, only
2372     return packages with that dm_upload_allowed setting
2374     @type session: Session
2375     @param session: Optional SQL session object (a temporary one will be
2376     generated if not supplied)
2379     @return: list of DBSource objects for the given name (may be empty)
2382     q = session.query(DBSource).filter_by(source=source)
2384     if version is not None:
2385         q = q.filter_by(version=version)
2387     if dm_upload_allowed is not None:
2388         q = q.filter_by(dm_upload_allowed=dm_upload_allowed)
2392 __all__.append('get_sources_from_name')
2394 # FIXME: This function fails badly if it finds more than 1 source package and
2395 # its implementation is trivial enough to be inlined.
# Single DBSource for (source, suite), via Suite.get_sources(); returns
# None when no match (NoResultFound handler body sampled out).
2397 def get_source_in_suite(source, suite, session=None):
2399     Returns a DBSource object for a combination of C{source} and C{suite}.
2401       - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2402       - B{suite} - a suite name, eg. I{unstable}
2404     @type source: string
2405     @param source: source package name
2408     @param suite: the suite name
2411     @return: the version for I{source} in I{suite}
2415     q = get_suite(suite, session).get_sources(source)
2418     except NoResultFound:
2421 __all__.append('get_source_in_suite')
2423 ################################################################################
# Inserts a freshly-accepted source upload into the DB: the DBSource row,
# its pool files and dsc_files links, suite associations and uploader
# list.  `u` is the daklib Upload object; `filename` is the .dsc name.
# Returns (source, dsc_component, dsc_location_id, new PoolFiles).
2426 def add_dsc_to_db(u, filename, session=None):
2427     entry = u.pkg.files[filename]
2431     source.source = u.pkg.dsc["source"]
2432     source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
2433     source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
2434     source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
2435     source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
2436     source.install_date = datetime.now().date()
2438     dsc_component = entry["component"]
2439     dsc_location_id = entry["location id"]
2441     source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")
2443     # Set up a new poolfile if necessary
2444     if not entry.has_key("files id") or not entry["files id"]:
# Pool-relative path = "pool name" prefix + bare .dsc filename.
2445         filename = entry["pool name"] + filename
2446         poolfile = add_poolfile(filename, entry, dsc_location_id, session)
2448         pfs.append(poolfile)
2449         entry["files id"] = poolfile.file_id
2451     source.poolfile_id = entry["files id"]
2454     suite_names = u.pkg.changes["distribution"].keys()
2455     source.suites = session.query(Suite). \
2456         filter(Suite.suite_name.in_(suite_names)).all()
2458     # Add the source files to the DB (files and dsc_files)
2460     dscfile.source_id = source.source_id
2461     dscfile.poolfile_id = entry["files id"]
2462     session.add(dscfile)
# One DSCFile row per file listed in the .dsc (orig tarball, diff, ...).
2464     for dsc_file, dentry in u.pkg.dsc_files.items():
2466         df.source_id = source.source_id
2468         # If the .orig tarball is already in the pool, it's
2469         # files id is stored in dsc_files by check_dsc().
2470         files_id = dentry.get("files id", None)
2472         # Find the entry in the files hash
2473         # TODO: Bail out here properly
2475         for f, e in u.pkg.files.items():
2480         if files_id is None:
2481             filename = dfentry["pool name"] + dsc_file
2483             (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
2484             # FIXME: needs to check for -1/-2 and or handle exception
2485             if found and obj is not None:
2486                 files_id = obj.file_id
2489             # If still not found, add it
2490             if files_id is None:
2491                 # HACK: Force sha1sum etc into dentry
2492                 dentry["sha1sum"] = dfentry["sha1sum"]
2493                 dentry["sha256sum"] = dfentry["sha256sum"]
2494                 poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
2495                 pfs.append(poolfile)
2496                 files_id = poolfile.file_id
2498             poolfile = get_poolfile_by_id(files_id, session)
2499             if poolfile is None:
2500                 utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
2501             pfs.append(poolfile)
2503         df.poolfile_id = files_id
2506     # Add the src_uploaders to the DB
2507     uploader_ids = [source.maintainer_id]
2508     if u.pkg.dsc.has_key("uploaders"):
# Uploaders field is comma-separated; '>, ' is the safe split point.
2509         for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
2511             uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)
2514     for up_id in uploader_ids:
2515         if added_ids.has_key(up_id):
2517             utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))
2523         su.maintainer_id = up_id
2524         su.source_id = source.source_id
2529     return source, dsc_component, dsc_location_id, pfs
2531 __all__.append('add_dsc_to_db')
# Inserts an accepted binary (.deb or .udeb) into the DB: the binary row,
# its pool file, source link and per-suite associations.
2534 def add_deb_to_db(u, filename, session=None):
2536     Contrary to what you might expect, this routine deals with both
2537     debs and udebs.  That info is in 'dbtype', whilst 'type' is
2538     'deb' for both of them
2541     entry = u.pkg.files[filename]
2544     bin.package = entry["package"]
2545     bin.version = entry["version"]
2546     bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
2547     bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
2548     bin.arch_id = get_architecture(entry["architecture"], session).arch_id
2549     bin.binarytype = entry["dbtype"]
2552     filename = entry["pool name"] + filename
2553     fullpath = os.path.join(cnf["Dir::Pool"], filename)
2554     if not entry.get("location id", None):
2555         entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id
2557     if entry.get("files id", None):
# NOTE(review): bin.poolfile_id is read before being assigned on the next
# line -- looks like the get_poolfile_by_id argument should be
# entry["files id"]; confirm against the original file.
2558         poolfile = get_poolfile_by_id(bin.poolfile_id)
2559         bin.poolfile_id = entry["files id"]
2561         poolfile = add_poolfile(filename, entry, entry["location id"], session)
2562         bin.poolfile_id = entry["files id"] = poolfile.file_id
# The source must resolve uniquely or the upload is rejected.
2565     bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
2566     if len(bin_sources) != 1:
2567         raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
2568                                   (bin.package, bin.version, entry["architecture"],
2569                                    filename, bin.binarytype, u.pkg.changes["fingerprint"])
2571     bin.source_id = bin_sources[0].source_id
2573     # Add and flush object so it has an ID
2577     # Add BinAssociations
2578     for suite_name in u.pkg.changes["distribution"].keys():
2579         ba = BinAssociation()
2580         ba.binary_id = bin.binary_id
2581         ba.suite_id = get_suite(suite_name).suite_id
2586     # Deal with contents - disabled for now
2587     #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
2589     #    print "REJECT\nCould not determine contents of package %s" % bin.package
2590     #    session.rollback()
2591     #    raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
2595 __all__.append('add_deb_to_db')
2597 ################################################################################
# Mapped placeholder class for the source_acl table (mapping set up in DBConn).
# NOTE(review): original lines 2601-2603 and 2605 are elided; the visible
# `return` presumably belongs to an elided `def __repr__(self):`.
2599 class SourceACL(object):
2600 def __init__(self, *args, **kwargs):
2604 return '<SourceACL %s>' % self.source_acl_id
2606 __all__.append('SourceACL')
2608 ################################################################################
# Mapped placeholder class for the src_format table (mapping set up in DBConn).
# NOTE(review): original lines 2612-2614 are elided; the visible `return`
# presumably belongs to an elided `def __repr__(self):`.
2610 class SrcFormat(object):
2611 def __init__(self, *args, **kwargs):
2615 return '<SrcFormat %s>' % (self.format_name)
2617 __all__.append('SrcFormat')
2619 ################################################################################
# Mapped placeholder class for the src_uploaders table (mapping in DBConn).
# NOTE(review): original lines 2623-2625 are elided; the visible `return`
# presumably belongs to an elided `def __repr__(self):`.
2621 class SrcUploader(object):
2622 def __init__(self, *args, **kwargs):
2626 return '<SrcUploader %s>' % self.uploader_id
2628 __all__.append('SrcUploader')
2630 ################################################################################
# Display-name -> Suite attribute pairs used when rendering a suite's details
# (consumed by the SUITE_FIELDS loop inside class Suite further down).
# NOTE(review): one list element (original line 2636) is elided here.
2632 SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
2633 ('SuiteID', 'suite_id'),
2634 ('Version', 'version'),
2635 ('Origin', 'origin'),
2637 ('Description', 'description'),
2638 ('Untouchable', 'untouchable'),
2639 ('Announce', 'announce'),
2640 ('Codename', 'codename'),
2641 ('OverrideCodename', 'overridecodename'),
2642 ('ValidTime', 'validtime'),
2643 ('Priority', 'priority'),
2644 ('NotAutomatic', 'notautomatic'),
2645 ('CopyChanges', 'copychanges'),
2646 ('OverrideSuite', 'overridesuite')]
2648 # Why the heck don't we have any UNIQUE constraints in table suite?
2649 # TODO: Add UNIQUE constraints for appropriate columns.
# ORM class for the 'suite' table; inherits representation/validation helpers
# from ORMObject (defined earlier in the file, outside this excerpt).
2650 class Suite(ORMObject):
2651 def __init__(self, suite_name = None, version = None):
2652 self.suite_name = suite_name
2653 self.version = version
# Attribute names exposed by the ORMObject-style property machinery
# (ORMObject itself is not visible in this excerpt -- confirm semantics there).
2655 def properties(self):
2656 return ['suite_name', 'version', 'sources_count', 'binaries_count']
2658 def not_null_constraints(self):
2659 return ['suite_name', 'version']
# Allow comparing a Suite directly against its name string.
2661 def __eq__(self, val):
2662 if isinstance(val, str):
2663 return (self.suite_name == val)
2664 # This signals to use the normal comparison operator
2665 return NotImplemented
2667 def __ne__(self, val):
2668 if isinstance(val, str):
2669 return (self.suite_name != val)
2670 # This signals to use the normal comparison operator
2671 return NotImplemented
# NOTE(review): the method header enclosing the loop below (presumably
# `def __str__(self):`, original lines 2672-2674) and the initialisation of
# `ret` are elided from this excerpt.
2675 for disp, field in SUITE_FIELDS:
2676 val = getattr(self, field, None)
2678 ret.append("%s: %s" % (disp, val))
2680 return "\n".join(ret)
2682 def get_architectures(self, skipsrc=False, skipall=False):
2684 Returns list of Architecture objects
2686 @type skipsrc: boolean
2687 @param skipsrc: Whether to skip returning the 'source' architecture entry
2690 @type skipall: boolean
2691 @param skipall: Whether to skip returning the 'all' architecture entry
2695 @return: list of Architecture objects for the given name (may be empty)
# NOTE(review): the `if skipsrc:` / `if skipall:` guards for the two filters
# below (original lines 2699 and 2701) are elided from this excerpt.
2698 q = object_session(self).query(Architecture).with_parent(self)
2700 q = q.filter(Architecture.arch_string != 'source')
2702 q = q.filter(Architecture.arch_string != 'all')
2703 return q.order_by(Architecture.arch_string).all()
2705 def get_sources(self, source):
2707 Returns a query object representing DBSource that is part of C{suite}.
2709 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2711 @type source: string
2712 @param source: source package name
2714 @rtype: sqlalchemy.orm.query.Query
2715 @return: a query of DBSource
# NOTE(review): the continuation of this query expression (original lines
# 2721-2722) is elided; the trailing backslash shows the statement is
# incomplete as shown here.
2719 session = object_session(self)
2720 return session.query(DBSource).filter_by(source = source). \
2723 __all__.append('Suite')
# Look up a Suite row by name; the docstring says None is returned when the
# suite is unknown.
# NOTE(review): elided lines around this function presumably include the
# machinery that creates a temporary session when none is supplied, plus the
# `try:` / `return q.one()` statements that match the visible `except`.
2726 def get_suite(suite, session=None):
2728 Returns Suite object for given C{suite name}.
2731 @param suite: The name of the suite
2733 @type session: Session
2734 @param session: Optional SQLA session object (a temporary one will be
2735 generated if not supplied)
2738 @return: Suite object for the requested suite name (None if not present)
2741 q = session.query(Suite).filter_by(suite_name=suite)
2745 except NoResultFound:
2748 __all__.append('get_suite')
2750 ################################################################################
2752 # TODO: should be removed because the implementation is too trivial
# Thin wrapper: delegate to Suite.get_architectures for the named suite.
# NOTE(review): per get_suite's docstring an unknown suite yields None, and
# this delegation would then raise AttributeError -- confirm callers rely on
# valid suite names.
2754 def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
2756 Returns list of Architecture objects for given C{suite} name
2759 @param suite: Suite name to search for
2761 @type skipsrc: boolean
2762 @param skipsrc: Whether to skip returning the 'source' architecture entry
2765 @type skipall: boolean
2766 @param skipall: Whether to skip returning the 'all' architecture entry
2769 @type session: Session
2770 @param session: Optional SQL session object (a temporary one will be
2771 generated if not supplied)
2774 @return: list of Architecture objects for the given name (may be empty)
2777 return get_suite(suite, session).get_architectures(skipsrc, skipall)
2779 __all__.append('get_suite_architectures')
2781 ################################################################################
# Mapped placeholder class for the suite_src_formats association table.
# NOTE(review): original lines 2785-2787 are elided; the visible `return`
# presumably belongs to an elided `def __repr__(self):`.
2783 class SuiteSrcFormat(object):
2784 def __init__(self, *args, **kwargs):
2788 return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
2790 __all__.append('SuiteSrcFormat')
# Return the SrcFormat rows permitted in the named suite, ordered by name.
# NOTE(review): the terminating statement of this function (presumably
# `return q.all()`, original lines 2812-2814) and the docstring delimiters
# are elided from this excerpt.
2793 def get_suite_src_formats(suite, session=None):
2795 Returns list of allowed SrcFormat for C{suite}.
2798 @param suite: Suite name to search for
2800 @type session: Session
2801 @param session: Optional SQL session object (a temporary one will be
2802 generated if not supplied)
2805 @return: the list of allowed source formats for I{suite}
# Join src_format -> suite_src_formats -> suite, filtering on the suite name.
2808 q = session.query(SrcFormat)
2809 q = q.join(SuiteSrcFormat)
2810 q = q.join(Suite).filter_by(suite_name=suite)
2811 q = q.order_by('format_name')
2815 __all__.append('get_suite_src_formats')
2817 ################################################################################
# ORM class for the 'uid' table (related to Fingerprint via the mapper below).
# NOTE(review): the attribute assignments of __init__ (original lines
# 2821-2822) and the return statement of not_null_constraints (2840-2841)
# are elided from this excerpt.
2819 class Uid(ORMObject):
2820 def __init__(self, uid = None, name = None):
# Allow comparing a Uid directly against its uid string.
2824 def __eq__(self, val):
2825 if isinstance(val, str):
2826 return (self.uid == val)
2827 # This signals to use the normal comparison operator
2828 return NotImplemented
2830 def __ne__(self, val):
2831 if isinstance(val, str):
2832 return (self.uid != val)
2833 # This signals to use the normal comparison operator
2834 return NotImplemented
2836 def properties(self):
2837 return ['uid', 'name', 'fingerprint']
2839 def not_null_constraints(self):
2842 __all__.append('Uid')
# Fetch the Uid row for `uidname`, inserting one first when it is missing.
# NOTE(review): the `try:` matching the visible `except NoResultFound:` and
# the insert-on-miss statements are elided from this excerpt (embedded line
# numbers jump 2863 -> 2867 -> 2871).
2845 def get_or_set_uid(uidname, session=None):
2847 Returns uid object for given uidname.
2849 If no matching uidname is found, a row is inserted.
2851 @type uidname: string
2852 @param uidname: The uid to add
2854 @type session: SQLAlchemy
2855 @param session: Optional SQL session object (a temporary one will be
2856 generated if not supplied). If not passed, a commit will be performed at
2857 the end of the function, otherwise the caller is responsible for commiting.
2860 @return: the uid object for the given uidname
2863 q = session.query(Uid).filter_by(uid=uidname)
2867 except NoResultFound:
# Commit-or-flush: per the docstring, commits only for internally-created
# sessions; caller-supplied sessions are flushed and left for the caller.
2871 session.commit_or_flush()
2876 __all__.append('get_or_set_uid')
# Look up the Uid joined to the given fingerprint string.
# NOTE(review): the `try:` / `return q.one()` and the not-found return path
# (original lines 2882-2884 and 2886-2887) are elided from this excerpt.
2879 def get_uid_from_fingerprint(fpr, session=None):
2880 q = session.query(Uid)
2881 q = q.join(Fingerprint).filter_by(fingerprint=fpr)
2885 except NoResultFound:
2888 __all__.append('get_uid_from_fingerprint')
2890 ################################################################################
# Mapped placeholder class for the upload_blocks table (mapping in DBConn).
# NOTE(review): original lines 2894-2896 are elided; the visible `return`
# presumably belongs to an elided `def __repr__(self):`.
2892 class UploadBlock(object):
2893 def __init__(self, *args, **kwargs):
2897 return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
2899 __all__.append('UploadBlock')
2901 ################################################################################
# Shared-state ("Borg") singleton wrapping dak's database connection: the
# assignment of self.__dict__ to self.__shared_state makes all instances
# share one state dict.
# NOTE(review): this excerpt elides many original lines (the embedded line
# numbers jump throughout) -- among them the class docstring delimiters, the
# `__shared_state` class attribute, the call that actually opens the
# connection inside __init__, many table/view name entries, and the
# `def session(self):` header for the final `return`.
2903 class DBConn(object):
2905 database module init.
2909 def __init__(self, *args, **kwargs):
2910 self.__dict__ = self.__shared_state
# Only initialise once across all instances; 'debug' kwarg enables SQL echo.
2912 if not getattr(self, 'initialised', False):
2913 self.initialised = True
2914 self.debug = kwargs.has_key('debug')
# Reflect the database tables/views into SQLAlchemy Table objects, stored as
# self.tbl_<name> / self.view_<name> attributes.
2917 def __setuptables(self):
# NOTE(review): most entries of this tuple (original lines 2919-2928,
# 2932-2940, 2942-2952) are elided from this excerpt.
2918 tables_with_primary = (
2929 'changes_pending_binaries',
2930 'changes_pending_files',
2931 'changes_pending_source',
2941 'pending_bin_contents',
2953 # The following tables have primary keys but sqlalchemy
2954 # version 0.5 fails to reflect them correctly with database
2955 # versions before upgrade #41.
2957 #'build_queue_files',
2960 tables_no_primary = (
2962 'changes_pending_files_map',
2963 'changes_pending_source_files',
2964 'changes_pool_files',
2967 'suite_architectures',
2968 'suite_src_formats',
2969 'suite_build_queue_copy',
2971 # see the comment above
2973 'build_queue_files',
# NOTE(review): the `views = (` opener for the tuple below (original lines
# 2974-2976) is elided from this excerpt.
2977 'almost_obsolete_all_associations',
2978 'almost_obsolete_src_associations',
2979 'any_associations_source',
2980 'bin_assoc_by_arch',
2981 'bin_associations_binaries',
2982 'binaries_suite_arch',
2983 'binfiles_suite_component_arch',
2986 'newest_all_associations',
2987 'newest_any_associations',
2989 'newest_src_association',
2990 'obsolete_all_associations',
2991 'obsolete_any_associations',
2992 'obsolete_any_by_all_associations',
2993 'obsolete_src_associations',
2995 'src_associations_bin',
2996 'src_associations_src',
2997 'suite_arch_by_name',
3000 # Sqlalchemy version 0.5 fails to reflect the SERIAL type
3001 # correctly and that is why we have to use a workaround. It can
3002 # be removed as soon as we switch to version 0.6.
3003 for table_name in tables_with_primary:
3004 table = Table(table_name, self.db_meta, \
3005 Column('id', Integer, primary_key = True), \
3006 autoload=True, useexisting=True)
3007 setattr(self, 'tbl_%s' % table_name, table)
3009 for table_name in tables_no_primary:
3010 table = Table(table_name, self.db_meta, autoload=True)
3011 setattr(self, 'tbl_%s' % table_name, table)
3013 for view_name in views:
3014 view = Table(view_name, self.db_meta, autoload=True)
3015 setattr(self, 'view_%s' % view_name, view)
# Classic (non-declarative, SQLAlchemy 0.5-style) mapper configuration tying
# the ORM classes defined in this module to the reflected tables.
3017 def __setupmappers(self):
3018 mapper(Architecture, self.tbl_architecture,
3019 properties = dict(arch_id = self.tbl_architecture.c.id,
3020 suites = relation(Suite, secondary=self.tbl_suite_architectures,
3021 order_by='suite_name',
3022 backref=backref('architectures', order_by='arch_string'))),
3023 extension = validator)
3025 mapper(Archive, self.tbl_archive,
3026 properties = dict(archive_id = self.tbl_archive.c.id,
3027 archive_name = self.tbl_archive.c.name))
3029 mapper(BinAssociation, self.tbl_bin_associations,
3030 properties = dict(ba_id = self.tbl_bin_associations.c.id,
3031 suite_id = self.tbl_bin_associations.c.suite,
3032 suite = relation(Suite),
3033 binary_id = self.tbl_bin_associations.c.bin,
3034 binary = relation(DBBinary)))
3036 mapper(PendingBinContents, self.tbl_pending_bin_contents,
3037 properties = dict(contents_id =self.tbl_pending_bin_contents.c.id,
3038 filename = self.tbl_pending_bin_contents.c.filename,
3039 package = self.tbl_pending_bin_contents.c.package,
3040 version = self.tbl_pending_bin_contents.c.version,
3041 arch = self.tbl_pending_bin_contents.c.arch,
3042 otype = self.tbl_pending_bin_contents.c.type))
3044 mapper(DebContents, self.tbl_deb_contents,
3045 properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
3046 package=self.tbl_deb_contents.c.package,
3047 suite=self.tbl_deb_contents.c.suite,
3048 arch=self.tbl_deb_contents.c.arch,
3049 section=self.tbl_deb_contents.c.section,
3050 filename=self.tbl_deb_contents.c.filename))
3052 mapper(UdebContents, self.tbl_udeb_contents,
3053 properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
3054 package=self.tbl_udeb_contents.c.package,
3055 suite=self.tbl_udeb_contents.c.suite,
3056 arch=self.tbl_udeb_contents.c.arch,
3057 section=self.tbl_udeb_contents.c.section,
3058 filename=self.tbl_udeb_contents.c.filename))
3060 mapper(BuildQueue, self.tbl_build_queue,
3061 properties = dict(queue_id = self.tbl_build_queue.c.id))
3063 mapper(BuildQueueFile, self.tbl_build_queue_files,
3064 properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
3065 poolfile = relation(PoolFile, backref='buildqueueinstances')))
3067 mapper(DBBinary, self.tbl_binaries,
3068 properties = dict(binary_id = self.tbl_binaries.c.id,
3069 package = self.tbl_binaries.c.package,
3070 version = self.tbl_binaries.c.version,
3071 maintainer_id = self.tbl_binaries.c.maintainer,
3072 maintainer = relation(Maintainer),
3073 source_id = self.tbl_binaries.c.source,
3074 source = relation(DBSource, backref='binaries'),
3075 arch_id = self.tbl_binaries.c.architecture,
3076 architecture = relation(Architecture),
3077 poolfile_id = self.tbl_binaries.c.file,
3078 poolfile = relation(PoolFile),
3079 binarytype = self.tbl_binaries.c.type,
3080 fingerprint_id = self.tbl_binaries.c.sig_fpr,
3081 fingerprint = relation(Fingerprint),
3082 install_date = self.tbl_binaries.c.install_date,
3083 suites = relation(Suite, secondary=self.tbl_bin_associations,
3084 backref=backref('binaries', lazy='dynamic')),
3085 binassociations = relation(BinAssociation,
3086 primaryjoin=(self.tbl_binaries.c.id==self.tbl_bin_associations.c.bin))),
3087 extension = validator)
3089 mapper(BinaryACL, self.tbl_binary_acl,
3090 properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))
3092 mapper(BinaryACLMap, self.tbl_binary_acl_map,
3093 properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
3094 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
3095 architecture = relation(Architecture)))
3097 mapper(Component, self.tbl_component,
3098 properties = dict(component_id = self.tbl_component.c.id,
3099 component_name = self.tbl_component.c.name))
3101 mapper(DBConfig, self.tbl_config,
3102 properties = dict(config_id = self.tbl_config.c.id))
3104 mapper(DSCFile, self.tbl_dsc_files,
3105 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
3106 source_id = self.tbl_dsc_files.c.source,
3107 source = relation(DBSource),
3108 poolfile_id = self.tbl_dsc_files.c.file,
3109 poolfile = relation(PoolFile)))
3111 mapper(PoolFile, self.tbl_files,
3112 properties = dict(file_id = self.tbl_files.c.id,
3113 filesize = self.tbl_files.c.size,
3114 location_id = self.tbl_files.c.location,
3115 location = relation(Location,
3116 # using lazy='dynamic' in the back
3117 # reference because we have A LOT of
3118 # files in one location
3119 backref=backref('files', lazy='dynamic'))),
3120 extension = validator)
3122 mapper(Fingerprint, self.tbl_fingerprint,
3123 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
3124 uid_id = self.tbl_fingerprint.c.uid,
3125 uid = relation(Uid),
3126 keyring_id = self.tbl_fingerprint.c.keyring,
3127 keyring = relation(Keyring),
3128 source_acl = relation(SourceACL),
3129 binary_acl = relation(BinaryACL)),
3130 extension = validator)
3132 mapper(Keyring, self.tbl_keyrings,
3133 properties = dict(keyring_name = self.tbl_keyrings.c.name,
3134 keyring_id = self.tbl_keyrings.c.id))
3136 mapper(DBChange, self.tbl_changes,
3137 properties = dict(change_id = self.tbl_changes.c.id,
3138 poolfiles = relation(PoolFile,
3139 secondary=self.tbl_changes_pool_files,
3140 backref="changeslinks"),
3141 seen = self.tbl_changes.c.seen,
3142 source = self.tbl_changes.c.source,
3143 binaries = self.tbl_changes.c.binaries,
3144 architecture = self.tbl_changes.c.architecture,
3145 distribution = self.tbl_changes.c.distribution,
3146 urgency = self.tbl_changes.c.urgency,
3147 maintainer = self.tbl_changes.c.maintainer,
3148 changedby = self.tbl_changes.c.changedby,
3149 date = self.tbl_changes.c.date,
3150 version = self.tbl_changes.c.version,
3151 files = relation(ChangePendingFile,
3152 secondary=self.tbl_changes_pending_files_map,
3153 backref="changesfile"),
3154 in_queue_id = self.tbl_changes.c.in_queue,
3155 in_queue = relation(PolicyQueue,
3156 primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
3157 approved_for_id = self.tbl_changes.c.approved_for))
3159 mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
3160 properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))
3162 mapper(ChangePendingFile, self.tbl_changes_pending_files,
3163 properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
3164 filename = self.tbl_changes_pending_files.c.filename,
3165 size = self.tbl_changes_pending_files.c.size,
3166 md5sum = self.tbl_changes_pending_files.c.md5sum,
3167 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
3168 sha256sum = self.tbl_changes_pending_files.c.sha256sum))
3170 mapper(ChangePendingSource, self.tbl_changes_pending_source,
3171 properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
3172 change = relation(DBChange),
3173 maintainer = relation(Maintainer,
3174 primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
3175 changedby = relation(Maintainer,
3176 primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
3177 fingerprint = relation(Fingerprint),
3178 source_files = relation(ChangePendingFile,
3179 secondary=self.tbl_changes_pending_source_files,
3180 backref="pending_sources")))
3183 mapper(KeyringACLMap, self.tbl_keyring_acl_map,
3184 properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
3185 keyring = relation(Keyring, backref="keyring_acl_map"),
3186 architecture = relation(Architecture)))
3188 mapper(Location, self.tbl_location,
3189 properties = dict(location_id = self.tbl_location.c.id,
3190 component_id = self.tbl_location.c.component,
3191 component = relation(Component),
3192 archive_id = self.tbl_location.c.archive,
3193 archive = relation(Archive),
3194 # FIXME: the 'type' column is old cruft and
3195 # should be removed in the future.
3196 archive_type = self.tbl_location.c.type),
3197 extension = validator)
3199 mapper(Maintainer, self.tbl_maintainer,
3200 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
3201 maintains_sources = relation(DBSource, backref='maintainer',
3202 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
3203 changed_sources = relation(DBSource, backref='changedby',
3204 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
3205 extension = validator)
3207 mapper(NewComment, self.tbl_new_comments,
3208 properties = dict(comment_id = self.tbl_new_comments.c.id))
3210 mapper(Override, self.tbl_override,
3211 properties = dict(suite_id = self.tbl_override.c.suite,
3212 suite = relation(Suite),
3213 package = self.tbl_override.c.package,
3214 component_id = self.tbl_override.c.component,
3215 component = relation(Component),
3216 priority_id = self.tbl_override.c.priority,
3217 priority = relation(Priority),
3218 section_id = self.tbl_override.c.section,
3219 section = relation(Section),
3220 overridetype_id = self.tbl_override.c.type,
3221 overridetype = relation(OverrideType)))
3223 mapper(OverrideType, self.tbl_override_type,
3224 properties = dict(overridetype = self.tbl_override_type.c.type,
3225 overridetype_id = self.tbl_override_type.c.id))
3227 mapper(PolicyQueue, self.tbl_policy_queue,
3228 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))
3230 mapper(Priority, self.tbl_priority,
3231 properties = dict(priority_id = self.tbl_priority.c.id))
3233 mapper(Section, self.tbl_section,
3234 properties = dict(section_id = self.tbl_section.c.id,
3235 section=self.tbl_section.c.section))
3237 mapper(DBSource, self.tbl_source,
3238 properties = dict(source_id = self.tbl_source.c.id,
3239 version = self.tbl_source.c.version,
3240 maintainer_id = self.tbl_source.c.maintainer,
3241 poolfile_id = self.tbl_source.c.file,
3242 poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
3243 fingerprint_id = self.tbl_source.c.sig_fpr,
3244 fingerprint = relation(Fingerprint),
3245 changedby_id = self.tbl_source.c.changedby,
3246 srcfiles = relation(DSCFile,
3247 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
3248 suites = relation(Suite, secondary=self.tbl_src_associations,
3249 backref=backref('sources', lazy='dynamic')),
3250 srcuploaders = relation(SrcUploader)),
3251 extension = validator)
3253 mapper(SourceACL, self.tbl_source_acl,
3254 properties = dict(source_acl_id = self.tbl_source_acl.c.id))
3256 mapper(SrcFormat, self.tbl_src_format,
3257 properties = dict(src_format_id = self.tbl_src_format.c.id,
3258 format_name = self.tbl_src_format.c.format_name))
3260 mapper(SrcUploader, self.tbl_src_uploaders,
3261 properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
3262 source_id = self.tbl_src_uploaders.c.source,
3263 source = relation(DBSource,
3264 primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
3265 maintainer_id = self.tbl_src_uploaders.c.maintainer,
3266 maintainer = relation(Maintainer,
3267 primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))
3269 mapper(Suite, self.tbl_suite,
3270 properties = dict(suite_id = self.tbl_suite.c.id,
3271 policy_queue = relation(PolicyQueue),
3272 copy_queues = relation(BuildQueue,
3273 secondary=self.tbl_suite_build_queue_copy)),
3274 extension = validator)
3276 mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
3277 properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
3278 suite = relation(Suite, backref='suitesrcformats'),
3279 src_format_id = self.tbl_suite_src_formats.c.src_format,
3280 src_format = relation(SrcFormat)))
3282 mapper(Uid, self.tbl_uid,
3283 properties = dict(uid_id = self.tbl_uid.c.id,
3284 fingerprint = relation(Fingerprint)),
3285 extension = validator)
3287 mapper(UploadBlock, self.tbl_upload_blocks,
3288 properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
3289 fingerprint = relation(Fingerprint, backref="uploadblocks"),
3290 uid = relation(Uid, backref="uploadblocks")))
3292 ## Connection functions
# Build the postgres connection string from the dak config, create the engine
# and session factory, then reflect tables and configure mappers.
# NOTE(review): the `cnf = Config()` assignment and the `if`/`else` wrapping
# the two connstr-building branches below (TCP with DB::Host vs local socket)
# are elided from this excerpt (original lines 3295-3297 and 3302-3303).
3293 def __createconn(self):
3294 from config import Config
3298 connstr = "postgres://%s" % cnf["DB::Host"]
3299 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3300 connstr += ":%s" % cnf["DB::Port"]
3301 connstr += "/%s" % cnf["DB::Name"]
3304 connstr = "postgres:///%s" % cnf["DB::Name"]
3305 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3306 connstr += "?port=%s" % cnf["DB::Port"]
3308 self.db_pg = create_engine(connstr, echo=self.debug)
3309 self.db_meta = MetaData()
3310 self.db_meta.bind = self.db_pg
3311 self.db_smaker = sessionmaker(bind=self.db_pg,
3315 self.__setuptables()
3316 self.__setupmappers()
# NOTE(review): this `return` belongs to an elided `def session(self):`
# (original lines 3317-3318): it hands out a new session from the factory.
3319 return self.db_smaker()
3321 __all__.append('DBConn')