5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
47 import simplejson as json
49 from datetime import datetime, timedelta
50 from errno import ENOENT
51 from tempfile import mkstemp, mkdtemp
53 from inspect import getargspec
56 from sqlalchemy import create_engine, Table, MetaData, Column, Integer
57 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
58 backref, MapperExtension, EXT_CONTINUE
59 from sqlalchemy import types as sqltypes
61 # Don't remove this, we re-export the exceptions to scripts which import us
62 from sqlalchemy.exc import *
63 from sqlalchemy.orm.exc import NoResultFound
65 # Only import Config until Queue stuff is changed to store its config
67 from config import Config
68 from textutils import fix_maintainer
69 from dak_exceptions import DBUpdateError, NoSourceFieldError
71 # suppress some deprecation warnings in squeeze related to sqlalchemy
73 warnings.filterwarnings('ignore', \
74 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
76 # TODO: sqlalchemy needs some extra configuration to correctly reflect
77 # the ind_deb_contents_* indexes - we ignore the warnings at the moment
78 warnings.filterwarnings("ignore", 'Predicate of partial index', SAWarning)
81 ################################################################################
83 # Patch in support for the debversion field type so that it works during
87 # that is for sqlalchemy 0.6
88 UserDefinedType = sqltypes.UserDefinedType
90 # this one for sqlalchemy 0.5
91 UserDefinedType = sqltypes.TypeEngine
93 class DebVersion(UserDefinedType):
94 def get_col_spec(self):
97 def bind_processor(self, dialect):
100 # ' = None' is needed for sqlalchemy 0.5:
101 def result_processor(self, dialect, coltype = None):
104 sa_major_version = sqlalchemy.__version__[0:3]
105 if sa_major_version in ["0.5", "0.6"]:
106 from sqlalchemy.databases import postgres
107 postgres.ischema_names['debversion'] = DebVersion
109 raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
111 ################################################################################
113 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
115 ################################################################################
117 def session_wrapper(fn):
119 Wrapper around common ".., session=None):" handling. If the wrapped
120 function is called without passing 'session', we create a local one
121 and destroy it when the function ends.
123 Also attaches a commit_or_flush method to the session; if we created a
124 local session, this is a synonym for session.commit(), otherwise it is a
125 synonym for session.flush().
128 def wrapped(*args, **kwargs):
129 private_transaction = False
131 # Find the session object
132 session = kwargs.get('session')
135 if len(args) <= len(getargspec(fn)[0]) - 1:
136 # No session specified as last argument or in kwargs
137 private_transaction = True
138 session = kwargs['session'] = DBConn().session()
140 # Session is last argument in args
144 session = args[-1] = DBConn().session()
145 private_transaction = True
147 if private_transaction:
148 session.commit_or_flush = session.commit
150 session.commit_or_flush = session.flush
153 return fn(*args, **kwargs)
155 if private_transaction:
156 # We created a session; close it.
159 wrapped.__doc__ = fn.__doc__
160 wrapped.func_name = fn.func_name
164 __all__.append('session_wrapper')
166 ################################################################################
168 class ORMObject(object):
170 ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
171 derived classes must implement the properties() method.
174 def properties(self):
176 This method should be implemented by all derived classes and returns a
177 list of the important properties. The properties 'created' and
178 'modified' will be added automatically. A suffix '_count' should be
179 added to properties that are lists or query objects. The most important
180 property name should be returned as the first element in the list
181 because it is used by repr().
187 Returns a JSON representation of the object based on the properties
188 returned from the properties() method.
191 # add created and modified
192 all_properties = self.properties() + ['created', 'modified']
193 for property in all_properties:
194 # check for list or query
195 if property[-6:] == '_count':
196 real_property = property[:-6]
197 if not hasattr(self, real_property):
199 value = getattr(self, real_property)
200 if hasattr(value, '__len__'):
203 elif hasattr(value, 'count'):
205 value = value.count()
207 raise KeyError('Do not understand property %s.' % property)
209 if not hasattr(self, property):
212 value = getattr(self, property)
216 elif isinstance(value, ORMObject):
217 # use repr() for ORMObject types
220 # we want a string for all other types because json cannot
223 data[property] = value
224 return json.dumps(data)
228 Returns the name of the class.
230 return type(self).__name__
234 Returns a short string representation of the object using the first
235 element from the properties() method.
237 primary_property = self.properties()[0]
238 value = getattr(self, primary_property)
239 return '<%s %s>' % (self.classname(), str(value))
243 Returns a human readable form of the object using the properties()
246 return '<%s %s>' % (self.classname(), self.json())
248 def not_null_constraints(self):
250 Returns a list of properties that must be not NULL. Derived classes
251 should override this method if needed.
255 validation_message = \
256 "Validation failed because property '%s' must not be empty in object\n%s"
260 This function validates the not NULL constraints as returned by
261 not_null_constraints(). It raises the DBUpdateError exception if
264 for property in self.not_null_constraints():
265 # TODO: It is a bit awkward that the mapper configuration allow
266 # directly setting the numeric _id columns. We should get rid of it
268 if hasattr(self, property + '_id') and \
269 getattr(self, property + '_id') is not None:
271 if not hasattr(self, property) or getattr(self, property) is None:
272 raise DBUpdateError(self.validation_message % \
273 (property, str(self)))
    def get(cls, primary_key, session = None):
        """
        This is a support function that allows getting an object by its primary
        key.

        Architecture.get(3[, session])

        instead of the more verbose

        session.query(Architecture).get(3)
        """
        # NOTE(review): presumably decorated with @classmethod and
        # @session_wrapper outside this excerpt so that 'cls' is bound and a
        # default session is supplied — confirm against the full file.
        return session.query(cls).get(primary_key)
290 __all__.append('ORMObject')
292 ################################################################################
294 class Validator(MapperExtension):
296 This class calls the validate() method for each instance for the
297 'before_update' and 'before_insert' events. A global object validator is
298 used for configuring the individual mappers.
301 def before_update(self, mapper, connection, instance):
305 def before_insert(self, mapper, connection, instance):
309 validator = Validator()
311 ################################################################################
class Architecture(ORMObject):
    """ORM class for the architecture table; comparable against plain strings."""

    def __init__(self, arch_string = None, description = None):
        self.description = description
        self.arch_string = arch_string

    def __eq__(self, val):
        # Only plain strings are compared against arch_string; everything
        # else falls back to the default comparison machinery.
        if not isinstance(val, str):
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        if not isinstance(val, str):
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # 'arch_string' first — ORMObject.__repr__() uses the first entry.
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']
336 __all__.append('Architecture')
339 def get_architecture(architecture, session=None):
341 Returns database id for given C{architecture}.
343 @type architecture: string
344 @param architecture: The name of the architecture
346 @type session: Session
347 @param session: Optional SQLA session object (a temporary one will be
348 generated if not supplied)
351 @return: Architecture object for the given arch (None if not present)
354 q = session.query(Architecture).filter_by(arch_string=architecture)
358 except NoResultFound:
361 __all__.append('get_architecture')
363 # TODO: should be removed because the implementation is too trivial
def get_architecture_suites(architecture, session=None):
    """
    Returns list of Suite objects for given C{architecture} name

    @type architecture: str
    @param architecture: Architecture name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Suite objects for the given name (may be empty)
    """

    # NOTE(review): get_architecture() returns None for an unknown
    # architecture, which would make this raise AttributeError — confirm
    # callers only pass known architecture names.
    return get_architecture(architecture, session).suites
382 __all__.append('get_architecture_suites')
384 ################################################################################
386 class Archive(object):
387 def __init__(self, *args, **kwargs):
391 return '<Archive %s>' % self.archive_name
393 __all__.append('Archive')
396 def get_archive(archive, session=None):
398 returns database id for given C{archive}.
400 @type archive: string
401 @param archive: the name of the arhive
403 @type session: Session
404 @param session: Optional SQLA session object (a temporary one will be
405 generated if not supplied)
408 @return: Archive object for the given name (None if not present)
411 archive = archive.lower()
413 q = session.query(Archive).filter_by(archive_name=archive)
417 except NoResultFound:
420 __all__.append('get_archive')
422 ################################################################################
424 class BinAssociation(object):
425 def __init__(self, *args, **kwargs):
429 return '<BinAssociation %s (%s, %s)>' % (self.ba_id, self.binary, self.suite)
431 __all__.append('BinAssociation')
433 ################################################################################
435 class BinContents(object):
436 def __init__(self, *args, **kwargs):
440 return '<BinContents (%s, %s)>' % (self.binary, self.filename)
442 __all__.append('BinContents')
444 ################################################################################
446 class DBBinary(object):
447 def __init__(self, *args, **kwargs):
451 return '<DBBinary %s (%s, %s)>' % (self.package, self.version, self.architecture)
453 __all__.append('DBBinary')
def get_suites_binary_in(package, session=None):
    """
    Returns list of Suite objects which given C{package} name is in

    @type package: str
    @param package: DBBinary package name to search for

    @rtype: list
    @return: list of Suite objects for the given package
    """

    # NOTE(review): 'session' is presumably injected by the session_wrapper
    # decorator when not supplied by the caller — confirm in the full file.
    return session.query(Suite).join(BinAssociation).join(DBBinary).filter_by(package=package).all()
469 __all__.append('get_suites_binary_in')
472 def get_binary_from_id(binary_id, session=None):
474 Returns DBBinary object for given C{id}
477 @param binary_id: Id of the required binary
479 @type session: Session
480 @param session: Optional SQLA session object (a temporary one will be
481 generated if not supplied)
484 @return: DBBinary object for the given binary (None if not present)
487 q = session.query(DBBinary).filter_by(binary_id=binary_id)
491 except NoResultFound:
494 __all__.append('get_binary_from_id')
497 def get_binaries_from_name(package, version=None, architecture=None, session=None):
499 Returns list of DBBinary objects for given C{package} name
502 @param package: DBBinary package name to search for
504 @type version: str or None
505 @param version: Version to search for (or None)
507 @type architecture: str, list or None
508 @param architecture: Architectures to limit to (or None if no limit)
510 @type session: Session
511 @param session: Optional SQL session object (a temporary one will be
512 generated if not supplied)
515 @return: list of DBBinary objects for the given name (may be empty)
518 q = session.query(DBBinary).filter_by(package=package)
520 if version is not None:
521 q = q.filter_by(version=version)
523 if architecture is not None:
524 if not isinstance(architecture, list):
525 architecture = [architecture]
526 q = q.join(Architecture).filter(Architecture.arch_string.in_(architecture))
532 __all__.append('get_binaries_from_name')
def get_binaries_from_source_id(source_id, session=None):
    """
    Returns list of DBBinary objects for given C{source_id}

    @type source_id: int
    @param source_id: source_id to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBBinary objects for the given name (may be empty)
    """

    return session.query(DBBinary).filter_by(source_id=source_id).all()
552 __all__.append('get_binaries_from_source_id')
555 def get_binary_from_name_suite(package, suitename, session=None):
556 ### For dak examine-package
557 ### XXX: Doesn't use object API yet
559 sql = """SELECT DISTINCT(b.package), b.version, c.name, su.suite_name
560 FROM binaries b, files fi, location l, component c, bin_associations ba, suite su
561 WHERE b.package='%(package)s'
563 AND fi.location = l.id
564 AND l.component = c.id
567 AND su.suite_name %(suitename)s
568 ORDER BY b.version DESC"""
570 return session.execute(sql % {'package': package, 'suitename': suitename})
572 __all__.append('get_binary_from_name_suite')
575 def get_binary_components(package, suitename, arch, session=None):
576 # Check for packages that have moved from one component to another
577 query = """SELECT c.name FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f
578 WHERE b.package=:package AND s.suite_name=:suitename
579 AND (a.arch_string = :arch OR a.arch_string = 'all')
580 AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
581 AND f.location = l.id
582 AND l.component = c.id
585 vals = {'package': package, 'suitename': suitename, 'arch': arch}
587 return session.execute(query, vals)
589 __all__.append('get_binary_components')
591 ################################################################################
593 class BinaryACL(object):
594 def __init__(self, *args, **kwargs):
598 return '<BinaryACL %s>' % self.binary_acl_id
600 __all__.append('BinaryACL')
602 ################################################################################
604 class BinaryACLMap(object):
605 def __init__(self, *args, **kwargs):
609 return '<BinaryACLMap %s>' % self.binary_acl_map_id
611 __all__.append('BinaryACLMap')
613 ################################################################################
618 ArchiveDir "%(archivepath)s";
619 OverrideDir "%(overridedir)s";
620 CacheDir "%(cachedir)s";
625 Packages::Compress ". bzip2 gzip";
626 Sources::Compress ". bzip2 gzip";
631 bindirectory "incoming"
636 BinOverride "override.sid.all3";
637 BinCacheDB "packages-accepted.db";
639 FileList "%(filelist)s";
642 Packages::Extensions ".deb .udeb";
645 bindirectory "incoming/"
648 BinOverride "override.sid.all3";
649 SrcOverride "override.sid.all3.src";
650 FileList "%(filelist)s";
654 class BuildQueue(object):
655 def __init__(self, *args, **kwargs):
659 return '<BuildQueue %s>' % self.queue_name
661 def write_metadata(self, starttime, force=False):
662 # Do we write out metafiles?
663 if not (force or self.generate_metadata):
666 session = DBConn().session().object_session(self)
668 fl_fd = fl_name = ac_fd = ac_name = None
670 arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
671 startdir = os.getcwd()
674 # Grab files we want to include
675 newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
676 # Write file list with newer files
677 (fl_fd, fl_name) = mkstemp()
679 os.write(fl_fd, '%s\n' % n.fullpath)
684 # Write minimal apt.conf
685 # TODO: Remove hardcoding from template
686 (ac_fd, ac_name) = mkstemp()
687 os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
689 'cachedir': cnf["Dir::Cache"],
690 'overridedir': cnf["Dir::Override"],
694 # Run apt-ftparchive generate
695 os.chdir(os.path.dirname(ac_name))
696 os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
698 # Run apt-ftparchive release
699 # TODO: Eww - fix this
700 bname = os.path.basename(self.path)
704 # We have to remove the Release file otherwise it'll be included in the
707 os.unlink(os.path.join(bname, 'Release'))
711 os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
713 # Crude hack with open and append, but this whole section is and should be redone.
714 if self.notautomatic:
715 release=open("Release", "a")
716 release.write("NotAutomatic: yes")
721 keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
722 if cnf.has_key("Dinstall::SigningPubKeyring"):
723 keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
725 os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
727 # Move the files if we got this far
728 os.rename('Release', os.path.join(bname, 'Release'))
730 os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
732 # Clean up any left behind files
759 def clean_and_update(self, starttime, Logger, dryrun=False):
760 """WARNING: This routine commits for you"""
761 session = DBConn().session().object_session(self)
763 if self.generate_metadata and not dryrun:
764 self.write_metadata(starttime)
766 # Grab files older than our execution time
767 older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
773 Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
775 Logger.log(["I: Removing %s from the queue" % o.fullpath])
776 os.unlink(o.fullpath)
779 # If it wasn't there, don't worry
780 if e.errno == ENOENT:
783 # TODO: Replace with proper logging call
784 Logger.log(["E: Could not remove %s" % o.fullpath])
791 for f in os.listdir(self.path):
792 if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
796 r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
797 except NoResultFound:
798 fp = os.path.join(self.path, f)
800 Logger.log(["I: Would remove unused link %s" % fp])
802 Logger.log(["I: Removing unused link %s" % fp])
806 Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
808 def add_file_from_pool(self, poolfile):
809 """Copies a file into the pool. Assumes that the PoolFile object is
810 attached to the same SQLAlchemy session as the Queue object is.
812 The caller is responsible for committing after calling this function."""
813 poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
815 # Check if we have a file of this name or this ID already
816 for f in self.queuefiles:
817 if f.fileid is not None and f.fileid == poolfile.file_id or \
818 f.poolfile.filename == poolfile_basename:
819 # In this case, update the BuildQueueFile entry so we
820 # don't remove it too early
821 f.lastused = datetime.now()
822 DBConn().session().object_session(poolfile).add(f)
825 # Prepare BuildQueueFile object
826 qf = BuildQueueFile()
827 qf.build_queue_id = self.queue_id
828 qf.lastused = datetime.now()
829 qf.filename = poolfile_basename
831 targetpath = poolfile.fullpath
832 queuepath = os.path.join(self.path, poolfile_basename)
836 # We need to copy instead of symlink
838 utils.copy(targetpath, queuepath)
839 # NULL in the fileid field implies a copy
842 os.symlink(targetpath, queuepath)
843 qf.fileid = poolfile.file_id
847 # Get the same session as the PoolFile is using and add the qf to it
848 DBConn().session().object_session(poolfile).add(qf)
853 __all__.append('BuildQueue')
856 def get_build_queue(queuename, session=None):
858 Returns BuildQueue object for given C{queue name}, creating it if it does not
861 @type queuename: string
862 @param queuename: The name of the queue
864 @type session: Session
865 @param session: Optional SQLA session object (a temporary one will be
866 generated if not supplied)
869 @return: BuildQueue object for the given queue
872 q = session.query(BuildQueue).filter_by(queue_name=queuename)
876 except NoResultFound:
879 __all__.append('get_build_queue')
881 ################################################################################
883 class BuildQueueFile(object):
884 def __init__(self, *args, **kwargs):
888 return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
892 return os.path.join(self.buildqueue.path, self.filename)
895 __all__.append('BuildQueueFile')
897 ################################################################################
899 class ChangePendingBinary(object):
900 def __init__(self, *args, **kwargs):
904 return '<ChangePendingBinary %s>' % self.change_pending_binary_id
906 __all__.append('ChangePendingBinary')
908 ################################################################################
910 class ChangePendingFile(object):
911 def __init__(self, *args, **kwargs):
915 return '<ChangePendingFile %s>' % self.change_pending_file_id
917 __all__.append('ChangePendingFile')
919 ################################################################################
921 class ChangePendingSource(object):
922 def __init__(self, *args, **kwargs):
926 return '<ChangePendingSource %s>' % self.change_pending_source_id
928 __all__.append('ChangePendingSource')
930 ################################################################################
932 class Component(object):
933 def __init__(self, *args, **kwargs):
936 def __eq__(self, val):
937 if isinstance(val, str):
938 return (self.component_name == val)
939 # This signals to use the normal comparison operator
940 return NotImplemented
942 def __ne__(self, val):
943 if isinstance(val, str):
944 return (self.component_name != val)
945 # This signals to use the normal comparison operator
946 return NotImplemented
949 return '<Component %s>' % self.component_name
952 __all__.append('Component')
955 def get_component(component, session=None):
957 Returns database id for given C{component}.
959 @type component: string
960 @param component: The name of the override type
963 @return: the database id for the given component
966 component = component.lower()
968 q = session.query(Component).filter_by(component_name=component)
972 except NoResultFound:
975 __all__.append('get_component')
977 ################################################################################
979 class DBConfig(object):
980 def __init__(self, *args, **kwargs):
984 return '<DBConfig %s>' % self.name
986 __all__.append('DBConfig')
988 ################################################################################
991 def get_or_set_contents_file_id(filename, session=None):
993 Returns database id for given filename.
995 If no matching file is found, a row is inserted.
997 @type filename: string
998 @param filename: The filename
999 @type session: SQLAlchemy
1000 @param session: Optional SQL session object (a temporary one will be
1001 generated if not supplied). If not passed, a commit will be performed at
1002 the end of the function, otherwise the caller is responsible for commiting.
1005 @return: the database id for the given component
1008 q = session.query(ContentFilename).filter_by(filename=filename)
1011 ret = q.one().cafilename_id
1012 except NoResultFound:
1013 cf = ContentFilename()
1014 cf.filename = filename
1016 session.commit_or_flush()
1017 ret = cf.cafilename_id
1021 __all__.append('get_or_set_contents_file_id')
1024 def get_contents(suite, overridetype, section=None, session=None):
1026 Returns contents for a suite / overridetype combination, limiting
1027 to a section if not None.
1030 @param suite: Suite object
1032 @type overridetype: OverrideType
1033 @param overridetype: OverrideType object
1035 @type section: Section
1036 @param section: Optional section object to limit results to
1038 @type session: SQLAlchemy
1039 @param session: Optional SQL session object (a temporary one will be
1040 generated if not supplied)
1042 @rtype: ResultsProxy
1043 @return: ResultsProxy object set up to return tuples of (filename, section,
1047 # find me all of the contents for a given suite
1048 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
1052 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
1053 JOIN content_file_names n ON (c.filename=n.id)
1054 JOIN binaries b ON (b.id=c.binary_pkg)
1055 JOIN override o ON (o.package=b.package)
1056 JOIN section s ON (s.id=o.section)
1057 WHERE o.suite = :suiteid AND o.type = :overridetypeid
1058 AND b.type=:overridetypename"""
1060 vals = {'suiteid': suite.suite_id,
1061 'overridetypeid': overridetype.overridetype_id,
1062 'overridetypename': overridetype.overridetype}
1064 if section is not None:
1065 contents_q += " AND s.id = :sectionid"
1066 vals['sectionid'] = section.section_id
1068 contents_q += " ORDER BY fn"
1070 return session.execute(contents_q, vals)
1072 __all__.append('get_contents')
1074 ################################################################################
1076 class ContentFilepath(object):
1077 def __init__(self, *args, **kwargs):
1081 return '<ContentFilepath %s>' % self.filepath
1083 __all__.append('ContentFilepath')
1086 def get_or_set_contents_path_id(filepath, session=None):
1088 Returns database id for given path.
1090 If no matching file is found, a row is inserted.
1092 @type filepath: string
1093 @param filepath: The filepath
1095 @type session: SQLAlchemy
1096 @param session: Optional SQL session object (a temporary one will be
1097 generated if not supplied). If not passed, a commit will be performed at
1098 the end of the function, otherwise the caller is responsible for commiting.
1101 @return: the database id for the given path
1104 q = session.query(ContentFilepath).filter_by(filepath=filepath)
1107 ret = q.one().cafilepath_id
1108 except NoResultFound:
1109 cf = ContentFilepath()
1110 cf.filepath = filepath
1112 session.commit_or_flush()
1113 ret = cf.cafilepath_id
1117 __all__.append('get_or_set_contents_path_id')
1119 ################################################################################
1121 class ContentAssociation(object):
1122 def __init__(self, *args, **kwargs):
1126 return '<ContentAssociation %s>' % self.ca_id
1128 __all__.append('ContentAssociation')
1130 def insert_content_paths(binary_id, fullpaths, session=None):
1132 Make sure given path is associated with given binary id
1134 @type binary_id: int
1135 @param binary_id: the id of the binary
1136 @type fullpaths: list
1137 @param fullpaths: the list of paths of the file being associated with the binary
1138 @type session: SQLAlchemy session
1139 @param session: Optional SQLAlchemy session. If this is passed, the caller
1140 is responsible for ensuring a transaction has begun and committing the
1141 results or rolling back based on the result code. If not passed, a commit
1142 will be performed at the end of the function, otherwise the caller is
1143 responsible for commiting.
1145 @return: True upon success
1148 privatetrans = False
1150 session = DBConn().session()
1155 def generate_path_dicts():
1156 for fullpath in fullpaths:
1157 if fullpath.startswith( './' ):
1158 fullpath = fullpath[2:]
1160 yield {'filename':fullpath, 'id': binary_id }
1162 for d in generate_path_dicts():
1163 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
1172 traceback.print_exc()
1174 # Only rollback if we set up the session ourself
1181 __all__.append('insert_content_paths')
1183 ################################################################################
1185 class DSCFile(object):
1186 def __init__(self, *args, **kwargs):
1190 return '<DSCFile %s>' % self.dscfile_id
1192 __all__.append('DSCFile')
1195 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
1197 Returns a list of DSCFiles which may be empty
1199 @type dscfile_id: int (optional)
1200 @param dscfile_id: the dscfile_id of the DSCFiles to find
1202 @type source_id: int (optional)
1203 @param source_id: the source id related to the DSCFiles to find
1205 @type poolfile_id: int (optional)
1206 @param poolfile_id: the poolfile id related to the DSCFiles to find
1209 @return: Possibly empty list of DSCFiles
1212 q = session.query(DSCFile)
1214 if dscfile_id is not None:
1215 q = q.filter_by(dscfile_id=dscfile_id)
1217 if source_id is not None:
1218 q = q.filter_by(source_id=source_id)
1220 if poolfile_id is not None:
1221 q = q.filter_by(poolfile_id=poolfile_id)
1225 __all__.append('get_dscfiles')
1227 ################################################################################
1229 class PoolFile(ORMObject):
1230 def __init__(self, filename = None, location = None, filesize = -1, \
1232 self.filename = filename
1233 self.location = location
1234 self.filesize = filesize
1235 self.md5sum = md5sum
1239 return os.path.join(self.location.path, self.filename)
1241 def is_valid(self, filesize = -1, md5sum = None):\
1242 return self.filesize == filesize and self.md5sum == md5sum
1244 def properties(self):
1245 return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
1246 'sha256sum', 'location', 'source', 'last_used']
1248 def not_null_constraints(self):
1249 return ['filename', 'md5sum', 'location']
1251 __all__.append('PoolFile')
def check_poolfile(filename, filesize, md5sum, location_id, session=None):
    """
    Returns a tuple:
    (ValidFileFound [boolean], PoolFile object or None)

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @type filesize: int
    @param filesize: the size of the file to check against the DB

    @type md5sum: string
    @param md5sum: the md5sum of the file to check against the DB

    @type location_id: int
    @param location_id: the id of the location to look in

    @type session: SQLAlchemy
    @param session: SQL session object; assumed to be supplied by a
    wrapper/caller -- TODO confirm, the decorator line was lost in extraction

    @rtype: tuple
    @return: Tuple of length 2.
                 - If valid pool file found: (C{True}, C{PoolFile object})
                 - If valid pool file not found:
                     - (C{False}, C{None}) if no file found
                     - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
    """

    poolfile = session.query(Location).get(location_id). \
        files.filter_by(filename=filename).first()
    # NOTE(review): `valid` was referenced without ever being assigned in
    # the extract; initialise it explicitly before the validity check.
    valid = False
    if poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum):
        valid = True

    return (valid, poolfile)

__all__.append('check_poolfile')
# TODO: the implementation can trivially be inlined at the place where the
# function is called

def get_poolfile_by_id(file_id, session=None):
    """
    Returns a PoolFile objects or None for the given id

    @type file_id: int
    @param file_id: the id of the file to look for

    @rtype: PoolFile or None
    @return: either the PoolFile object or None
    """

    return session.query(PoolFile).get(file_id)

__all__.append('get_poolfile_by_id')
def get_poolfile_like_name(filename, session=None):
    """
    Returns an array of PoolFile objects which are like the given name

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @rtype: list
    @return: array of PoolFile objects
    """

    # TODO: There must be a way of properly using bind parameters with %FOO%
    q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))

    # NOTE(review): the extract built the query but never returned it;
    # materialise and return the result list.
    return q.all()

__all__.append('get_poolfile_like_name')
def add_poolfile(filename, datadict, location_id, session=None):
    """
    Add a new file to the pool

    @type filename: string
    @param filename: filename

    @type datadict: dict
    @param datadict: dict with needed data (size, md5sum, sha1sum, sha256sum)

    @type location_id: int
    @param location_id: database id of the location

    @rtype: PoolFile
    @return: the PoolFile object created
    """
    poolfile = PoolFile()
    poolfile.filename = filename
    poolfile.filesize = datadict["size"]
    poolfile.md5sum = datadict["md5sum"]
    poolfile.sha1sum = datadict["sha1sum"]
    poolfile.sha256sum = datadict["sha256sum"]
    poolfile.location_id = location_id

    session.add(poolfile)
    # Flush to get a file id (NB: This is not a commit)
    session.flush()

    return poolfile

__all__.append('add_poolfile')
1359 ################################################################################
class Fingerprint(ORMObject):
    # One row of table C{fingerprint}: a GPG key fingerprint and its
    # relations (keyring, uid, ...).
    def __init__(self, fingerprint = None):
        self.fingerprint = fingerprint

    def properties(self):
        # Attribute names exposed by the ORMObject helper machinery.
        # NOTE(review): the continuation line completing this list was lost
        # in extraction (dangling backslash) -- recover before use.
        return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
    def not_null_constraints(self):
        # fingerprint is the only column callers must always set.
        return ['fingerprint']

__all__.append('Fingerprint')
def get_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr or None
    """

    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    try:
        ret = q.one()
    except NoResultFound:
        # No such fingerprint recorded yet.
        ret = None

    return ret

__all__.append('get_fingerprint')
def get_or_set_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    If no matching fpr is found, a row is inserted.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr
    """

    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    try:
        ret = q.one()
    except NoResultFound:
        # Unknown fingerprint: insert a new row and make it visible
        # (commit_or_flush honours private-session semantics).
        fingerprint = Fingerprint()
        fingerprint.fingerprint = fpr
        session.add(fingerprint)
        session.commit_or_flush()
        ret = fingerprint

    return ret

__all__.append('get_or_set_fingerprint')
1436 ################################################################################
# Helper routine for Keyring class
def get_ldap_name(entry):
    """Build a display name from an LDAP entry.

    Joins the first value of each of the "cn", "mn" and "sn" attributes,
    skipping missing attributes, empty values and the placeholder "-".

    @type entry: dict
    @param entry: LDAP entry mapping attribute names to lists of values

    @rtype: string
    @return: space-joined name components
    """
    # NOTE(review): the lookup/append lines were lost in extraction and
    # have been restored as the obvious list-building loop.
    name = []
    for k in ["cn", "mn", "sn"]:
        ret = entry.get(k)
        if ret and ret[0] != "" and ret[0] != "-":
            name.append(ret[0])
    return " ".join(name)
1447 ################################################################################
class Keyring(object):
    # Shell command used to list key fingerprints; the single %s is the
    # path to the keyring file.
    gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
                     " --with-colons --fingerprint --fingerprint"

    def __init__(self, *args, **kwargs):
        # NOTE(review): extraction fused the __repr__ body into __init__
        # here; upstream defines them as separate methods and initialises
        # keys/fpr_lookup state in between.
        return '<Keyring %s>' % self.keyring_name
1462 def de_escape_gpg_str(self, txt):
1463 esclist = re.split(r'(\\x..)', txt)
1464 for x in range(1,len(esclist),2):
1465 esclist[x] = "%c" % (int(esclist[x][2:],16))
1466 return "".join(esclist)
    def parse_address(self, uid):
        """parses uid and returns a tuple of real name and email address"""
        # Split "Real Name (comment) <addr>"; drop any parenthesised
        # comment from the name, then decode gpg \xNN escapes in it.
        (name, address) = email.Utils.parseaddr(uid)
        name = re.sub(r"\s*[(].*[)]", "", name)
        name = self.de_escape_gpg_str(name)
        # NOTE(review): one or more lines appear to be missing here in the
        # extract (presumably a fallback when the parsed name is empty --
        # TODO confirm against upstream).
        return (name, address)
    def load_keys(self, keyring):
        # Populate self.keys / self.fpr_lookup from `gpg --with-colons`
        # output for the given keyring file.
        # NOTE(review): several lines are missing from this extract (the
        # initialisation of self.keys / self.fpr_lookup and the assignment
        # of `key` from the "pub" record) -- recover before relying on it.
        if not self.keyring_id:
            raise Exception('Must be initialized with database information')

        k = os.popen(self.gpg_invocation % keyring, "r")

        for line in k.xreadlines():
            field = line.split(":")
            if field[0] == "pub":
                # New primary key: record name/email parsed from the
                # user-id column (field 9).
                (name, addr) = self.parse_address(field[9])
                self.keys[key]["email"] = addr
                self.keys[key]["name"] = name
                self.keys[key]["fingerprints"] = []
            elif key and field[0] == "sub" and len(field) >= 12:
                # Subkey: remember whether it carries the signing ("s")
                # capability; only fingerprints of signing keys are indexed.
                signingkey = ("s" in field[11])
            elif key and field[0] == "uid":
                (name, addr) = self.parse_address(field[9])
                # Prefer the first uid that actually carries an address.
                if "email" not in self.keys[key] and "@" in addr:
                    self.keys[key]["email"] = addr
                    self.keys[key]["name"] = name
            elif signingkey and field[0] == "fpr":
                # Fingerprint record: index it for fpr -> key lookups.
                self.keys[key]["fingerprints"].append(field[9])
                self.fpr_lookup[field[9]] = key
    def import_users_from_ldap(self, session):
        # Look up developer uids/names in LDAP and attach them to the keys
        # previously loaded by load_keys().
        # NOTE(review): this extract is missing the `import ldap`, the
        # byuid/byname initialisation and the `for entry in Attrs`-style
        # loop header -- the body below is a fragment.
        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]

        l = ldap.open(LDAPServer)
        # Anonymous bind; the directory data read here is public.
        l.simple_bind_s("","")
        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
               ["uid", "keyfingerprint", "cn", "mn", "sn"])

        ldap_fin_uid_id = {}

        uid = entry["uid"][0]
        name = get_ldap_name(entry)
        fingerprints = entry["keyFingerPrint"]
        for f in fingerprints:
            # Only act on fingerprints present in the loaded keyring.
            key = self.fpr_lookup.get(f, None)
            # NOTE(review): this guard/assignment pairing looks inverted --
            # presumably an intervening line (e.g. `continue`) was lost.
            if key not in self.keys:
                self.keys[key]["uid"] = uid

            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, name)
            byname[uid] = (keyid, name)

        return (byname, byuid)
    def generate_users_from_keyring(self, format, session):
        # Create/look up Uid rows for every key in the keyring; keys with
        # no usable email get a format-derived "invalid-uid" instead.
        # NOTE(review): the byuid/byname initialisation and the branch
        # structure (an else for keys that DO have an email) were lost in
        # extraction; the flat layout below cannot be the intended logic.
        for x in self.keys.keys():
            if "email" not in self.keys[x]:
                self.keys[x]["uid"] = format % "invalid-uid"

                uid = format % self.keys[x]["email"]
                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, self.keys[x]["name"])
                byname[uid] = (keyid, self.keys[x]["name"])
                self.keys[x]["uid"] = uid

        # Shared placeholder entry for keys without generatable user ids.
        uid = format % "invalid-uid"
        keyid = get_or_set_uid(uid, session).uid_id
        byuid[keyid] = (uid, "ungeneratable user id")
        byname[uid] = (keyid, "ungeneratable user id")

        return (byname, byuid)

__all__.append('Keyring')
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @rtype: Keyring
    @return: the Keyring object for this keyring
    """

    q = session.query(Keyring).filter_by(keyring_name=keyring)

    try:
        return q.one()
    except NoResultFound:
        # Keyring not registered in the database.
        return None

__all__.append('get_keyring')
1593 ################################################################################
class KeyringACLMap(object):
    """Mapping between a keyring and an ACL entry (table keyring_acl_map)."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<KeyringACLMap %s>' % self.keyring_acl_map_id
1602 __all__.append('KeyringACLMap')
1604 ################################################################################
class DBChange(object):
    # One row of table C{changes}: an uploaded .changes file.
    def __init__(self, *args, **kwargs):
        # NOTE(review): extraction fused the __repr__ body into __init__;
        # upstream defines `def __repr__(self)` separately.
        return '<DBChange %s>' % self.changesname

    def clean_from_queue(self):
        # Detach this upload from its policy queue and drop the rows that
        # tie it to pool / pending files.
        session = DBConn().session().object_session(self)

        # Remove changes_pool_files entries
        # NOTE(review): the delete statement itself is missing here.

        # Remove changes_pending_files references
        # NOTE(review): the delete statement itself is missing here.

        # Clear out of queue
        self.in_queue = None
        self.approved_for_id = None

__all__.append('DBChange')
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return: DBChange object for the given filename (C{None} if not present)
    """
    q = session.query(DBChange).filter_by(changesname=filename)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_dbchange')
1653 ################################################################################
class Location(ORMObject):
    """An archive location, one row of table C{location}."""

    def __init__(self, path = None):
        # NOTE(review): this assignment was missing in the extract, leaving
        # the constructor argument unused; restored.
        self.path = path
        # the column 'type' should go away, see comment at mapper
        self.archive_type = 'pool'

    def properties(self):
        # Attribute names exposed by the ORMObject helper machinery.
        return ['path', 'archive_type', 'component', 'files_count']

    def not_null_constraints(self):
        # Columns that must be set before an INSERT can succeed.
        return ['path', 'archive_type']

__all__.append('Location')
def get_location(location, component=None, archive=None, session=None):
    """
    Returns Location object for the given combination of location, component
    and archive

    @type location: string
    @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}

    @type component: string
    @param component: the component name (if None, no restriction applied)

    @type archive: string
    @param archive: the archive name (if None, no restriction applied)

    @rtype: Location / None
    @return: Either a Location object or None if one can't be found
    """

    q = session.query(Location).filter_by(path=location)

    if archive is not None:
        q = q.join(Archive).filter_by(archive_name=archive)

    if component is not None:
        q = q.join(Component).filter_by(component_name=component)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_location')
1703 ################################################################################
class Maintainer(ORMObject):
    """A maintainer "Name <email>" string, one row of table C{maintainer}."""

    def __init__(self, name = None):
        # NOTE(review): this assignment was missing in the extract, leaving
        # the constructor argument unused; restored.
        self.name = name

    def properties(self):
        # Attribute names exposed by the ORMObject helper machinery.
        return ['name', 'maintainer_id']

    def not_null_constraints(self):
        # NOTE(review): the return line was missing in the extract;
        # 'name' is the only column callers must always set.
        return ['name']

    def get_split_maintainer(self):
        # Split the stored "Name <email>" into the canonical 4-tuple used
        # elsewhere; all-empty tuple when no name is recorded.
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())

__all__.append('Maintainer')
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """

    q = session.query(Maintainer).filter_by(name=name)
    try:
        ret = q.one()
    except NoResultFound:
        # Unknown maintainer: insert a new row and make it visible
        # (commit_or_flush honours private-session semantics).
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()
        ret = maintainer

    return ret

__all__.append('get_or_set_maintainer')
def get_maintainer(maintainer_id, session=None):
    """
    Return the name of the maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer
    @return: the Maintainer with this C{maintainer_id}
    """

    return session.query(Maintainer).get(maintainer_id)

__all__.append('get_maintainer')
1774 ################################################################################
class NewComment(object):
    """An ftpmaster comment on a package in NEW (table new_comments)."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
1783 __all__.append('NewComment')
def has_new_comment(package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """

    q = session.query(NewComment)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    return bool(q.count() > 0)

__all__.append('has_new_comment')
def get_new_comments(package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """

    q = session.query(NewComment)
    if package is not None: q = q.filter_by(package=package)
    if version is not None: q = q.filter_by(version=version)
    if comment_id is not None: q = q.filter_by(comment_id=comment_id)

    # NOTE(review): the extract dropped the final materialisation.
    return q.all()

__all__.append('get_new_comments')
1844 ################################################################################
class Override(object):
    """An override entry: section/priority for a package in a suite."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<Override %s (%s)>' % (self.package, self.suite_id)
1853 __all__.append('Override')
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to. If
                  None, don't limit. Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
                      limit to. If None, don't limit. Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
                         limit to. If None, don't limit. Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """

    q = session.query(Override)
    q = q.filter_by(package=package)

    # Each optional filter accepts a scalar or a list of names.
    if suite is not None:
        if not isinstance(suite, list): suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list): component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list): overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

    # NOTE(review): the extract dropped the final materialisation.
    return q.all()

__all__.append('get_override')
1903 ################################################################################
class OverrideType(object):
    """An override type name (deb/udeb/dsc), table override_type."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<OverrideType %s>' % self.overridetype
1912 __all__.append('OverrideType')
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: OverrideType
    @return: the OverrideType object for the given override type (or None)
    """

    q = session.query(OverrideType).filter_by(overridetype=override_type)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_override_type')
1939 ################################################################################
class DebContents(object):
    """A path shipped by a binary package (table deb_contents)."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # NB: the "DebConetnts" typo is preserved from the original string.
        return '<DebConetnts %s: %s>' % (self.package.package,self.file)
1948 __all__.append('DebContents')
class UdebContents(object):
    """A path shipped by a udeb package (table udeb_contents)."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # NB: the "UdebConetnts" typo is preserved from the original string.
        return '<UdebConetnts %s: %s>' % (self.package.package,self.file)
1958 __all__.append('UdebContents')
class PendingBinContents(object):
    """Temporarily recorded contents of a not-yet-accepted binary."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<PendingBinContents %s>' % self.contents_id
1967 __all__.append('PendingBinContents')
def insert_pending_content_paths(package,
    # NOTE(review): the remainder of the signature (presumably is_udeb,
    # fullpaths, session=None) and many body lines (try/commit/rollback,
    # q.delete, session.add) were lost in extraction.
    """
    Make sure given paths are temporarily associated with given
    package

    @param package: the package to associate with should have been read in from the binary control file
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session. If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code. If not passed, a commit
    will be performed at the end of the function

    @return: True upon success, False if there is a problem
    """

    privatetrans = False
    session = DBConn().session()

    arch = get_architecture(package['Architecture'], session)
    arch_id = arch.arch_id

    # Remove any already existing recorded files for this package
    q = session.query(PendingBinContents)
    q = q.filter_by(package=package['Package'])
    q = q.filter_by(version=package['Version'])
    q = q.filter_by(architecture=arch_id)

    for fullpath in fullpaths:

        # Normalise "./usr/..." to "usr/...".
        if fullpath.startswith( "./" ):
            fullpath = fullpath[2:]

        pca = PendingBinContents()
        pca.package = package['Package']
        pca.version = package['Version']
        pca.architecture = arch_id

        # NOTE(review): both assignments executing unconditionally cannot
        # be intended; presumably an is_udeb conditional was lost here.
        pca.type = 8 # gross
        pca.type = 7 # also gross

        # Only commit if we set up the session ourself

    except Exception, e:
        traceback.print_exc()

        # Only rollback if we set up the session ourself

__all__.append('insert_pending_content_paths')
2044 ################################################################################
class PolicyQueue(object):
    """A policy queue (e.g. NEW), one row of table policy_queue."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<PolicyQueue %s>' % self.queue_name
2053 __all__.append('PolicyQueue')
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """

    q = session.query(PolicyQueue).filter_by(queue_name=queuename)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_policy_queue')
def get_policy_queue_from_path(pathname, session=None):
    """
    Returns PolicyQueue object for given C{path name}

    @type pathname: string
    @param pathname: The path

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """

    q = session.query(PolicyQueue).filter_by(path=pathname)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_policy_queue_from_path')
2105 ################################################################################
class Priority(object):
    """A priority value (table priority); compares equal to its name string."""
    def __init__(self, *args, **kwargs):
        pass

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __repr__(self):
        return '<Priority %s (%s)>' % (self.priority, self.priority_id)
2126 __all__.append('Priority')
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority
    """

    q = session.query(Priority).filter_by(priority=priority)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_priority')
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """

    # NOTE(review): accumulator init and return were missing in the extract.
    ret = {}
    q = session.query(Priority)
    for x in q.all():
        ret[x.priority] = x.priority_id

    return ret

__all__.append('get_priorities')
2175 ################################################################################
class Section(object):
    """A section value (table section); compares equal to its name string."""
    def __init__(self, *args, **kwargs):
        pass

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __repr__(self):
        return '<Section %s>' % self.section
2196 __all__.append('Section')
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name
    """

    q = session.query(Section).filter_by(section=section)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_section')
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """

    # NOTE(review): accumulator init and return were missing in the extract.
    ret = {}
    q = session.query(Section)
    for x in q.all():
        ret[x.section] = x.section_id

    return ret

__all__.append('get_sections')
2245 ################################################################################
class DBSource(ORMObject):
    # A source package, one row of table C{source}.
    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date

    def properties(self):
        # Attribute names exposed by the ORMObject helper machinery.
        # NOTE(review): the continuation line completing this list was
        # lost in extraction (dangling backslash) -- recover before use.
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \
    def not_null_constraints(self):
        # NOTE(review): 'install_date' appears twice in this list;
        # harmless for the constraint check but probably unintended.
        return ['source', 'version', 'install_date', 'maintainer', \
            'changedby', 'poolfile', 'install_date']

__all__.append('DBSource')
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
    1. exact match => 1.0-3
    2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: list
    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: returns 1 if a source with expected version is found, otherwise 0
    """
    # NOTE(review): mutable default argument (suites=["any"]) -- risky if a
    # caller ever mutates it. Also several lines of the suite-mapping loop
    # and the final return(s) were lost in extraction; fragments below.

    # Strip a binNMU suffix to also accept the unsuffixed source version.
    from daklib.regexes import re_bin_only_nmu
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))
        # source must exist in suite X, or in some other suite that's
        # mapped to X, recursively... silent-maps are counted too,
        # unreleased-maps aren't.
        maps = cnf.ValueList("SuiteMappings")[:]
        maps = [ m.split() for m in maps ]
        maps = [ (x[1], x[2]) for x in maps
                        if x[0] == "map" or x[0] == "silent-map" ]
        # NOTE(review): the loop expanding the suite set `s` via the map
        # pairs is incomplete here.
        if x[1] in s and x[0] not in s:
        q = q.filter(DBSource.suites.any(Suite.suite_name.in_(s)))

    # No source found so return not ok
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """

    return session.query(Suite).filter(Suite.sources.any(source=source)).all()

__all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @type source: str
    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect. If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBSource objects for the given name (may be empty)
    """

    q = session.query(DBSource).filter_by(source=source)

    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)

    # NOTE(review): the extract dropped the final materialisation.
    return q.all()

__all__.append('get_sources_from_name')
# FIXME: This function fails badly if it finds more than 1 source package and
# its implementation is trivial enough to be inlined.

def get_source_in_suite(source, suite, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite: string
    @param suite: the suite name

    @rtype: DBSource or None
    @return: the matching DBSource row (None when source is not in suite)
    """

    q = get_suite(suite, session).get_sources(source)
    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_source_in_suite')
2409 ################################################################################
def add_dsc_to_db(u, filename, session=None):
    # Record a source upload (.dsc) in the database: the source row, its
    # pool file(s), suite memberships, dsc_files rows and uploaders.
    # NOTE(review): several lines are missing from this extract -- the
    # construction of the DBSource/DSCFile/DSCFile-per-file/SrcUploader
    # objects, the pfs/added_ids initialisations and the session
    # add/flush calls. Notes below mark the visible gaps.
    entry = u.pkg.files[filename]

    source.source = u.pkg.dsc["source"]
    source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
    source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
    source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    source.install_date = datetime.now().date()

    dsc_component = entry["component"]
    dsc_location_id = entry["location id"]

    source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")

    # Set up a new poolfile if necessary
    if not entry.has_key("files id") or not entry["files id"]:
        filename = entry["pool name"] + filename
        poolfile = add_poolfile(filename, entry, dsc_location_id, session)
        pfs.append(poolfile)
        entry["files id"] = poolfile.file_id

    source.poolfile_id = entry["files id"]

    suite_names = u.pkg.changes["distribution"].keys()
    source.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Add the source files to the DB (files and dsc_files)
    dscfile.source_id = source.source_id
    dscfile.poolfile_id = entry["files id"]
    session.add(dscfile)

    for dsc_file, dentry in u.pkg.dsc_files.items():
        df.source_id = source.source_id

        # If the .orig tarball is already in the pool, it's
        # files id is stored in dsc_files by check_dsc().
        files_id = dentry.get("files id", None)

        # Find the entry in the files hash
        # TODO: Bail out here properly
        for f, e in u.pkg.files.items():
        # NOTE(review): the loop body (matching f against dsc_file and
        # binding dfentry) is missing here.

        if files_id is None:
            filename = dfentry["pool name"] + dsc_file

            (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
            # FIXME: needs to check for -1/-2 and or handle exception
            if found and obj is not None:
                files_id = obj.file_id

        # If still not found, add it
        if files_id is None:
            # HACK: Force sha1sum etc into dentry
            dentry["sha1sum"] = dfentry["sha1sum"]
            dentry["sha256sum"] = dfentry["sha256sum"]
            poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
            pfs.append(poolfile)
            files_id = poolfile.file_id

        poolfile = get_poolfile_by_id(files_id, session)
        if poolfile is None:
            utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
        pfs.append(poolfile)

        df.poolfile_id = files_id

    # Add the src_uploaders to the DB
    uploader_ids = [source.maintainer_id]
    if u.pkg.dsc.has_key("uploaders"):
        # Uploaders are comma-separated "Name <email>" entries; the
        # replace/split dance tolerates commas inside the name part.
        for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
            uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)

    for up_id in uploader_ids:
        if added_ids.has_key(up_id):
            # Guard against the same uploader being listed twice.
            utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))

        su.maintainer_id = up_id
        su.source_id = source.source_id

    return source, dsc_component, dsc_location_id, pfs

__all__.append('add_dsc_to_db')
def add_deb_to_db(u, filename, session=None):
    """
    Contrary to what you might expect, this routine deals with both
    debs and udebs. That info is in 'dbtype', whilst 'type' is
    'deb' for both of them
    """
    # NOTE(review): the construction of the binary object (`bin = ...`),
    # the else-branch structure of the poolfile lookup and the session
    # add/flush calls are missing from this extract.
    entry = u.pkg.files[filename]

    bin.package = entry["package"]
    bin.version = entry["version"]
    bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
    bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    bin.arch_id = get_architecture(entry["architecture"], session).arch_id
    bin.binarytype = entry["dbtype"]

    # Find poolfile id.
    filename = entry["pool name"] + filename
    fullpath = os.path.join(cnf["Dir::Pool"], filename)
    if not entry.get("location id", None):
        entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id

    if entry.get("files id", None):
        poolfile = get_poolfile_by_id(bin.poolfile_id)
        bin.poolfile_id = entry["files id"]
        # NOTE(review): an else branch likely separated the two paths here.
        poolfile = add_poolfile(filename, entry, entry["location id"], session)
        bin.poolfile_id = entry["files id"] = poolfile.file_id

    # Find the source id for this binary.
    bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
    if len(bin_sources) != 1:
        raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
                                  (bin.package, bin.version, entry["architecture"],
                                   filename, bin.binarytype, u.pkg.changes["fingerprint"])

    bin.source_id = bin_sources[0].source_id

    # Add and flush object so it has an ID

    # Add BinAssociations
    for suite_name in u.pkg.changes["distribution"].keys():
        ba = BinAssociation()
        ba.binary_id = bin.binary_id
        ba.suite_id = get_suite(suite_name).suite_id

    # Deal with contents - disabled for now
    #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
    #    print "REJECT\nCould not determine contents of package %s" % bin.package
    #    session.rollback()
    #    raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)

__all__.append('add_deb_to_db')
2583 ################################################################################
# ORM class for the source_acl table (mapped in DBConn.__setupmappers);
# reached from Fingerprint via its source_acl relation.
2585 class SourceACL(object):
2586 def __init__(self, *args, **kwargs):
# Identify the row by its primary key (presumably the __repr__ body).
2590 return '<SourceACL %s>' % self.source_acl_id
2592 __all__.append('SourceACL')
2594 ################################################################################
# ORM class for the src_format table: a named source package format.
# Mapped with src_format_id / format_name in DBConn.__setupmappers.
2596 class SrcFormat(object):
2597 def __init__(self, *args, **kwargs):
# Identify the row by its format name (presumably the __repr__ body).
2601 return '<SrcFormat %s>' % (self.format_name)
2603 __all__.append('SrcFormat')
2605 ################################################################################
# ORM class for the src_uploaders table: links a Maintainer allowed to
# upload a given DBSource (see the SrcUploader mapper in DBConn).
2607 class SrcUploader(object):
2608 def __init__(self, *args, **kwargs):
# Identify the row by its primary key (presumably the __repr__ body).
2612 return '<SrcUploader %s>' % self.uploader_id
2614 __all__.append('SrcUploader')
2616 ################################################################################
# (display label, Suite attribute) pairs used when rendering a Suite as
# text: each attribute that is set is emitted as "Label: value" by the
# SUITE_FIELDS loop in the Suite class below.
2618 SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
2619 ('SuiteID', 'suite_id'),
2620 ('Version', 'version'),
2621 ('Origin', 'origin'),
2623 ('Description', 'description'),
2624 ('Untouchable', 'untouchable'),
2625 ('Announce', 'announce'),
2626 ('Codename', 'codename'),
2627 ('OverrideCodename', 'overridecodename'),
2628 ('ValidTime', 'validtime'),
2629 ('Priority', 'priority'),
2630 ('NotAutomatic', 'notautomatic'),
2631 ('CopyChanges', 'copychanges'),
2632 ('OverrideSuite', 'overridesuite')]
2634 # Why the heck don't we have any UNIQUE constraints in table suite?
2635 # TODO: Add UNIQUE constraints for appropriate columns.
# ORM class for the suite table. Instances compare equal to plain
# strings carrying the suite name (see __eq__/__ne__ below).
2636 class Suite(ORMObject):
2637 def __init__(self, suite_name = None, version = None):
2638 self.suite_name = suite_name
2639 self.version = version
# Attributes exposed by ORMObject's generic property machinery.
2641 def properties(self):
2642 return ['suite_name', 'version']
# Attributes that must be set before the row may be written.
2644 def not_null_constraints(self):
2645 return ['suite_name', 'version']
# Allow comparisons like `suite == "unstable"` against bare names.
# NOTE(review): no __hash__ override is visible here; in Python 2 the
# default identity hash remains, so string and Suite keys would not
# collide in dicts/sets -- confirm callers don't rely on that.
2647 def __eq__(self, val):
2648 if isinstance(val, str):
2649 return (self.suite_name == val)
2650 # This signals to use the normal comparison operator
2651 return NotImplemented
2653 def __ne__(self, val):
2654 if isinstance(val, str):
2655 return (self.suite_name != val)
2656 # This signals to use the normal comparison operator
2657 return NotImplemented
# Render every SUITE_FIELDS attribute that is set as "Label: value",
# one per line.
2661 for disp, field in SUITE_FIELDS:
2662 val = getattr(self, field, None)
2664 ret.append("%s: %s" % (disp, val))
2666 return "\n".join(ret)
2668 def get_architectures(self, skipsrc=False, skipall=False):
2670 Returns list of Architecture objects
2672 @type skipsrc: boolean
2673 @param skipsrc: Whether to skip returning the 'source' architecture entry
2676 @type skipall: boolean
2677 @param skipall: Whether to skip returning the 'all' architecture entry
2681 @return: list of Architecture objects for the given name (may be empty)
# Architectures are reached via the suite_architectures secondary
# table (see the Architecture mapper), sorted by arch name.
2684 q = object_session(self).query(Architecture).with_parent(self)
2686 q = q.filter(Architecture.arch_string != 'source')
2688 q = q.filter(Architecture.arch_string != 'all')
2689 return q.order_by(Architecture.arch_string).all()
2691 def get_sources(self, source):
2693 Returns a query object representing DBSource that is part of C{suite}.
2695 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2697 @type source: string
2698 @param source: source package name
2700 @rtype: sqlalchemy.orm.query.Query
2701 @return: a query of DBSource
# Returns a lazy Query (not a list) so callers can refine it further.
2705 session = object_session(self)
2706 return session.query(DBSource).filter_by(source = source). \
2709 __all__.append('Suite')
# Look up a single Suite row by name; NoResultFound is swallowed and
# None returned for unknown suites.
2712 def get_suite(suite, session=None):
2714 Returns Suite object for given C{suite name}.
2717 @param suite: The name of the suite
2719 @type session: Session
2720 @param session: Optional SQLA session object (a temporary one will be
2721 generated if not supplied)
2724 @return: Suite object for the requested suite name (None if not present)
2727 q = session.query(Suite).filter_by(suite_name=suite)
2731 except NoResultFound:
2734 __all__.append('get_suite')
2736 ################################################################################
2738 # TODO: should be removed because the implementation is too trivial
# Thin wrapper around Suite.get_architectures.
# NOTE(review): get_suite() returns None for an unknown suite name, so
# this raises AttributeError instead of returning [] in that case --
# confirm callers only pass valid suite names.
2740 def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
2742 Returns list of Architecture objects for given C{suite} name
2745 @param suite: Suite name to search for
2747 @type skipsrc: boolean
2748 @param skipsrc: Whether to skip returning the 'source' architecture entry
2751 @type skipall: boolean
2752 @param skipall: Whether to skip returning the 'all' architecture entry
2755 @type session: Session
2756 @param session: Optional SQL session object (a temporary one will be
2757 generated if not supplied)
2760 @return: list of Architecture objects for the given name (may be empty)
2763 return get_suite(suite, session).get_architectures(skipsrc, skipall)
2765 __all__.append('get_suite_architectures')
2767 ################################################################################
# ORM class for the suite_src_formats association table: which source
# formats a suite accepts (queried by get_suite_src_formats below).
2769 class SuiteSrcFormat(object):
2770 def __init__(self, *args, **kwargs):
# Identify the row by its two foreign keys (presumably __repr__).
2774 return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
2776 __all__.append('SuiteSrcFormat')
# List the SrcFormat rows allowed for a suite, ordered by format name.
2779 def get_suite_src_formats(suite, session=None):
2781 Returns list of allowed SrcFormat for C{suite}.
2784 @param suite: Suite name to search for
2786 @type session: Session
2787 @param session: Optional SQL session object (a temporary one will be
2788 generated if not supplied)
2791 @return: the list of allowed source formats for I{suite}
# Join through the suite_src_formats association table to the suite.
2794 q = session.query(SrcFormat)
2795 q = q.join(SuiteSrcFormat)
2796 q = q.join(Suite).filter_by(suite_name=suite)
2797 q = q.order_by('format_name')
2801 __all__.append('get_suite_src_formats')
2803 ################################################################################
# ORM class for the uid table (a key's uid string plus real name).
# Like Suite, instances compare equal to plain uid strings.
2805 class Uid(ORMObject):
2806 def __init__(self, uid = None, name = None):
2810 def __eq__(self, val):
2811 if isinstance(val, str):
2812 return (self.uid == val)
2813 # This signals to use the normal comparison operator
2814 return NotImplemented
2816 def __ne__(self, val):
2817 if isinstance(val, str):
2818 return (self.uid != val)
2819 # This signals to use the normal comparison operator
2820 return NotImplemented
# Attributes exposed by ORMObject's generic property machinery.
2822 def properties(self):
2823 return ['uid', 'name', 'fingerprint']
2825 def not_null_constraints(self):
2828 __all__.append('Uid')
# Fetch the Uid row for uidname, inserting one first if it is missing.
# commit_or_flush() commits for an internally-created session and only
# flushes for a caller-supplied one (per the docstring below).
2831 def get_or_set_uid(uidname, session=None):
2833 Returns uid object for given uidname.
2835 If no matching uidname is found, a row is inserted.
2837 @type uidname: string
2838 @param uidname: The uid to add
2840 @type session: SQLAlchemy
2841 @param session: Optional SQL session object (a temporary one will be
2842 generated if not supplied). If not passed, a commit will be performed at
2843 the end of the function, otherwise the caller is responsible for commiting.
2846 @return: the uid object for the given uidname
2849 q = session.query(Uid).filter_by(uid=uidname)
2853 except NoResultFound:
2857 session.commit_or_flush()
2862 __all__.append('get_or_set_uid')
# Resolve the Uid attached to a fingerprint string; NoResultFound is
# swallowed for unknown fingerprints.
2865 def get_uid_from_fingerprint(fpr, session=None):
2866 q = session.query(Uid)
2867 q = q.join(Fingerprint).filter_by(fingerprint=fpr)
2871 except NoResultFound:
2874 __all__.append('get_uid_from_fingerprint')
2876 ################################################################################
# ORM class for the upload_blocks table: a block on uploads of a given
# source, tied to a Fingerprint and Uid (see the UploadBlock mapper).
2878 class UploadBlock(object):
2879 def __init__(self, *args, **kwargs):
# Identify the row by source and primary key (presumably __repr__).
2883 return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
2885 __all__.append('UploadBlock')
2887 ################################################################################
# Singleton-ish holder of the database engine, reflected tables and
# ORM mappers for the whole archive database.
2889 class DBConn(object):
2891 database module init.
# Borg pattern: every instance shares one __dict__ (__shared_state),
# so the engine/metadata setup below runs only on first construction.
2895 def __init__(self, *args, **kwargs):
2896 self.__dict__ = self.__shared_state
2898 if not getattr(self, 'initialised', False):
2899 self.initialised = True
# Passing debug=... turns on SQL echo when the engine is created.
2900 self.debug = kwargs.has_key('debug')
# Reflect every table and view from the live database into
# self.tbl_<name> / self.view_<name> SQLAlchemy Table objects.
2903 def __setuptables(self):
2904 tables_with_primary = (
2915 'changes_pending_binaries',
2916 'changes_pending_files',
2917 'changes_pending_source',
2927 'pending_bin_contents',
2939 # The following tables have primary keys but sqlalchemy
2940 # version 0.5 fails to reflect them correctly with database
2941 # versions before upgrade #41.
2943 #'build_queue_files',
2946 tables_no_primary = (
2948 'changes_pending_files_map',
2949 'changes_pending_source_files',
2950 'changes_pool_files',
2953 'suite_architectures',
2954 'suite_src_formats',
2955 'suite_build_queue_copy',
2957 # see the comment above
2959 'build_queue_files',
2963 'almost_obsolete_all_associations',
2964 'almost_obsolete_src_associations',
2965 'any_associations_source',
2966 'bin_assoc_by_arch',
2967 'bin_associations_binaries',
2968 'binaries_suite_arch',
2969 'binfiles_suite_component_arch',
2972 'newest_all_associations',
2973 'newest_any_associations',
2975 'newest_src_association',
2976 'obsolete_all_associations',
2977 'obsolete_any_associations',
2978 'obsolete_any_by_all_associations',
2979 'obsolete_src_associations',
2981 'src_associations_bin',
2982 'src_associations_src',
2983 'suite_arch_by_name',
2986 # Sqlalchemy version 0.5 fails to reflect the SERIAL type
2987 # correctly and that is why we have to use a workaround. It can
2988 # be removed as soon as we switch to version 0.6.
# Force an explicit integer 'id' primary-key column during reflection.
2989 for table_name in tables_with_primary:
2990 table = Table(table_name, self.db_meta, \
2991 Column('id', Integer, primary_key = True), \
2992 autoload=True, useexisting=True)
2993 setattr(self, 'tbl_%s' % table_name, table)
2995 for table_name in tables_no_primary:
2996 table = Table(table_name, self.db_meta, autoload=True)
2997 setattr(self, 'tbl_%s' % table_name, table)
# Views are reflected the same way, exposed as view_<name>.
2999 for view_name in views:
3000 view = Table(view_name, self.db_meta, autoload=True)
3001 setattr(self, 'view_%s' % view_name, view)
# Bind each ORM class in this module to its reflected table, wiring up
# relations, column aliases and (for classes needing validation) the
# shared 'validator' MapperExtension. Ordering mirrors the class
# definitions earlier in the file.
3003 def __setupmappers(self):
3004 mapper(Architecture, self.tbl_architecture,
3005 properties = dict(arch_id = self.tbl_architecture.c.id,
3006 suites = relation(Suite, secondary=self.tbl_suite_architectures,
3007 order_by='suite_name',
3008 backref=backref('architectures', order_by='arch_string'))),
3009 extension = validator)
3011 mapper(Archive, self.tbl_archive,
3012 properties = dict(archive_id = self.tbl_archive.c.id,
3013 archive_name = self.tbl_archive.c.name))
3015 mapper(BinAssociation, self.tbl_bin_associations,
3016 properties = dict(ba_id = self.tbl_bin_associations.c.id,
3017 suite_id = self.tbl_bin_associations.c.suite,
3018 suite = relation(Suite),
3019 binary_id = self.tbl_bin_associations.c.bin,
3020 binary = relation(DBBinary)))
3022 mapper(PendingBinContents, self.tbl_pending_bin_contents,
3023 properties = dict(contents_id =self.tbl_pending_bin_contents.c.id,
3024 filename = self.tbl_pending_bin_contents.c.filename,
3025 package = self.tbl_pending_bin_contents.c.package,
3026 version = self.tbl_pending_bin_contents.c.version,
3027 arch = self.tbl_pending_bin_contents.c.arch,
3028 otype = self.tbl_pending_bin_contents.c.type))
3030 mapper(DebContents, self.tbl_deb_contents,
3031 properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
3032 package=self.tbl_deb_contents.c.package,
3033 suite=self.tbl_deb_contents.c.suite,
3034 arch=self.tbl_deb_contents.c.arch,
3035 section=self.tbl_deb_contents.c.section,
3036 filename=self.tbl_deb_contents.c.filename))
3038 mapper(UdebContents, self.tbl_udeb_contents,
3039 properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
3040 package=self.tbl_udeb_contents.c.package,
3041 suite=self.tbl_udeb_contents.c.suite,
3042 arch=self.tbl_udeb_contents.c.arch,
3043 section=self.tbl_udeb_contents.c.section,
3044 filename=self.tbl_udeb_contents.c.filename))
3046 mapper(BuildQueue, self.tbl_build_queue,
3047 properties = dict(queue_id = self.tbl_build_queue.c.id))
3049 mapper(BuildQueueFile, self.tbl_build_queue_files,
3050 properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
3051 poolfile = relation(PoolFile, backref='buildqueueinstances')))
3053 mapper(DBBinary, self.tbl_binaries,
3054 properties = dict(binary_id = self.tbl_binaries.c.id,
3055 package = self.tbl_binaries.c.package,
3056 version = self.tbl_binaries.c.version,
3057 maintainer_id = self.tbl_binaries.c.maintainer,
3058 maintainer = relation(Maintainer),
3059 source_id = self.tbl_binaries.c.source,
3060 source = relation(DBSource),
3061 arch_id = self.tbl_binaries.c.architecture,
3062 architecture = relation(Architecture),
3063 poolfile_id = self.tbl_binaries.c.file,
3064 poolfile = relation(PoolFile),
3065 binarytype = self.tbl_binaries.c.type,
3066 fingerprint_id = self.tbl_binaries.c.sig_fpr,
3067 fingerprint = relation(Fingerprint),
3068 install_date = self.tbl_binaries.c.install_date,
3069 binassociations = relation(BinAssociation,
3070 primaryjoin=(self.tbl_binaries.c.id==self.tbl_bin_associations.c.bin))))
3072 mapper(BinaryACL, self.tbl_binary_acl,
3073 properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))
3075 mapper(BinaryACLMap, self.tbl_binary_acl_map,
3076 properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
3077 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
3078 architecture = relation(Architecture)))
3080 mapper(Component, self.tbl_component,
3081 properties = dict(component_id = self.tbl_component.c.id,
3082 component_name = self.tbl_component.c.name))
3084 mapper(DBConfig, self.tbl_config,
3085 properties = dict(config_id = self.tbl_config.c.id))
3087 mapper(DSCFile, self.tbl_dsc_files,
3088 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
3089 source_id = self.tbl_dsc_files.c.source,
3090 source = relation(DBSource),
3091 poolfile_id = self.tbl_dsc_files.c.file,
3092 poolfile = relation(PoolFile)))
3094 mapper(PoolFile, self.tbl_files,
3095 properties = dict(file_id = self.tbl_files.c.id,
3096 filesize = self.tbl_files.c.size,
3097 location_id = self.tbl_files.c.location,
3098 location = relation(Location,
3099 # using lazy='dynamic' in the back
3100 # reference because we have A LOT of
3101 # files in one location
3102 backref=backref('files', lazy='dynamic'))),
3103 extension = validator)
3105 mapper(Fingerprint, self.tbl_fingerprint,
3106 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
3107 uid_id = self.tbl_fingerprint.c.uid,
3108 uid = relation(Uid),
3109 keyring_id = self.tbl_fingerprint.c.keyring,
3110 keyring = relation(Keyring),
3111 source_acl = relation(SourceACL),
3112 binary_acl = relation(BinaryACL)),
3113 extension = validator)
3115 mapper(Keyring, self.tbl_keyrings,
3116 properties = dict(keyring_name = self.tbl_keyrings.c.name,
3117 keyring_id = self.tbl_keyrings.c.id))
# DBChange exposes most .changes columns plus pool-file and pending-file
# links through their respective association tables.
3119 mapper(DBChange, self.tbl_changes,
3120 properties = dict(change_id = self.tbl_changes.c.id,
3121 poolfiles = relation(PoolFile,
3122 secondary=self.tbl_changes_pool_files,
3123 backref="changeslinks"),
3124 seen = self.tbl_changes.c.seen,
3125 source = self.tbl_changes.c.source,
3126 binaries = self.tbl_changes.c.binaries,
3127 architecture = self.tbl_changes.c.architecture,
3128 distribution = self.tbl_changes.c.distribution,
3129 urgency = self.tbl_changes.c.urgency,
3130 maintainer = self.tbl_changes.c.maintainer,
3131 changedby = self.tbl_changes.c.changedby,
3132 date = self.tbl_changes.c.date,
3133 version = self.tbl_changes.c.version,
3134 files = relation(ChangePendingFile,
3135 secondary=self.tbl_changes_pending_files_map,
3136 backref="changesfile"),
3137 in_queue_id = self.tbl_changes.c.in_queue,
3138 in_queue = relation(PolicyQueue,
3139 primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
3140 approved_for_id = self.tbl_changes.c.approved_for))
3142 mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
3143 properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))
3145 mapper(ChangePendingFile, self.tbl_changes_pending_files,
3146 properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
3147 filename = self.tbl_changes_pending_files.c.filename,
3148 size = self.tbl_changes_pending_files.c.size,
3149 md5sum = self.tbl_changes_pending_files.c.md5sum,
3150 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
3151 sha256sum = self.tbl_changes_pending_files.c.sha256sum))
# Maintainer vs changedby need explicit primaryjoins because both
# columns point at the maintainer table.
3153 mapper(ChangePendingSource, self.tbl_changes_pending_source,
3154 properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
3155 change = relation(DBChange),
3156 maintainer = relation(Maintainer,
3157 primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
3158 changedby = relation(Maintainer,
3159 primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
3160 fingerprint = relation(Fingerprint),
3161 source_files = relation(ChangePendingFile,
3162 secondary=self.tbl_changes_pending_source_files,
3163 backref="pending_sources")))
3166 mapper(KeyringACLMap, self.tbl_keyring_acl_map,
3167 properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
3168 keyring = relation(Keyring, backref="keyring_acl_map"),
3169 architecture = relation(Architecture)))
3171 mapper(Location, self.tbl_location,
3172 properties = dict(location_id = self.tbl_location.c.id,
3173 component_id = self.tbl_location.c.component,
3174 component = relation(Component),
3175 archive_id = self.tbl_location.c.archive,
3176 archive = relation(Archive),
3177 # FIXME: the 'type' column is old cruft and
3178 # should be removed in the future.
3179 archive_type = self.tbl_location.c.type),
3180 extension = validator)
3182 mapper(Maintainer, self.tbl_maintainer,
3183 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
3184 maintains_sources = relation(DBSource, backref='maintainer',
3185 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
3186 changed_sources = relation(DBSource, backref='changedby',
3187 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
3188 extension = validator)
3190 mapper(NewComment, self.tbl_new_comments,
3191 properties = dict(comment_id = self.tbl_new_comments.c.id))
3193 mapper(Override, self.tbl_override,
3194 properties = dict(suite_id = self.tbl_override.c.suite,
3195 suite = relation(Suite),
3196 package = self.tbl_override.c.package,
3197 component_id = self.tbl_override.c.component,
3198 component = relation(Component),
3199 priority_id = self.tbl_override.c.priority,
3200 priority = relation(Priority),
3201 section_id = self.tbl_override.c.section,
3202 section = relation(Section),
3203 overridetype_id = self.tbl_override.c.type,
3204 overridetype = relation(OverrideType)))
3206 mapper(OverrideType, self.tbl_override_type,
3207 properties = dict(overridetype = self.tbl_override_type.c.type,
3208 overridetype_id = self.tbl_override_type.c.id))
3210 mapper(PolicyQueue, self.tbl_policy_queue,
3211 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))
3213 mapper(Priority, self.tbl_priority,
3214 properties = dict(priority_id = self.tbl_priority.c.id))
3216 mapper(Section, self.tbl_section,
3217 properties = dict(section_id = self.tbl_section.c.id,
3218 section=self.tbl_section.c.section))
3220 mapper(DBSource, self.tbl_source,
3221 properties = dict(source_id = self.tbl_source.c.id,
3222 version = self.tbl_source.c.version,
3223 maintainer_id = self.tbl_source.c.maintainer,
3224 poolfile_id = self.tbl_source.c.file,
3225 poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
3226 fingerprint_id = self.tbl_source.c.sig_fpr,
3227 fingerprint = relation(Fingerprint),
3228 changedby_id = self.tbl_source.c.changedby,
3229 srcfiles = relation(DSCFile,
3230 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
3231 suites = relation(Suite, secondary=self.tbl_src_associations,
3233 srcuploaders = relation(SrcUploader)),
3234 extension = validator)
3236 mapper(SourceACL, self.tbl_source_acl,
3237 properties = dict(source_acl_id = self.tbl_source_acl.c.id))
3239 mapper(SrcFormat, self.tbl_src_format,
3240 properties = dict(src_format_id = self.tbl_src_format.c.id,
3241 format_name = self.tbl_src_format.c.format_name))
3243 mapper(SrcUploader, self.tbl_src_uploaders,
3244 properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
3245 source_id = self.tbl_src_uploaders.c.source,
3246 source = relation(DBSource,
3247 primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
3248 maintainer_id = self.tbl_src_uploaders.c.maintainer,
3249 maintainer = relation(Maintainer,
3250 primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))
3252 mapper(Suite, self.tbl_suite,
3253 properties = dict(suite_id = self.tbl_suite.c.id,
3254 policy_queue = relation(PolicyQueue),
3255 copy_queues = relation(BuildQueue,
3256 secondary=self.tbl_suite_build_queue_copy)),
3257 extension = validator)
3259 mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
3260 properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
3261 suite = relation(Suite, backref='suitesrcformats'),
3262 src_format_id = self.tbl_suite_src_formats.c.src_format,
3263 src_format = relation(SrcFormat)))
3265 mapper(Uid, self.tbl_uid,
3266 properties = dict(uid_id = self.tbl_uid.c.id,
3267 fingerprint = relation(Fingerprint)),
3268 extension = validator)
3270 mapper(UploadBlock, self.tbl_upload_blocks,
3271 properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
3272 fingerprint = relation(Fingerprint, backref="uploadblocks"),
3273 uid = relation(Uid, backref="uploadblocks")))
3275 ## Connection functions
# Build the PostgreSQL engine from the DB::* config values, then
# reflect tables and set up mappers. With DB::Host set, a TCP
# host[:port]/dbname URL is built.
3276 def __createconn(self):
3277 from config import Config
3281 connstr = "postgres://%s" % cnf["DB::Host"]
3282 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3283 connstr += ":%s" % cnf["DB::Port"]
3284 connstr += "/%s" % cnf["DB::Name"]
# Presumably the no-DB::Host branch: connect over the local unix
# socket, with an optional ?port= hint.
3287 connstr = "postgres:///%s" % cnf["DB::Name"]
3288 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3289 connstr += "?port=%s" % cnf["DB::Port"]
# echo=self.debug mirrors the debug kwarg captured in __init__.
3291 self.db_pg = create_engine(connstr, echo=self.debug)
3292 self.db_meta = MetaData()
3293 self.db_meta.bind = self.db_pg
3294 self.db_smaker = sessionmaker(bind=self.db_pg,
# Reflection must precede mapper setup: the mappers reference tbl_*.
3298 self.__setuptables()
3299 self.__setupmappers()
# Hand out a fresh session from the shared sessionmaker.
3302 return self.db_smaker()
3304 __all__.append('DBConn')