5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
37 from os.path import normpath
48 import simplejson as json
50 from datetime import datetime, timedelta
51 from errno import ENOENT
52 from tempfile import mkstemp, mkdtemp
53 from subprocess import Popen, PIPE
54 from tarfile import TarFile
56 from inspect import getargspec
59 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
61 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
62 backref, MapperExtension, EXT_CONTINUE, object_mapper, clear_mappers
63 from sqlalchemy import types as sqltypes
65 # Don't remove this, we re-export the exceptions to scripts which import us
66 from sqlalchemy.exc import *
67 from sqlalchemy.orm.exc import NoResultFound
69 # Only import Config until Queue stuff is changed to store its config
71 from config import Config
72 from textutils import fix_maintainer
73 from dak_exceptions import DBUpdateError, NoSourceFieldError
75 # suppress some deprecation warnings in squeeze related to sqlalchemy
77 warnings.filterwarnings('ignore', \
78 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
82 ################################################################################
84 # Patch in support for the debversion field type so that it works during
88 # that is for sqlalchemy 0.6
# NOTE(review): in this view the try/except that selects between these two
# assignments is missing — as shown, the 0.5 fallback always wins.
UserDefinedType = sqltypes.UserDefinedType
# this one for sqlalchemy 0.5
UserDefinedType = sqltypes.TypeEngine

# Column type for PostgreSQL's "debversion" domain so SQLAlchemy can bind
# and reflect Debian version columns.
class DebVersion(UserDefinedType):
    def get_col_spec(self):
        # (method body not visible in this view)

    def bind_processor(self, dialect):
        # (method body not visible in this view)

    # ' = None' is needed for sqlalchemy 0.5:
    def result_processor(self, dialect, coltype = None):
        # (method body not visible in this view)

# Register the type under its PostgreSQL name on supported SQLA versions.
sa_major_version = sqlalchemy.__version__[0:3]
if sa_major_version in ["0.5", "0.6"]:
    from sqlalchemy.databases import postgres
    postgres.ischema_names['debversion'] = DebVersion
# NOTE(review): an "else:" line appears to be missing above this raise.
raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
112 ################################################################################
114 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
116 ################################################################################
def session_wrapper(fn):
    """
    Wrapper around common ".., session=None):" handling. If the wrapped
    function is called without passing 'session', we create a local one
    and destroy it when the function ends.

    Also attaches a commit_or_flush method to the session; if we created a
    local session, this is a synonym for session.commit(), otherwise it is a
    synonym for session.flush().
    """
    # NOTE(review): several control-flow lines (elif/else/try/finally and
    # the trailing "return wrapped") are not visible in this view; the
    # indentation below is a best-effort reconstruction.
    def wrapped(*args, **kwargs):
        private_transaction = False

        # Find the session object
        session = kwargs.get('session')

        # getargspec(fn)[0] is fn's positional parameter list; if the call
        # supplied fewer positionals than that, 'session' was omitted.
        if len(args) <= len(getargspec(fn)[0]) - 1:
            # No session specified as last argument or in kwargs
            private_transaction = True
            session = kwargs['session'] = DBConn().session()

            # Session is last argument in args
            session = args[-1] = DBConn().session()
            private_transaction = True

        if private_transaction:
            session.commit_or_flush = session.commit
            session.commit_or_flush = session.flush

            return fn(*args, **kwargs)

        if private_transaction:
            # We created a session; close it.

    # Preserve metadata so the wrapper looks like the wrapped function.
    wrapped.__doc__ = fn.__doc__
    wrapped.func_name = fn.func_name

__all__.append('session_wrapper')
167 ################################################################################
class ORMObject(object):
    """
    ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
    derived classes must implement the properties() method.
    """
    # NOTE(review): numerous lines of this class (several "def" lines,
    # branch bodies and return statements) are not visible in this view;
    # the fragments below keep their original text with reconstructed
    # indentation.

    def properties(self):
        """
        This method should be implemented by all derived classes and returns a
        list of the important properties. The properties 'created' and
        'modified' will be added automatically. A suffix '_count' should be
        added to properties that are lists or query objects. The most important
        property name should be returned as the first element in the list
        because it is used by repr().
        """

    # (def json(self): — not visible in this view)
        """
        Returns a JSON representation of the object based on the properties
        returned from the properties() method.
        """
        # add created and modified
        all_properties = self.properties() + ['created', 'modified']
        for property in all_properties:
            # check for list or query
            if property[-6:] == '_count':
                real_property = property[:-6]
                if not hasattr(self, real_property):
                value = getattr(self, real_property)
                if hasattr(value, '__len__'):
                elif hasattr(value, 'count'):
                    # query objects expose count()
                    value = value.count()
                raise KeyError('Do not understand property %s.' % property)
            if not hasattr(self, property):
            value = getattr(self, property)
            elif isinstance(value, ORMObject):
                # use repr() for ORMObject types
            # we want a string for all other types because json cannot
            data[property] = value
        return json.dumps(data)

    # (def classname(self): — not visible in this view)
        """
        Returns the name of the class.
        """
        return type(self).__name__

    # (def __repr__(self): — not visible in this view)
        """
        Returns a short string representation of the object using the first
        element from the properties() method.
        """
        primary_property = self.properties()[0]
        value = getattr(self, primary_property)
        return '<%s %s>' % (self.classname(), str(value))

    # (def __str__(self): — not visible in this view)
        """
        Returns a human readable form of the object using the properties()
        """
        return '<%s %s>' % (self.classname(), self.json())

    def not_null_constraints(self):
        """
        Returns a list of properties that must be not NULL. Derived classes
        should override this method if needed.
        """

    validation_message = \
        "Validation failed because property '%s' must not be empty in object\n%s"

    # (def validate(self): — not visible in this view)
        """
        This function validates the not NULL constraints as returned by
        not_null_constraints(). It raises the DBUpdateError exception if
        """
        for property in self.not_null_constraints():
            # TODO: It is a bit awkward that the mapper configuration allow
            # directly setting the numeric _id columns. We should get rid of it
            if hasattr(self, property + '_id') and \
                getattr(self, property + '_id') is not None:
            if not hasattr(self, property) or getattr(self, property) is None:
                raise DBUpdateError(self.validation_message % \
                    (property, str(self)))

    # (a @classmethod decorator line is presumably above — not visible here)
    def get(cls, primary_key, session = None):
        """
        This is a support function that allows getting an object by its primary

        Architecture.get(3[, session])

        instead of the more verbose

        session.query(Architecture).get(3)
        """
        return session.query(cls).get(primary_key)

    def session(self, replace = False):
        """
        Returns the current session that is associated with the object. May
        return None is object is in detached state.
        """
        return object_session(self)

    def clone(self, session = None):
        """
        Clones the current object in a new session and returns the new clone. A
        fresh session is created if the optional session parameter is not
        provided. The function will fail if a session is provided and has

        RATIONALE: SQLAlchemy's session is not thread safe. This method clones
        an existing object to allow several threads to work with their own
        instances of an ORMObject.

        WARNING: Only persistent (committed) objects can be cloned. Changes
        made to the original object that are not committed yet will get lost.
        The session of the new object will always be rolled back to avoid
        """
        if self.session() is None:
            raise RuntimeError( \
                'Method clone() failed for detached object:\n%s' % self)
        # Flush so the clone sees the object's current committed state.
        self.session().flush()
        mapper = object_mapper(self)
        primary_key = mapper.primary_key_from_instance(self)
        object_class = self.__class__
        # NOTE(review): the "if session is None:" guard above this line is
        # not visible in this view.
        session = DBConn().session()
        elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
            raise RuntimeError( \
                'Method clone() failed due to unflushed changes in session.')
        new_object = session.query(object_class).get(primary_key)

        if new_object is None:
            raise RuntimeError( \
                'Method clone() failed for non-persistent object:\n%s' % self)

__all__.append('ORMObject')
337 ################################################################################
class Validator(MapperExtension):
    """
    This class calls the validate() method for each instance for the
    'before_update' and 'before_insert' events. A global object validator is
    used for configuring the individual mappers.
    """
    # NOTE(review): both method bodies (presumably instance.validate() and
    # "return EXT_CONTINUE") are not visible in this view.

    def before_update(self, mapper, connection, instance):

    def before_insert(self, mapper, connection, instance):

# Shared instance used when configuring the individual mappers.
validator = Validator()
356 ################################################################################
class Architecture(ORMObject):
    """ORM class for one architecture row (e.g. 'i386', 'source')."""

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Support comparing directly against a plain string; for anything
        # else, signal that the normal comparison operator should be used.
        if not isinstance(val, str):
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        # Mirror of __eq__ for the inequality operator.
        if not isinstance(val, str):
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # 'arch_string' comes first because ORMObject.repr() uses the
        # first element of this list.
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']

__all__.append('Architecture')
def get_architecture(architecture, session=None):
    """
    Returns database id for given C{architecture}.

    @type architecture: string
    @param architecture: The name of the architecture

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Architecture object for the given arch (None if not present)
    """
    # NOTE(review): the @session_wrapper decorator, the try: line and the
    # success/except return statements are not visible in this view.
    q = session.query(Architecture).filter_by(arch_string=architecture)

    except NoResultFound:

__all__.append('get_architecture')
# TODO: should be removed because the implementation is too trivial
# NOTE(review): a decorator line (presumably @session_wrapper) between the
# comment above and the def below is not visible in this view.

def get_architecture_suites(architecture, session=None):
    """
    Returns list of Suite objects for given C{architecture} name

    @type architecture: str
    @param architecture: Architecture name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: list of Suite objects for the given name (may be empty)
    """
    # Look the architecture up and return its related suites collection.
    return get_architecture(architecture, session).suites

__all__.append('get_architecture_suites')
429 ################################################################################
class Archive(object):
    # Plain container class; attributes are presumably filled in by the
    # SQLAlchemy mapper (mapper setup not visible in this view).
    def __init__(self, *args, **kwargs):
        # (body not visible in this view)

    # (def __repr__(self): — not visible in this view)
        return '<Archive %s>' % self.archive_name

__all__.append('Archive')
def get_archive(archive, session=None):
    """
    returns database id for given C{archive}.

    @type archive: string
    @param archive: the name of the archive

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Archive object for the given name (None if not present)
    """
    # Archive names are matched lower-case.
    archive = archive.lower()

    # NOTE(review): the decorator, try: line and return statements are not
    # visible in this view.
    q = session.query(Archive).filter_by(archive_name=archive)

    except NoResultFound:

__all__.append('get_archive')
467 ################################################################################
class BinContents(ORMObject):
    # Link between a contained file name and a binary package.
    def __init__(self, file = None, binary = None):
        # (attribute assignments not visible in this view)

    def properties(self):
        return ['file', 'binary']

__all__.append('BinContents')
479 ################################################################################
class DBBinary(ORMObject):
    # NOTE(review): several lines of this class (the rest of __init__'s
    # signature, parts of not_null_constraints and of scan_contents) are
    # not visible in this view; fragments keep their original text.
    def __init__(self, package = None, source = None, version = None, \
        maintainer = None, architecture = None, poolfile = None, \
        self.package = package
        self.version = version
        self.maintainer = maintainer
        self.architecture = architecture
        self.poolfile = poolfile
        self.binarytype = binarytype

    def properties(self):
        # 'package' first: ORMObject.repr() uses the first element.
        return ['package', 'version', 'maintainer', 'source', 'architecture', \
            'poolfile', 'binarytype', 'fingerprint', 'install_date', \
            'suites_count', 'binary_id', 'contents_count', 'extra_sources']

    def not_null_constraints(self):
        return ['package', 'version', 'maintainer', 'source', 'poolfile', \

    def get_component_name(self):
        # The component is derived from the pool file's location.
        return self.poolfile.location.component.component_name

    def scan_contents(self):
        """
        Yields the contents of the package. Only regular files are yielded and
        the path names are normalized after converting them from either utf-8
        or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the
        package does not contain any regular file.
        """
        fullpath = self.poolfile.fullpath
        # Stream the deb's file list via dpkg-deb instead of unpacking it.
        dpkg = Popen(['dpkg-deb', '--fsys-tarfile', fullpath], stdout = PIPE)
        tar = TarFile.open(fileobj = dpkg.stdout, mode = 'r|')
        for member in tar.getmembers():
            if not member.isdir():
                name = normpath(member.name)
                # enforce proper utf-8 encoding
                except UnicodeDecodeError:
                    name = name.decode('iso8859-1').encode('utf-8')
528 def read_control(self):
530 Reads the control information from a binary.
533 @return: (stanza, controldict) stanza is the text of the control
534 section. controldict is the information in a dictionary
537 import apt_inst, apt_pk
538 fullpath = self.poolfile.fullpath
539 deb_file = open(fullpath, 'r')
540 stanza = apt_inst.debExtractControl(deb_file).rstrip()
541 control = dict(apt_pkg.TagSection(stanza))
544 return stanza, control
547 __all__.append('DBBinary')
def get_suites_binary_in(package, session=None):
    """
    Returns list of Suite objects which given C{package} name is in

    @param package: DBBinary package name to search for

    @return: list of Suite objects for the given package
    """
    # Any suite containing a binary with this package name qualifies.
    return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()

__all__.append('get_suites_binary_in')
def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
    """
    Returns the component name of the newest binary package in suite_list or
    None if no package is found. The result can be optionally filtered by a list
    of architecture names.

    @param package: DBBinary package name to search for

    @type suite_list: list of str
    @param suite_list: list of suite_name items

    @type arch_list: list of str
    @param arch_list: optional list of arch_string items that defaults to []

    @rtype: str or NoneType
    @return: name of component or None
    """
    # NOTE(review): arch_list is a mutable default argument; it is only
    # read here, but this is a known Python pitfall — consider None.
    q = session.query(DBBinary).filter_by(package = package). \
        join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
    if len(arch_list) > 0:
        q = q.join(DBBinary.architecture). \
            filter(Architecture.arch_string.in_(arch_list))
    # Newest version wins.
    binary = q.order_by(desc(DBBinary.version)).first()
    # NOTE(review): the "binary is None" guard documented above is not
    # visible in this view.
    return binary.get_component_name()

__all__.append('get_component_by_package_suite')
598 ################################################################################
class BinaryACL(object):
    # Plain container class; attributes presumably filled in by the mapper.
    def __init__(self, *args, **kwargs):
        # (body not visible in this view)

    # (def __repr__(self): — not visible in this view)
        return '<BinaryACL %s>' % self.binary_acl_id

__all__.append('BinaryACL')
609 ################################################################################
class BinaryACLMap(object):
    # Plain container class; attributes presumably filled in by the mapper.
    def __init__(self, *args, **kwargs):
        # (body not visible in this view)

    # (def __repr__(self): — not visible in this view)
        return '<BinaryACLMap %s>' % self.binary_acl_map_id

__all__.append('BinaryACLMap')
620 ################################################################################
625 ArchiveDir "%(archivepath)s";
626 OverrideDir "%(overridedir)s";
627 CacheDir "%(cachedir)s";
632 Packages::Compress ". bzip2 gzip";
633 Sources::Compress ". bzip2 gzip";
638 bindirectory "incoming"
643 BinOverride "override.sid.all3";
644 BinCacheDB "packages-accepted.db";
646 FileList "%(filelist)s";
649 Packages::Extensions ".deb .udeb";
652 bindirectory "incoming/"
655 BinOverride "override.sid.all3";
656 SrcOverride "override.sid.all3.src";
657 FileList "%(filelist)s";
class BuildQueue(object):
    # Represents a build queue directory and its metadata generation.
    # NOTE(review): many lines of this class (try/except/finally blocks,
    # loop headers, file closes and returns) are not visible in this view;
    # fragments keep their original text with reconstructed indentation.
    def __init__(self, *args, **kwargs):

    # (def __repr__(self): — not visible in this view)
        return '<BuildQueue %s>' % self.queue_name

    def write_metadata(self, starttime, force=False):
        # Do we write out metafiles?
        if not (force or self.generate_metadata):

        session = DBConn().session().object_session(self)

        fl_fd = fl_name = ac_fd = ac_name = None

        # All known architectures except 'source'.
        arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
        startdir = os.getcwd()

        # Grab files we want to include
        newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
        # Write file list with newer files
        (fl_fd, fl_name) = mkstemp()
        os.write(fl_fd, '%s\n' % n.fullpath)

        # Write minimal apt.conf
        # TODO: Remove hardcoding from template
        (ac_fd, ac_name) = mkstemp()
        os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
            'cachedir': cnf["Dir::Cache"],
            'overridedir': cnf["Dir::Override"],

        # Run apt-ftparchive generate
        os.chdir(os.path.dirname(ac_name))
        os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))

        # Run apt-ftparchive release
        # TODO: Eww - fix this
        bname = os.path.basename(self.path)

        # We have to remove the Release file otherwise it'll be included in the
        os.unlink(os.path.join(bname, 'Release'))

        os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))

        # Crude hack with open and append, but this whole section is and should be redone.
        if self.notautomatic:
            release=open("Release", "a")
            release.write("NotAutomatic: yes")

        # Sign if necessary.
        keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
        if cnf.has_key("Dinstall::SigningPubKeyring"):
            keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]

        os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))

        # Move the files if we got this far
        os.rename('Release', os.path.join(bname, 'Release'))
        os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))

        # Clean up any left behind files

    def clean_and_update(self, starttime, Logger, dryrun=False):
        """WARNING: This routine commits for you"""
        session = DBConn().session().object_session(self)

        if self.generate_metadata and not dryrun:
            self.write_metadata(starttime)

        # Grab files older than our execution time
        older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()

        Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
        Logger.log(["I: Removing %s from the queue" % o.fullpath])
        os.unlink(o.fullpath)

        # If it wasn't there, don't worry
        if e.errno == ENOENT:

        # TODO: Replace with proper logging call
        Logger.log(["E: Could not remove %s" % o.fullpath])

        # Remove index/metadata files that no longer have a DB entry.
        for f in os.listdir(self.path):
            if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):

                r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
            except NoResultFound:
                fp = os.path.join(self.path, f)
                Logger.log(["I: Would remove unused link %s" % fp])
                Logger.log(["I: Removing unused link %s" % fp])
                Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])

    def add_file_from_pool(self, poolfile):
        """Copies a file into the pool. Assumes that the PoolFile object is
        attached to the same SQLAlchemy session as the Queue object is.

        The caller is responsible for committing after calling this function."""
        poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]

        # Check if we have a file of this name or this ID already
        for f in self.queuefiles:
            if f.fileid is not None and f.fileid == poolfile.file_id or \
                f.poolfile.filename == poolfile_basename:
                # In this case, update the BuildQueueFile entry so we
                # don't remove it too early
                f.lastused = datetime.now()
                DBConn().session().object_session(poolfile).add(f)

        # Prepare BuildQueueFile object
        qf = BuildQueueFile()
        qf.build_queue_id = self.queue_id
        qf.lastused = datetime.now()
        qf.filename = poolfile_basename

        targetpath = poolfile.fullpath
        queuepath = os.path.join(self.path, poolfile_basename)

        # We need to copy instead of symlink
        utils.copy(targetpath, queuepath)
        # NULL in the fileid field implies a copy
        os.symlink(targetpath, queuepath)
        qf.fileid = poolfile.file_id

        # Get the same session as the PoolFile is using and add the qf to it
        DBConn().session().object_session(poolfile).add(qf)

__all__.append('BuildQueue')
def get_build_queue(queuename, session=None):
    """
    Returns BuildQueue object for given C{queue name}, creating it if it does not

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: BuildQueue object for the given queue
    """
    # NOTE(review): the decorator, try: line and return statements are not
    # visible in this view.
    q = session.query(BuildQueue).filter_by(queue_name=queuename)

    except NoResultFound:

__all__.append('get_build_queue')
888 ################################################################################
class BuildQueueFile(object):
    # One file that lives in a build queue directory.
    def __init__(self, *args, **kwargs):
        # (body not visible in this view)

    # (def __repr__(self): — not visible in this view)
        return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)

    # (the fullpath property definition is not visible in this view)
        return os.path.join(self.buildqueue.path, self.filename)

__all__.append('BuildQueueFile')
904 ################################################################################
class ChangePendingBinary(object):
    # Plain container class; attributes presumably filled in by the mapper.
    def __init__(self, *args, **kwargs):
        # (body not visible in this view)

    # (def __repr__(self): — not visible in this view)
        return '<ChangePendingBinary %s>' % self.change_pending_binary_id

__all__.append('ChangePendingBinary')
915 ################################################################################
class ChangePendingFile(object):
    # Plain container class; attributes presumably filled in by the mapper.
    def __init__(self, *args, **kwargs):
        # (body not visible in this view)

    # (def __repr__(self): — not visible in this view)
        return '<ChangePendingFile %s>' % self.change_pending_file_id

__all__.append('ChangePendingFile')
926 ################################################################################
class ChangePendingSource(object):
    # Plain container class; attributes presumably filled in by the mapper.
    def __init__(self, *args, **kwargs):
        # (body not visible in this view)

    # (def __repr__(self): — not visible in this view)
        return '<ChangePendingSource %s>' % self.change_pending_source_id

__all__.append('ChangePendingSource')
937 ################################################################################
class Component(ORMObject):
    """ORM class for one archive component row (e.g. 'main', 'contrib')."""

    def __init__(self, component_name = None):
        self.component_name = component_name

    def __eq__(self, val):
        # Allow comparing directly against a plain string; for anything
        # else, signal that the normal comparison operator should be used.
        if not isinstance(val, str):
            return NotImplemented
        return self.component_name == val

    def __ne__(self, val):
        # Mirror of __eq__ for the inequality operator.
        if not isinstance(val, str):
            return NotImplemented
        return self.component_name != val

    def properties(self):
        # 'component_name' comes first because ORMObject.repr() uses the
        # first element of this list.
        return ['component_name', 'component_id', 'description', \
            'location_count', 'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        return ['component_name']

__all__.append('Component')
def get_component(component, session=None):
    """
    Returns database id for given C{component}.

    @type component: string
    @param component: The name of the override type

    @return: the database id for the given component
    """
    # Component names are matched lower-case.
    component = component.lower()

    # NOTE(review): the decorator, try: line and return statements are not
    # visible in this view.
    q = session.query(Component).filter_by(component_name=component)

    except NoResultFound:

__all__.append('get_component')
988 ################################################################################
class DBConfig(object):
    # Plain container class; attributes presumably filled in by the mapper.
    def __init__(self, *args, **kwargs):
        # (body not visible in this view)

    # (def __repr__(self): — not visible in this view)
        return '<DBConfig %s>' % self.name

__all__.append('DBConfig')
999 ################################################################################
def get_or_set_contents_file_id(filename, session=None):
    """
    Returns database id for given filename.

    If no matching file is found, a row is inserted.

    @type filename: string
    @param filename: The filename
    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @return: the database id for the given filename
    """
    q = session.query(ContentFilename).filter_by(filename=filename)

    # NOTE(review): the try: line, session.add and the final return are not
    # visible in this view.
    ret = q.one().cafilename_id
    except NoResultFound:
        cf = ContentFilename()
        cf.filename = filename
        session.commit_or_flush()
        ret = cf.cafilename_id

__all__.append('get_or_set_contents_file_id')
def get_contents(suite, overridetype, section=None, session=None):
    """
    Returns contents for a suite / overridetype combination, limiting
    to a section if not None.

    @param suite: Suite object

    @type overridetype: OverrideType
    @param overridetype: OverrideType object

    @type section: Section
    @param section: Optional section object to limit results to

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: ResultsProxy
    @return: ResultsProxy object set up to return tuples of (filename, section,
    """
    # find me all of the contents for a given suite
    # NOTE(review): part of the SELECT column list is not visible in this
    # view.
    contents_q = """SELECT (p.path||'/'||n.file) AS fn,
        FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
        JOIN content_file_names n ON (c.filename=n.id)
        JOIN binaries b ON (b.id=c.binary_pkg)
        JOIN override o ON (o.package=b.package)
        JOIN section s ON (s.id=o.section)
        WHERE o.suite = :suiteid AND o.type = :overridetypeid
        AND b.type=:overridetypename"""

    vals = {'suiteid': suite.suite_id,
        'overridetypeid': overridetype.overridetype_id,
        'overridetypename': overridetype.overridetype}

    # Optionally narrow down to a single section.
    if section is not None:
        contents_q += " AND s.id = :sectionid"
        vals['sectionid'] = section.section_id

    contents_q += " ORDER BY fn"

    return session.execute(contents_q, vals)

__all__.append('get_contents')
1085 ################################################################################
class ContentFilepath(object):
    # Plain container class; attributes presumably filled in by the mapper.
    def __init__(self, *args, **kwargs):
        # (body not visible in this view)

    # (def __repr__(self): — not visible in this view)
        return '<ContentFilepath %s>' % self.filepath

__all__.append('ContentFilepath')
def get_or_set_contents_path_id(filepath, session=None):
    """
    Returns database id for given path.

    If no matching file is found, a row is inserted.

    @type filepath: string
    @param filepath: The filepath

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @return: the database id for the given path
    """
    q = session.query(ContentFilepath).filter_by(filepath=filepath)

    # NOTE(review): the try: line, session.add and the final return are not
    # visible in this view.
    ret = q.one().cafilepath_id
    except NoResultFound:
        cf = ContentFilepath()
        cf.filepath = filepath
        session.commit_or_flush()
        ret = cf.cafilepath_id

__all__.append('get_or_set_contents_path_id')
1130 ################################################################################
class ContentAssociation(object):
    # Plain container class; attributes presumably filled in by the mapper.
    def __init__(self, *args, **kwargs):
        # (body not visible in this view)

    # (def __repr__(self): — not visible in this view)
        return '<ContentAssociation %s>' % self.ca_id

__all__.append('ContentAssociation')
def insert_content_paths(binary_id, fullpaths, session=None):
    """
    Make sure given path is associated with given binary id

    @type binary_id: int
    @param binary_id: the id of the binary
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session. If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code. If not passed, a commit
    will be performed at the end of the function, otherwise the caller is
    responsible for commiting.

    @return: True upon success
    """
    privatetrans = False
    # NOTE(review): the "if session is None:" guard and the surrounding
    # try/except/commit/return lines are not visible in this view.
    session = DBConn().session()

    def generate_path_dicts():
        # Strip a leading './' so paths are stored in canonical form.
        for fullpath in fullpaths:
            if fullpath.startswith( './' ):
                fullpath = fullpath[2:]

            yield {'filename':fullpath, 'id': binary_id }

    for d in generate_path_dicts():
        session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",

    traceback.print_exc()

    # Only rollback if we set up the session ourself

__all__.append('insert_content_paths')
1194 ################################################################################
class DSCFile(object):
    # Plain container class; attributes presumably filled in by the mapper.
    def __init__(self, *args, **kwargs):
        # (body not visible in this view)

    # (def __repr__(self): — not visible in this view)
        return '<DSCFile %s>' % self.dscfile_id

__all__.append('DSCFile')
def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
    """
    Returns a list of DSCFiles which may be empty

    @type dscfile_id: int (optional)
    @param dscfile_id: the dscfile_id of the DSCFiles to find

    @type source_id: int (optional)
    @param source_id: the source id related to the DSCFiles to find

    @type poolfile_id: int (optional)
    @param poolfile_id: the poolfile id related to the DSCFiles to find

    @return: Possibly empty list of DSCFiles
    """
    # Build the query incrementally, filtering only on the supplied ids.
    q = session.query(DSCFile)

    if dscfile_id is not None:
        q = q.filter_by(dscfile_id=dscfile_id)

    if source_id is not None:
        q = q.filter_by(source_id=source_id)

    if poolfile_id is not None:
        q = q.filter_by(poolfile_id=poolfile_id)
    # NOTE(review): the final return (presumably q.all()) is not visible in
    # this view.

__all__.append('get_dscfiles')
1238 ################################################################################
class PoolFile(ORMObject):
    # NOTE(review): the rest of __init__'s signature/body and the property
    # definition for fullpath are not visible in this view.
    def __init__(self, filename = None, location = None, filesize = -1, \
        self.filename = filename
        self.location = location
        self.filesize = filesize
        self.md5sum = md5sum

        return os.path.join(self.location.path, self.filename)

    def is_valid(self, filesize = -1, md5sum = None):
        # A pool file is valid when both size and md5sum match the record.
        return self.filesize == long(filesize) and self.md5sum == md5sum

    def properties(self):
        # 'filename' first: ORMObject.repr() uses the first element.
        return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
            'sha256sum', 'location', 'source', 'binary', 'last_used']

    def not_null_constraints(self):
        return ['filename', 'md5sum', 'location']

__all__.append('PoolFile')
def check_poolfile(filename, filesize, md5sum, location_id, session=None):
    """
    Returns a tuple:
    (ValidFileFound [boolean], PoolFile object or None)

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @type filesize: int
    @param filesize: the size of the file to check against the DB

    @type md5sum: string
    @param md5sum: the md5sum of the file to check against the DB

    @type location_id: int
    @param location_id: the id of the location to look in

    @rtype: tuple
    @return: Tuple of length 2.
                 - If valid pool file found: (C{True}, C{PoolFile object})
                 - If valid pool file not found:
                     - (C{False}, C{None}) if no file found
                     - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
    """

    # Look the file up via the location's files collection so that only
    # files in the requested location are considered.
    poolfile = session.query(Location).get(location_id). \
        files.filter_by(filename=filename).first()
    if poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum):
    return (valid, poolfile)

__all__.append('check_poolfile')
# TODO: the implementation can trivially be inlined at the place where the
# function is called
def get_poolfile_by_id(file_id, session=None):
    """
    Returns a PoolFile objects or None for the given id

    @type file_id: int
    @param file_id: the id of the file to look for

    @rtype: PoolFile or None
    @return: either the PoolFile object or None
    """

    # Query.get() returns None rather than raising when the id is unknown.
    return session.query(PoolFile).get(file_id)

__all__.append('get_poolfile_by_id')
def get_poolfile_like_name(filename, session=None):
    """
    Returns an array of PoolFile objects which are like the given name

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @rtype: list
    @return: array of PoolFile objects
    """

    # TODO: There must be a way of properly using bind parameters with %FOO%
    # Matches any pool file whose path ends in "/<filename>".
    q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))

__all__.append('get_poolfile_like_name')
def add_poolfile(filename, datadict, location_id, session=None):
    """
    Add a new file to the pool

    @type filename: string
    @param filename: filename

    @type datadict: dict
    @param datadict: dict with needed data (size, md5sum, sha1sum, sha256sum)

    @type location_id: int
    @param location_id: database id of the location

    @rtype: PoolFile
    @return: the PoolFile object created
    """
    poolfile = PoolFile()
    poolfile.filename = filename
    poolfile.filesize = datadict["size"]
    poolfile.md5sum = datadict["md5sum"]
    poolfile.sha1sum = datadict["sha1sum"]
    poolfile.sha256sum = datadict["sha256sum"]
    poolfile.location_id = location_id

    session.add(poolfile)
    # Flush to get a file id (NB: This is not a commit)

__all__.append('add_poolfile')
1370 ################################################################################
class Fingerprint(ORMObject):
    """A GPG key fingerprint, optionally associated with a keyring and a uid."""
    def __init__(self, fingerprint = None):
        self.fingerprint = fingerprint

    def properties(self):
        # Attributes exposed through the generic ORMObject representation.
        return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \

    def not_null_constraints(self):
        # Columns that must never be NULL in the database.
        return ['fingerprint']

__all__.append('Fingerprint')
def get_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).

    @rtype: Fingerprint or None
    @return: the Fingerprint object for the given fpr or None
    """

    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    except NoResultFound:

__all__.append('get_fingerprint')
def get_or_set_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    If no matching fpr is found, a row is inserted.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr
    """

    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    except NoResultFound:
        # Not found: create the row now so callers always get an object back.
        fingerprint = Fingerprint()
        fingerprint.fingerprint = fpr
        session.add(fingerprint)
        session.commit_or_flush()

__all__.append('get_or_set_fingerprint')
1447 ################################################################################
# Helper routine for Keyring class
def get_ldap_name(entry):
    # Build a display name from the cn (common name), mn (middle name) and
    # sn (surname) LDAP attributes, skipping empty and "-" placeholder values.
    for k in ["cn", "mn", "sn"]:
        if ret and ret[0] != "" and ret[0] != "-":
    return " ".join(name)
1458 ################################################################################
class Keyring(object):
    """A GPG keyring known to the archive, with helpers to load its keys
    and map fingerprints to uids (from the keyring itself or from LDAP)."""

    # Command line used to list keys; %s is replaced by the keyring path.
    # --fingerprint twice makes gpg print fingerprints for subkeys too.
    gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
                     " --with-colons --fingerprint --fingerprint"

    def __init__(self, *args, **kwargs):
        return '<Keyring %s>' % self.keyring_name

    def de_escape_gpg_str(self, txt):
        # gpg escapes non-ASCII bytes as \xNN in colon-format output;
        # split on those escapes and convert every odd element back to
        # the character it encodes.
        esclist = re.split(r'(\\x..)', txt)
        for x in range(1,len(esclist),2):
            esclist[x] = "%c" % (int(esclist[x][2:],16))
        return "".join(esclist)

    def parse_address(self, uid):
        """parses uid and returns a tuple of real name and email address"""
        (name, address) = email.Utils.parseaddr(uid)
        # Strip any parenthesised comment from the real-name part.
        name = re.sub(r"\s*[(].*[)]", "", name)
        name = self.de_escape_gpg_str(name)
        return (name, address)

    def load_keys(self, keyring):
        # Populate self.keys and self.fpr_lookup by parsing gpg's
        # --with-colons output for the given keyring file.
        if not self.keyring_id:
            raise Exception('Must be initialized with database information')
        k = os.popen(self.gpg_invocation % keyring, "r")
        for line in k.xreadlines():
            field = line.split(":")
            if field[0] == "pub":
                # New primary key: field 9 carries the uid string.
                (name, addr) = self.parse_address(field[9])
                self.keys[key]["email"] = addr
                self.keys[key]["name"] = name
                self.keys[key]["fingerprints"] = []
            elif key and field[0] == "sub" and len(field) >= 12:
                # Subkey: remember whether it has the signing ("s") capability.
                signingkey = ("s" in field[11])
            elif key and field[0] == "uid":
                (name, addr) = self.parse_address(field[9])
                # Prefer the first uid that actually contains an address.
                if "email" not in self.keys[key] and "@" in addr:
                    self.keys[key]["email"] = addr
                    self.keys[key]["name"] = name
            elif signingkey and field[0] == "fpr":
                # Fingerprint lines for signing-capable keys feed the
                # fingerprint -> key lookup table.
                self.keys[key]["fingerprints"].append(field[9])
                self.fpr_lookup[field[9]] = key

    def import_users_from_ldap(self, session):
        # Attach uid information to the loaded keys by querying the
        # configured LDAP directory for key fingerprints.
        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
        l = ldap.open(LDAPServer)
        # Anonymous bind is sufficient for this read-only search.
        l.simple_bind_s("","")
        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
               ["uid", "keyfingerprint", "cn", "mn", "sn"])

        ldap_fin_uid_id = {}
            uid = entry["uid"][0]
            name = get_ldap_name(entry)
            fingerprints = entry["keyFingerPrint"]
            for f in fingerprints:
                key = self.fpr_lookup.get(f, None)
                if key not in self.keys:
                    self.keys[key]["uid"] = uid

            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, name)
            byname[uid] = (keyid, name)

        # Both mappings are returned so callers can resolve in either direction.
        return (byname, byuid)

    def generate_users_from_keyring(self, format, session):
        # Synthesise uids for keys directly from the keyring contents;
        # 'format' is a %-template applied to the key's email address.
        for x in self.keys.keys():
            if "email" not in self.keys[x]:
                # Key without any usable email address: mark as invalid.
                self.keys[x]["uid"] = format % "invalid-uid"
                uid = format % self.keys[x]["email"]
                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, self.keys[x]["name"])
                byname[uid] = (keyid, self.keys[x]["name"])
                self.keys[x]["uid"] = uid

            uid = format % "invalid-uid"
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, "ungeneratable user id")
            byname[uid] = (keyid, "ungeneratable user id")

        return (byname, byuid)

__all__.append('Keyring')
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @rtype: Keyring
    @return: the Keyring object for this keyring
    """

    q = session.query(Keyring).filter_by(keyring_name=keyring)

    except NoResultFound:

__all__.append('get_keyring')
1604 ################################################################################
class KeyringACLMap(object):
    """ORM mapping linking a keyring to an ACL entry."""
    def __init__(self, *args, **kwargs):
        return '<KeyringACLMap %s>' % self.keyring_acl_map_id

__all__.append('KeyringACLMap')
1615 ################################################################################
class DBChange(object):
    """ORM mapping for an uploaded .changes file known to the database."""
    def __init__(self, *args, **kwargs):
        return '<DBChange %s>' % self.changesname

    def clean_from_queue(self):
        # Detach this changes entry from its policy queue and drop the
        # bookkeeping rows that tie it to files.
        session = DBConn().session().object_session(self)

        # Remove changes_pool_files entries

        # Remove changes_pending_files references

        # Clear out of queue
        self.in_queue = None
        self.approved_for_id = None

__all__.append('DBChange')
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return: DBChange object for the given filename (C{None} if not present)
    """
    q = session.query(DBChange).filter_by(changesname=filename)

    except NoResultFound:

__all__.append('get_dbchange')
1664 ################################################################################
class Location(ORMObject):
    """A location in the archive where pool files are stored."""
    def __init__(self, path = None, component = None):
        self.component = component
        # the column 'type' should go away, see comment at mapper
        self.archive_type = 'pool'

    def properties(self):
        # Attributes exposed through the generic ORMObject representation.
        return ['path', 'location_id', 'archive_type', 'component', \

    def not_null_constraints(self):
        # Columns that must never be NULL in the database.
        return ['path', 'archive_type']

__all__.append('Location')
def get_location(location, component=None, archive=None, session=None):
    """
    Returns Location object for the given combination of location, component
    and archive.

    @type location: string
    @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}

    @type component: string
    @param component: the component name (if None, no restriction applied)

    @type archive: string
    @param archive: the archive name (if None, no restriction applied)

    @rtype: Location / None
    @return: Either a Location object or None if one can't be found
    """

    q = session.query(Location).filter_by(path=location)

    # Component and archive restrictions are optional.
    if archive is not None:
        q = q.join(Archive).filter_by(archive_name=archive)

    if component is not None:
        q = q.join(Component).filter_by(component_name=component)

    except NoResultFound:

__all__.append('get_location')
1716 ################################################################################
class Maintainer(ORMObject):
    """A package maintainer (name plus email, as one string)."""
    def __init__(self, name = None):

    def properties(self):
        # Attributes exposed through the generic ORMObject representation.
        return ['name', 'maintainer_id']

    def not_null_constraints(self):

    def get_split_maintainer(self):
        # Split the stored "Name <email>" string into its parts via
        # fix_maintainer(); empty strings when no name is recorded.
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())

__all__.append('Maintainer')
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """

    q = session.query(Maintainer).filter_by(name=name)

    except NoResultFound:
        # Not found: create the row now so callers always get an object back.
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()

__all__.append('get_or_set_maintainer')
def get_maintainer(maintainer_id, session=None):
    """
    Return the name of the maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer
    @return: the Maintainer with this C{maintainer_id}
    """

    # Query.get() returns None rather than raising when the id is unknown.
    return session.query(Maintainer).get(maintainer_id)

__all__.append('get_maintainer')
1787 ################################################################################
class NewComment(object):
    """A ftpmaster comment attached to a package/version in the NEW queue."""
    def __init__(self, *args, **kwargs):
        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)

__all__.append('NewComment')
def has_new_comment(package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """

    q = session.query(NewComment)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    # Only existence matters, not the actual comments.
    return bool(q.count() > 0)

__all__.append('has_new_comment')
def get_new_comments(package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """

    # Each filter is optional: parameters left as None do not constrain the query.
    q = session.query(NewComment)
    if package is not None: q = q.filter_by(package=package)
    if version is not None: q = q.filter_by(version=version)
    if comment_id is not None: q = q.filter_by(comment_id=comment_id)

__all__.append('get_new_comments')
1857 ################################################################################
class Override(ORMObject):
    """An override entry: section/priority for a package in a suite/component."""
    def __init__(self, package = None, suite = None, component = None, overridetype = None, \
        section = None, priority = None):
        self.package = package
        self.component = component
        self.overridetype = overridetype
        self.section = section
        self.priority = priority

    def properties(self):
        # Attributes exposed through the generic ORMObject representation.
        return ['package', 'suite', 'component', 'overridetype', 'section', \

    def not_null_constraints(self):
        # Columns that must never be NULL in the database.
        return ['package', 'suite', 'component', 'overridetype', 'section']

__all__.append('Override')
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to.  If
                  None, don't limit.  Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
                      limit to.  If None, don't limit.  Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
                         limit to.  If None, don't limit.  Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """

    q = session.query(Override)
    q = q.filter_by(package=package)

    # suite/component/overridetype each accept a single name or a list of
    # names; a scalar is normalised into a one-element list first.
    if suite is not None:
        if not isinstance(suite, list): suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list): component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list): overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

__all__.append('get_override')
1926 ################################################################################
class OverrideType(ORMObject):
    """A type of override, e.g. I{deb}, I{udeb} or I{dsc}."""
    def __init__(self, overridetype = None):
        self.overridetype = overridetype

    def properties(self):
        # Attributes exposed through the generic ORMObject representation.
        return ['overridetype', 'overridetype_id', 'overrides_count']

    def not_null_constraints(self):
        # Columns that must never be NULL in the database.
        return ['overridetype']

__all__.append('OverrideType')
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: OverrideType
    @return: the database id for the given override type
    """
    q = session.query(OverrideType).filter_by(overridetype=override_type)

    except NoResultFound:

__all__.append('get_override_type')
1965 ################################################################################
class PolicyQueue(object):
    """A policy queue (e.g. NEW) through which uploads may be routed."""
    def __init__(self, *args, **kwargs):
        return '<PolicyQueue %s>' % self.queue_name

__all__.append('PolicyQueue')
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(queue_name=queuename)

    except NoResultFound:

__all__.append('get_policy_queue')
def get_policy_queue_from_path(pathname, session=None):
    """
    Returns PolicyQueue object for given C{path name}

    @type pathname: string
    @param pathname: The path

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(path=pathname)

    except NoResultFound:

__all__.append('get_policy_queue_from_path')
2026 ################################################################################
class Priority(ORMObject):
    """A package priority (e.g. I{required}, I{optional}) with sort level."""
    def __init__(self, priority = None, level = None):
        self.priority = priority

    def properties(self):
        # Attributes exposed through the generic ORMObject representation.
        return ['priority', 'priority_id', 'level', 'overrides_count']

    def not_null_constraints(self):
        # Columns that must never be NULL in the database.
        return ['priority', 'level']

    def __eq__(self, val):
        # Allow direct comparison against a priority name string.
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        # Allow direct comparison against a priority name string.
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented

__all__.append('Priority')
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority
    """

    q = session.query(Priority).filter_by(priority=priority)

    except NoResultFound:

__all__.append('get_priority')
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """

    q = session.query(Priority)
        ret[x.priority] = x.priority_id

__all__.append('get_priorities')
2100 ################################################################################
class Section(ORMObject):
    """A package section (e.g. I{admin}, I{libs})."""
    def __init__(self, section = None):
        self.section = section

    def properties(self):
        # Attributes exposed through the generic ORMObject representation.
        return ['section', 'section_id', 'overrides_count']

    def not_null_constraints(self):

    def __eq__(self, val):
        # Allow direct comparison against a section name string.
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        # Allow direct comparison against a section name string.
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented

__all__.append('Section')
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name
    """

    q = session.query(Section).filter_by(section=section)

    except NoResultFound:

__all__.append('get_section')
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """

    q = session.query(Section)
        ret[x.section] = x.section_id

__all__.append('get_sections')
2173 ################################################################################
class DBSource(ORMObject):
    """A source package version known to the archive."""
    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date

    def properties(self):
        # Attributes exposed through the generic ORMObject representation.
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \
            'install_date', 'binaries_count']

    def not_null_constraints(self):
        # Columns that must never be NULL in the database.
        # NOTE(review): 'install_date' is listed twice here — harmless but
        # redundant; the duplicate could be dropped.
        return ['source', 'version', 'install_date', 'maintainer', \
            'changedby', 'poolfile', 'install_date']

    def read_control(self):
        """
        Reads the control information from a dsc

        @rtype: tuple
        @return: (stanza, controldict)  stanza is the text of the control
                 section.  controldict is the information in a dictionary
                 form
        """
        from debian.debfile import Deb822
        fullpath = self.poolfile.fullpath
        fields = Deb822(open(self.poolfile.fullpath, 'r'))

__all__.append('DBSource')
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
      1. exact match     => 1.0-3
      2. bin-only NMU    => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: list
    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: returns 1 if a source with expected version is found, otherwise 0
    """

    from daklib.regexes import re_bin_only_nmu
    # A binNMU suffix (e.g. "+b1") is stripped so the matching source
    # version can be found.
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    for suite in suites:
        # Accept either the exact version or the binNMU-stripped one.
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))

            # source must exist in suite X, or in some other suite that's
            # mapped to X, recursively... silent-maps are counted too,
            # unreleased-maps aren't.
            maps = cnf.ValueList("SuiteMappings")[:]
            maps = [ m.split() for m in maps ]
            maps = [ (x[1], x[2]) for x in maps
                     if x[0] == "map" or x[0] == "silent-map" ]
                for (from_, to) in maps:
                    if from_ in s and to not in s:

            q = q.filter(DBSource.suites.any(Suite.suite_name.in_(s)))

        # No source found so return not ok

__all__.append('source_exists')
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """

    return session.query(Suite).filter(Suite.sources.any(source=source)).all()

__all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @type source: str
    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect.  If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBSource objects for the given name (may be empty)
    """

    q = session.query(DBSource).filter_by(source=source)

    # The version and dm_upload_allowed filters are optional.
    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)

__all__.append('get_sources_from_name')
# FIXME: This function fails badly if it finds more than 1 source package and
# its implementation is trivial enough to be inlined.
def get_source_in_suite(source, suite, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite: string
    @param suite: the suite name

    @rtype: string
    @return: the version for I{source} in I{suite}
    """

    q = get_suite(suite, session).get_sources(source)

    except NoResultFound:

__all__.append('get_source_in_suite')
2351 ################################################################################
def add_dsc_to_db(u, filename, session=None):
    """
    Insert a source package (.dsc upload) and its files into the database.

    @param u: the Upload object being processed
    @param filename: name of the .dsc file within u.pkg.files
    @param session: SQLA session to use

    @return: (source, dsc_component, dsc_location_id, pfs) where pfs is the
             list of PoolFile objects added or referenced by this upload
    """
    entry = u.pkg.files[filename]

    source.source = u.pkg.dsc["source"]
    source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
    source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
    source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    source.install_date = datetime.now().date()

    dsc_component = entry["component"]
    dsc_location_id = entry["location id"]

    source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")

    # Set up a new poolfile if necessary
    if not entry.has_key("files id") or not entry["files id"]:
        filename = entry["pool name"] + filename
        poolfile = add_poolfile(filename, entry, dsc_location_id, session)
        pfs.append(poolfile)
        entry["files id"] = poolfile.file_id

    source.poolfile_id = entry["files id"]

    # Associate the source with every suite named in the changes file.
    suite_names = u.pkg.changes["distribution"].keys()
    source.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Add the source files to the DB (files and dsc_files)
    dscfile.source_id = source.source_id
    dscfile.poolfile_id = entry["files id"]
    session.add(dscfile)

    for dsc_file, dentry in u.pkg.dsc_files.items():
        df.source_id = source.source_id

        # If the .orig tarball is already in the pool, it's
        # files id is stored in dsc_files by check_dsc().
        files_id = dentry.get("files id", None)

        # Find the entry in the files hash
        # TODO: Bail out here properly
        for f, e in u.pkg.files.items():

        if files_id is None:
            filename = dfentry["pool name"] + dsc_file

            (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
            # FIXME: needs to check for -1/-2 and or handle exception
            if found and obj is not None:
                files_id = obj.file_id

        # If still not found, add it
        if files_id is None:
            # HACK: Force sha1sum etc into dentry
            dentry["sha1sum"] = dfentry["sha1sum"]
            dentry["sha256sum"] = dfentry["sha256sum"]
            poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
            pfs.append(poolfile)
            files_id = poolfile.file_id

            poolfile = get_poolfile_by_id(files_id, session)
            if poolfile is None:
                utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
            pfs.append(poolfile)

        df.poolfile_id = files_id

    # Add the src_uploaders to the DB
    uploader_ids = [source.maintainer_id]
    if u.pkg.dsc.has_key("uploaders"):
        # Uploaders are separated by ">, "; re-split on the tab we insert.
        for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
            uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)

    for up_id in uploader_ids:
        if added_ids.has_key(up_id):
            # Duplicate uploader entries are warned about and skipped.
            utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))

        su.maintainer_id = up_id
        su.source_id = source.source_id

    return source, dsc_component, dsc_location_id, pfs

__all__.append('add_dsc_to_db')
2462 def add_deb_to_db(u, filename, session=None):
2464 Contrary to what you might expect, this routine deals with both
2465 debs and udebs. That info is in 'dbtype', whilst 'type' is
2466 'deb' for both of them
2469 entry = u.pkg.files[filename]
2472 bin.package = entry["package"]
2473 bin.version = entry["version"]
2474 bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
2475 bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
2476 bin.arch_id = get_architecture(entry["architecture"], session).arch_id
2477 bin.binarytype = entry["dbtype"]
2480 filename = entry["pool name"] + filename
2481 fullpath = os.path.join(cnf["Dir::Pool"], filename)
2482 if not entry.get("location id", None):
2483 entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id
2485 if entry.get("files id", None):
2486 poolfile = get_poolfile_by_id(bin.poolfile_id)
2487 bin.poolfile_id = entry["files id"]
2489 poolfile = add_poolfile(filename, entry, entry["location id"], session)
2490 bin.poolfile_id = entry["files id"] = poolfile.file_id
2493 bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
2494 if len(bin_sources) != 1:
2495 raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
2496 (bin.package, bin.version, entry["architecture"],
2497 filename, bin.binarytype, u.pkg.changes["fingerprint"])
2499 bin.source_id = bin_sources[0].source_id
2501 if entry.has_key("built-using"):
2502 for srcname, version in entry["built-using"]:
2503 exsources = get_sources_from_name(srcname, version, session=session)
2504 if len(exsources) != 1:
2505 raise NoSourceFieldError, "Unable to find source package (%s = %s) in Built-Using for %s (%s), %s, file %s, type %s, signed by %s" % \
2506 (srcname, version, bin.package, bin.version, entry["architecture"],
2507 filename, bin.binarytype, u.pkg.changes["fingerprint"])
2509 bin.extra_sources.append(exsources[0])
2511 # Add and flush object so it has an ID
2514 suite_names = u.pkg.changes["distribution"].keys()
2515 bin.suites = session.query(Suite). \
2516 filter(Suite.suite_name.in_(suite_names)).all()
2520 # Deal with contents - disabled for now
2521 #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
2523 # print "REJECT\nCould not determine contents of package %s" % bin.package
2524 # session.rollback()
2525 # raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
# Export add_deb_to_db (defined above; its body is only partially visible
# in this extract) as part of the module's public API.
2529 __all__.append('add_deb_to_db')
2531 ################################################################################
# ORM class for the source_acl table; instances are attached to
# Fingerprint via the relation set up in DBConn.__setupmappers.
# NOTE(review): this extract elides lines -- the "def __repr__(self):"
# header that owns the return below is missing from view; verify
# against the full file before editing.
2533 class SourceACL(object):
2534     def __init__(self, *args, **kwargs):
2538         return '<SourceACL %s>' % self.source_acl_id
2540 __all__.append('SourceACL')
2542 ################################################################################
# ORM class for the src_format table (allowed source package formats,
# mapped with src_format_id/format_name in DBConn.__setupmappers).
# NOTE(review): the "def __repr__(self):" header for the return below
# is elided from this extract.
2544 class SrcFormat(object):
2545     def __init__(self, *args, **kwargs):
2549         return '<SrcFormat %s>' % (self.format_name)
2551 __all__.append('SrcFormat')
2553 ################################################################################
# ORM class for the src_uploaders table (per-source uploader records;
# see the SrcUploader mapper below for source/maintainer relations).
# NOTE(review): the "def __repr__(self):" header for the return below
# is elided from this extract.
2555 class SrcUploader(object):
2556     def __init__(self, *args, **kwargs):
2560         return '<SrcUploader %s>' % self.uploader_id
2562 __all__.append('SrcUploader')
2564 ################################################################################
# (display name, Suite attribute) pairs; consumed by the Suite details
# loop below ("for disp, field in SUITE_FIELDS").
# NOTE(review): original line 2570 is elided from this extract, so at
# least one pair between 'Origin' and 'Description' is missing here.
2566 SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
2567                  ('SuiteID', 'suite_id'),
2568                  ('Version', 'version'),
2569                  ('Origin', 'origin'),
2571                  ('Description', 'description'),
2572                  ('Untouchable', 'untouchable'),
2573                  ('Announce', 'announce'),
2574                  ('Codename', 'codename'),
2575                  ('OverrideCodename', 'overridecodename'),
2576                  ('ValidTime', 'validtime'),
2577                  ('Priority', 'priority'),
2578                  ('NotAutomatic', 'notautomatic'),
2579                  ('CopyChanges', 'copychanges'),
2580                  ('OverrideSuite', 'overridesuite')]
2582 # Why the heck don't we have any UNIQUE constraints in table suite?
2583 # TODO: Add UNIQUE constraints for appropriate columns.
# ORM class for the suite table. Mapped in DBConn.__setupmappers with
# policy_queue and copy_queues relations; many-to-many backrefs add
# .architectures, .binaries, .sources and .overrides attributes.
# NOTE(review): several lines are elided in this extract (the
# properties() continuation line, the details() method header around
# original line 2608, and docstring delimiters) -- consult the full
# file before changing anything here.
2584 class Suite(ORMObject):
2585     def __init__(self, suite_name = None, version = None):
2586         self.suite_name = suite_name
2587         self.version = version
2589     def properties(self):
2590         return ['suite_name', 'version', 'sources_count', 'binaries_count', \
2593     def not_null_constraints(self):
2594         return ['suite_name', 'version']
     # Equality against a plain string compares the suite_name, so code
     # may write e.g. `suite == 'unstable'`.
2596     def __eq__(self, val):
2597         if isinstance(val, str):
2598             return (self.suite_name == val)
2599         # This signals to use the normal comparison operator
2600         return NotImplemented
2602     def __ne__(self, val):
2603         if isinstance(val, str):
2604             return (self.suite_name != val)
2605         # This signals to use the normal comparison operator
2606         return NotImplemented
     # Body of details() (header elided): renders one "Name: value" line
     # per SUITE_FIELDS entry.
2610         for disp, field in SUITE_FIELDS:
2611             val = getattr(self, field, None)
2613                 ret.append("%s: %s" % (disp, val))
2615         return "\n".join(ret)
2617     def get_architectures(self, skipsrc=False, skipall=False):
2619         Returns list of Architecture objects
2621         @type skipsrc: boolean
2622         @param skipsrc: Whether to skip returning the 'source' architecture entry
2625         @type skipall: boolean
2626         @param skipall: Whether to skip returning the 'all' architecture entry
2630         @return: list of Architecture objects for the given name (may be empty)
     # Query the architectures linked to this suite; the guarding
     # "if skipsrc:" / "if skipall:" lines are elided in this extract.
2633         q = object_session(self).query(Architecture).with_parent(self)
2635             q = q.filter(Architecture.arch_string != 'source')
2637             q = q.filter(Architecture.arch_string != 'all')
2638         return q.order_by(Architecture.arch_string).all()
2640     def get_sources(self, source):
2642         Returns a query object representing DBSource that is part of C{suite}.
2644           - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2646         @type source: string
2647         @param source: source package name
2649         @rtype: sqlalchemy.orm.query.Query
2650         @return: a query of DBSource
2654         session = object_session(self)
2655         return session.query(DBSource).filter_by(source = source). \
2658 __all__.append('Suite')
# NOTE(review): this extract elides the decorator and the try/return
# lines around the query; only the filter line and the except header
# are visible.
2661 def get_suite(suite, session=None):
2663     Returns Suite object for given C{suite name}.
2666     @param suite: The name of the suite
2668     @type session: Session
2669     @param session: Optional SQLA session object (a temporary one will be
2670     generated if not supplied)
2673     @return: Suite object for the requested suite name (None if not present)
2676     q = session.query(Suite).filter_by(suite_name=suite)
2680     except NoResultFound:
2683 __all__.append('get_suite')
2685 ################################################################################
2687 # TODO: should be removed because the implementation is too trivial
# Thin wrapper: delegates to Suite.get_architectures() for the named
# suite (hence the removal TODO above).
2689 def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
2691     Returns list of Architecture objects for given C{suite} name
2694     @param suite: Suite name to search for
2696     @type skipsrc: boolean
2697     @param skipsrc: Whether to skip returning the 'source' architecture entry
2700     @type skipall: boolean
2701     @param skipall: Whether to skip returning the 'all' architecture entry
2704     @type session: Session
2705     @param session: Optional SQL session object (a temporary one will be
2706     generated if not supplied)
2709     @return: list of Architecture objects for the given name (may be empty)
2712     return get_suite(suite, session).get_architectures(skipsrc, skipall)
2714 __all__.append('get_suite_architectures')
2716 ################################################################################
# ORM class for the suite_src_formats join table (suite <-> allowed
# source format); see the SuiteSrcFormat mapper below.
# NOTE(review): the "def __repr__(self):" header for the return below
# is elided from this extract.
2718 class SuiteSrcFormat(object):
2719     def __init__(self, *args, **kwargs):
2723         return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
2725 __all__.append('SuiteSrcFormat')
# NOTE(review): the decorator and the final return of the query result
# are elided from this extract.
2728 def get_suite_src_formats(suite, session=None):
2730     Returns list of allowed SrcFormat for C{suite}.
2733     @param suite: Suite name to search for
2735     @type session: Session
2736     @param session: Optional SQL session object (a temporary one will be
2737     generated if not supplied)
2740     @return: the list of allowed source formats for I{suite}
     # Join through the suite_src_formats association to the named suite.
2743     q = session.query(SrcFormat)
2744     q = q.join(SuiteSrcFormat)
2745     q = q.join(Suite).filter_by(suite_name=suite)
2746     q = q.order_by('format_name')
2750 __all__.append('get_suite_src_formats')
2752 ################################################################################
# ORM class for the uid table (GPG key owner identity); mapped with a
# Fingerprint relation in DBConn.__setupmappers.
# NOTE(review): the __init__ attribute assignments and the
# not_null_constraints return line are elided from this extract.
2754 class Uid(ORMObject):
2755     def __init__(self, uid = None, name = None):
     # Equality against a plain string compares the uid value, mirroring
     # Suite.__eq__ above.
2759     def __eq__(self, val):
2760         if isinstance(val, str):
2761             return (self.uid == val)
2762         # This signals to use the normal comparison operator
2763         return NotImplemented
2765     def __ne__(self, val):
2766         if isinstance(val, str):
2767             return (self.uid != val)
2768         # This signals to use the normal comparison operator
2769         return NotImplemented
2771     def properties(self):
2772         return ['uid', 'name', 'fingerprint']
2774     def not_null_constraints(self):
2777 __all__.append('Uid')
# NOTE(review): the decorator, the try/one() lookup and the insert path
# that precedes commit_or_flush are elided from this extract.
2780 def get_or_set_uid(uidname, session=None):
2782     Returns uid object for given uidname.
2784     If no matching uidname is found, a row is inserted.
2786     @type uidname: string
2787     @param uidname: The uid to add
2789     @type session: SQLAlchemy
2790     @param session: Optional SQL session object (a temporary one will be
2791     generated if not supplied).  If not passed, a commit will be performed at
2792     the end of the function, otherwise the caller is responsible for commiting.
2795     @return: the uid object for the given uidname
2798     q = session.query(Uid).filter_by(uid=uidname)
2802     except NoResultFound:
     # Newly-inserted row is committed/flushed per the session contract
     # described in the docstring above.
2806         session.commit_or_flush()
2811 __all__.append('get_or_set_uid')
# Look up the Uid owning the given fingerprint string.
# NOTE(review): the decorator, try: and both return lines are elided
# from this extract; presumably returns the single Uid or None -- verify
# against the full file.
2814 def get_uid_from_fingerprint(fpr, session=None):
2815     q = session.query(Uid)
2816     q = q.join(Fingerprint).filter_by(fingerprint=fpr)
2820     except NoResultFound:
2823 __all__.append('get_uid_from_fingerprint')
2825 ################################################################################
# ORM class for the upload_blocks table; mapped below with fingerprint
# and uid relations (backref "uploadblocks").
# NOTE(review): the "def __repr__(self):" header for the return below
# is elided from this extract.
2827 class UploadBlock(object):
2828     def __init__(self, *args, **kwargs):
2832         return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
2834 __all__.append('UploadBlock')
2836 ################################################################################
# ORM class for the metadata_keys table (key names shared by binary and
# source metadata; see the MetadataKey mapper below).
# NOTE(review): the __init__ body and both method return lines are
# elided from this extract.
2838 class MetadataKey(ORMObject):
2839     def __init__(self, key = None):
2842     def properties(self):
2845     def not_null_constraints(self):
2848 __all__.append('MetadataKey')
2850 ################################################################################
# ORM class for the binaries_metadata table (per-binary key/value
# metadata; see the BinaryMetadata mapper below).
# NOTE(review): the key/value assignments in __init__ and the
# not_null_constraints return are elided from this extract.
2852 class BinaryMetadata(ORMObject):
2853     def __init__(self, binary = None, key = None, value = None):
2854         self.binary = binary
2858     def properties(self):
2859         return ['binary', 'key', 'value']
2861     def not_null_constraints(self):
2864 __all__.append('BinaryMetadata')
2866 ################################################################################
# ORM class for the source_metadata table (per-source key/value
# metadata; structural twin of BinaryMetadata above).
# NOTE(review): the key/value assignments in __init__ and the
# not_null_constraints return are elided from this extract.
2868 class SourceMetadata(ORMObject):
2869     def __init__(self, source = None, key = None, value = None):
2870         self.source = source
2874     def properties(self):
2875         return ['source', 'key', 'value']
2877     def not_null_constraints(self):
2880 __all__.append('SourceMetadata')
2882 ################################################################################
# Borg-style singleton wrapping the archive database: reflects the
# tables/views, wires up all SQLAlchemy mappers, and hands out sessions.
# NOTE(review): this extract is heavily elided (most of the tables/views
# lists, several mapper property lines, the session() method header and
# the __shared_state declaration are missing) -- every comment below is
# limited to what the visible lines show.
2884 class DBConn(object):
2886     database module init.
     # Shared-state (Borg) pattern: all instances alias one __dict__, and
     # the engine/mappers are only set up on first construction.
2890     def __init__(self, *args, **kwargs):
2891         self.__dict__ = self.__shared_state
2893         if not getattr(self, 'initialised', False):
2894             self.initialised = True
2895             self.debug = kwargs.has_key('debug')
     # Reflect each listed table/view from the live database and expose
     # it as self.tbl_<name> / self.view_<name>.
2898     def __setuptables(self):
2905             'binaries_metadata',
2909             'build_queue_files',
2914             'changes_pending_binaries',
2915             'changes_pending_files',
2916             'changes_pending_source',
2917             'changes_pending_files_map',
2918             'changes_pending_source_files',
2919             'changes_pool_files',
2921             'extra_src_references',
2930             # TODO: the maintainer column in table override should be removed.
2943             'suite_architectures',
2944             'suite_build_queue_copy',
2945             'suite_src_formats',
2951             'almost_obsolete_all_associations',
2952             'almost_obsolete_src_associations',
2953             'any_associations_source',
2954             'bin_assoc_by_arch',
2955             'bin_associations_binaries',
2956             'binaries_suite_arch',
2957             'binfiles_suite_component_arch',
2960             'newest_all_associations',
2961             'newest_any_associations',
2963             'newest_src_association',
2964             'obsolete_all_associations',
2965             'obsolete_any_associations',
2966             'obsolete_any_by_all_associations',
2967             'obsolete_src_associations',
2969             'src_associations_bin',
2970             'src_associations_src',
2971             'suite_arch_by_name',
2974         for table_name in tables:
2975             table = Table(table_name, self.db_meta, \
2976                 autoload=True, useexisting=True)
2977             setattr(self, 'tbl_%s' % table_name, table)
2979         for view_name in views:
2980             view = Table(view_name, self.db_meta, autoload=True)
2981             setattr(self, 'view_%s' % view_name, view)
     # Classical (non-declarative) mapper configuration: one mapper()
     # call per ORM class defined earlier in this module.
2983     def __setupmappers(self):
2984         mapper(Architecture, self.tbl_architecture,
2985             properties = dict(arch_id = self.tbl_architecture.c.id,
2986                suites = relation(Suite, secondary=self.tbl_suite_architectures,
2987                    order_by='suite_name',
2988                    backref=backref('architectures', order_by='arch_string'))),
2989             extension = validator)
2991         mapper(Archive, self.tbl_archive,
2992                properties = dict(archive_id = self.tbl_archive.c.id,
2993                                  archive_name = self.tbl_archive.c.name))
2995         mapper(BuildQueue, self.tbl_build_queue,
2996                properties = dict(queue_id = self.tbl_build_queue.c.id))
2998         mapper(BuildQueueFile, self.tbl_build_queue_files,
2999                properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
3000                                  poolfile = relation(PoolFile, backref='buildqueueinstances')))
3002         mapper(DBBinary, self.tbl_binaries,
3003                properties = dict(binary_id = self.tbl_binaries.c.id,
3004                                  package = self.tbl_binaries.c.package,
3005                                  version = self.tbl_binaries.c.version,
3006                                  maintainer_id = self.tbl_binaries.c.maintainer,
3007                                  maintainer = relation(Maintainer),
3008                                  source_id = self.tbl_binaries.c.source,
3009                                  source = relation(DBSource, backref='binaries'),
3010                                  arch_id = self.tbl_binaries.c.architecture,
3011                                  architecture = relation(Architecture),
3012                                  poolfile_id = self.tbl_binaries.c.file,
3013                                  poolfile = relation(PoolFile, backref=backref('binary', uselist = False)),
3014                                  binarytype = self.tbl_binaries.c.type,
3015                                  fingerprint_id = self.tbl_binaries.c.sig_fpr,
3016                                  fingerprint = relation(Fingerprint),
3017                                  install_date = self.tbl_binaries.c.install_date,
3018                                  suites = relation(Suite, secondary=self.tbl_bin_associations,
3019                                      backref=backref('binaries', lazy='dynamic')),
3020                                  extra_sources = relation(DBSource, secondary=self.tbl_extra_src_references,
3021                                      backref=backref('extra_binary_references', lazy='dynamic'))),
3022                 extension = validator)
3024         mapper(BinaryACL, self.tbl_binary_acl,
3025                properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))
3027         mapper(BinaryACLMap, self.tbl_binary_acl_map,
3028                properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
3029                                  fingerprint = relation(Fingerprint, backref="binary_acl_map"),
3030                                  architecture = relation(Architecture)))
3032         mapper(Component, self.tbl_component,
3033                properties = dict(component_id = self.tbl_component.c.id,
3034                                  component_name = self.tbl_component.c.name),
3035                extension = validator)
3037         mapper(DBConfig, self.tbl_config,
3038                properties = dict(config_id = self.tbl_config.c.id))
3040         mapper(DSCFile, self.tbl_dsc_files,
3041                properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
3042                                  source_id = self.tbl_dsc_files.c.source,
3043                                  source = relation(DBSource),
3044                                  poolfile_id = self.tbl_dsc_files.c.file,
3045                                  poolfile = relation(PoolFile)))
3047         mapper(PoolFile, self.tbl_files,
3048                properties = dict(file_id = self.tbl_files.c.id,
3049                                  filesize = self.tbl_files.c.size,
3050                                  location_id = self.tbl_files.c.location,
3051                                  location = relation(Location,
3052                                      # using lazy='dynamic' in the back
3053                                      # reference because we have A LOT of
3054                                      # files in one location
3055                                      backref=backref('files', lazy='dynamic'))),
3056                extension = validator)
3058         mapper(Fingerprint, self.tbl_fingerprint,
3059                properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
3060                                  uid_id = self.tbl_fingerprint.c.uid,
3061                                  uid = relation(Uid),
3062                                  keyring_id = self.tbl_fingerprint.c.keyring,
3063                                  keyring = relation(Keyring),
3064                                  source_acl = relation(SourceACL),
3065                                  binary_acl = relation(BinaryACL)),
3066                extension = validator)
3068         mapper(Keyring, self.tbl_keyrings,
3069                properties = dict(keyring_name = self.tbl_keyrings.c.name,
3070                                  keyring_id = self.tbl_keyrings.c.id))
3072         mapper(DBChange, self.tbl_changes,
3073                properties = dict(change_id = self.tbl_changes.c.id,
3074                                  poolfiles = relation(PoolFile,
3075                                                       secondary=self.tbl_changes_pool_files,
3076                                                       backref="changeslinks"),
3077                                  seen = self.tbl_changes.c.seen,
3078                                  source = self.tbl_changes.c.source,
3079                                  binaries = self.tbl_changes.c.binaries,
3080                                  architecture = self.tbl_changes.c.architecture,
3081                                  distribution = self.tbl_changes.c.distribution,
3082                                  urgency = self.tbl_changes.c.urgency,
3083                                  maintainer = self.tbl_changes.c.maintainer,
3084                                  changedby = self.tbl_changes.c.changedby,
3085                                  date = self.tbl_changes.c.date,
3086                                  version = self.tbl_changes.c.version,
3087                                  files = relation(ChangePendingFile,
3088                                                   secondary=self.tbl_changes_pending_files_map,
3089                                                   backref="changesfile"),
3090                                  in_queue_id = self.tbl_changes.c.in_queue,
3091                                  in_queue = relation(PolicyQueue,
3092                                                      primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
3093                                  approved_for_id = self.tbl_changes.c.approved_for))
3095         mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
3096                properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))
3098         mapper(ChangePendingFile, self.tbl_changes_pending_files,
3099                properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
3100                                  filename = self.tbl_changes_pending_files.c.filename,
3101                                  size = self.tbl_changes_pending_files.c.size,
3102                                  md5sum = self.tbl_changes_pending_files.c.md5sum,
3103                                  sha1sum = self.tbl_changes_pending_files.c.sha1sum,
3104                                  sha256sum = self.tbl_changes_pending_files.c.sha256sum))
3106         mapper(ChangePendingSource, self.tbl_changes_pending_source,
3107                properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
3108                                  change = relation(DBChange),
3109                                  maintainer = relation(Maintainer,
3110                                                        primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
3111                                  changedby = relation(Maintainer,
3112                                                       primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
3113                                  fingerprint = relation(Fingerprint),
3114                                  source_files = relation(ChangePendingFile,
3115                                                          secondary=self.tbl_changes_pending_source_files,
3116                                                          backref="pending_sources")))
3119         mapper(KeyringACLMap, self.tbl_keyring_acl_map,
3120                properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
3121                                  keyring = relation(Keyring, backref="keyring_acl_map"),
3122                                  architecture = relation(Architecture)))
3124         mapper(Location, self.tbl_location,
3125                properties = dict(location_id = self.tbl_location.c.id,
3126                                  component_id = self.tbl_location.c.component,
3127                                  component = relation(Component, backref='location'),
3128                                  archive_id = self.tbl_location.c.archive,
3129                                  archive = relation(Archive),
3130                                  # FIXME: the 'type' column is old cruft and
3131                                  # should be removed in the future.
3132                                  archive_type = self.tbl_location.c.type),
3133                extension = validator)
3135         mapper(Maintainer, self.tbl_maintainer,
3136                properties = dict(maintainer_id = self.tbl_maintainer.c.id,
3137                    maintains_sources = relation(DBSource, backref='maintainer',
3138                        primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
3139                    changed_sources = relation(DBSource, backref='changedby',
3140                        primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
3141                 extension = validator)
3143         mapper(NewComment, self.tbl_new_comments,
3144                properties = dict(comment_id = self.tbl_new_comments.c.id))
3146         mapper(Override, self.tbl_override,
3147                properties = dict(suite_id = self.tbl_override.c.suite,
3148                                  suite = relation(Suite, \
3149                                     backref=backref('overrides', lazy='dynamic')),
3150                                  package = self.tbl_override.c.package,
3151                                  component_id = self.tbl_override.c.component,
3152                                  component = relation(Component, \
3153                                     backref=backref('overrides', lazy='dynamic')),
3154                                  priority_id = self.tbl_override.c.priority,
3155                                  priority = relation(Priority, \
3156                                     backref=backref('overrides', lazy='dynamic')),
3157                                  section_id = self.tbl_override.c.section,
3158                                  section = relation(Section, \
3159                                     backref=backref('overrides', lazy='dynamic')),
3160                                  overridetype_id = self.tbl_override.c.type,
3161                                  overridetype = relation(OverrideType, \
3162                                     backref=backref('overrides', lazy='dynamic'))))
3164         mapper(OverrideType, self.tbl_override_type,
3165                properties = dict(overridetype = self.tbl_override_type.c.type,
3166                                  overridetype_id = self.tbl_override_type.c.id))
3168         mapper(PolicyQueue, self.tbl_policy_queue,
3169                properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))
3171         mapper(Priority, self.tbl_priority,
3172                properties = dict(priority_id = self.tbl_priority.c.id))
3174         mapper(Section, self.tbl_section,
3175                properties = dict(section_id = self.tbl_section.c.id,
3176                                  section=self.tbl_section.c.section))
3178         mapper(DBSource, self.tbl_source,
3179                properties = dict(source_id = self.tbl_source.c.id,
3180                                  version = self.tbl_source.c.version,
3181                                  maintainer_id = self.tbl_source.c.maintainer,
3182                                  poolfile_id = self.tbl_source.c.file,
3183                                  poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
3184                                  fingerprint_id = self.tbl_source.c.sig_fpr,
3185                                  fingerprint = relation(Fingerprint),
3186                                  changedby_id = self.tbl_source.c.changedby,
3187                                  srcfiles = relation(DSCFile,
3188                                                      primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
3189                                  suites = relation(Suite, secondary=self.tbl_src_associations,
3190                                      backref=backref('sources', lazy='dynamic')),
3191                                  srcuploaders = relation(SrcUploader)),
3192                extension = validator)
3194         mapper(SourceACL, self.tbl_source_acl,
3195                properties = dict(source_acl_id = self.tbl_source_acl.c.id))
3197         mapper(SrcFormat, self.tbl_src_format,
3198                properties = dict(src_format_id = self.tbl_src_format.c.id,
3199                                  format_name = self.tbl_src_format.c.format_name))
3201         mapper(SrcUploader, self.tbl_src_uploaders,
3202                properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
3203                                  source_id = self.tbl_src_uploaders.c.source,
3204                                  source = relation(DBSource,
3205                                                    primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
3206                                  maintainer_id = self.tbl_src_uploaders.c.maintainer,
3207                                  maintainer = relation(Maintainer,
3208                                                        primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))
3210         mapper(Suite, self.tbl_suite,
3211                properties = dict(suite_id = self.tbl_suite.c.id,
3212                                  policy_queue = relation(PolicyQueue),
3213                                  copy_queues = relation(BuildQueue,
3214                                      secondary=self.tbl_suite_build_queue_copy)),
3215                 extension = validator)
3217         mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
3218                properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
3219                                  suite = relation(Suite, backref='suitesrcformats'),
3220                                  src_format_id = self.tbl_suite_src_formats.c.src_format,
3221                                  src_format = relation(SrcFormat)))
3223         mapper(Uid, self.tbl_uid,
3224                properties = dict(uid_id = self.tbl_uid.c.id,
3225                                  fingerprint = relation(Fingerprint)),
3226                extension = validator)
3228         mapper(UploadBlock, self.tbl_upload_blocks,
3229                properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
3230                                  fingerprint = relation(Fingerprint, backref="uploadblocks"),
3231                                  uid = relation(Uid, backref="uploadblocks")))
3233         mapper(BinContents, self.tbl_bin_contents,
3235                 binary = relation(DBBinary,
3236                     backref=backref('contents', lazy='dynamic', cascade='all')),
3237                 file = self.tbl_bin_contents.c.file))
3239         mapper(MetadataKey, self.tbl_metadata_keys,
3241                 key_id = self.tbl_metadata_keys.c.key_id,
3242                 key = self.tbl_metadata_keys.c.key))
3244         mapper(BinaryMetadata, self.tbl_binaries_metadata,
3246                 binary_id = self.tbl_binaries_metadata.c.bin_id,
3247                 binary = relation(DBBinary),
3248                 key_id = self.tbl_binaries_metadata.c.key_id,
3249                 key = relation(MetadataKey),
3250                 value = self.tbl_binaries_metadata.c.value))
3252         mapper(SourceMetadata, self.tbl_source_metadata,
3254                 source_id = self.tbl_source_metadata.c.src_id,
3255                 source = relation(DBSource),
3256                 key_id = self.tbl_source_metadata.c.key_id,
3257                 key = relation(MetadataKey),
3258                 value = self.tbl_source_metadata.c.value))
3260     ## Connection functions
     # Build the PostgreSQL connection string from dak's Config (service=,
     # host or local-socket forms), create the engine and sessionmaker,
     # then reflect tables and configure mappers.
3261     def __createconn(self):
3262         from config import Config
3264         if cnf.has_key("DB::Service"):
3265             connstr = "postgresql://service=%s" % cnf["DB::Service"]
3266         elif cnf.has_key("DB::Host"):
3268             connstr = "postgresql://%s" % cnf["DB::Host"]
3269             if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
3270                 connstr += ":%s" % cnf["DB::Port"]
3271             connstr += "/%s" % cnf["DB::Name"]
3274             connstr = "postgresql:///%s" % cnf["DB::Name"]
3275             if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
3276                 connstr += "?port=%s" % cnf["DB::Port"]
3278         engine_args = { 'echo': self.debug }
3279         if cnf.has_key('DB::PoolSize'):
3280             engine_args['pool_size'] = int(cnf['DB::PoolSize'])
3281         if cnf.has_key('DB::MaxOverflow'):
3282             engine_args['max_overflow'] = int(cnf['DB::MaxOverflow'])
3283         if sa_major_version == '0.6' and cnf.has_key('DB::Unicode') and \
3284             cnf['DB::Unicode'] == 'false':
3285             engine_args['use_native_unicode'] = False
3287         # Monkey patch a new dialect in in order to support service= syntax
3288         import sqlalchemy.dialects.postgresql
3289         from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
3290         class PGDialect_psycopg2_dak(PGDialect_psycopg2):
3291             def create_connect_args(self, url):
3292                 if str(url).startswith('postgresql://service='):
                    # len('postgresql://service=') == 21: strip the scheme
                    # and pass a raw libpq service= conninfo string.
3294                     servicename = str(url)[21:]
3295                     return (['service=%s' % servicename], {})
3297                 return PGDialect_psycopg2.create_connect_args(self, url)
3299         sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak
3301         self.db_pg   = create_engine(connstr, **engine_args)
3302         self.db_meta = MetaData()
3303         self.db_meta.bind = self.db_pg
3304         self.db_smaker = sessionmaker(bind=self.db_pg,
3308         self.__setuptables()
3309         self.__setupmappers()
3310         self.pid = os.getpid()
     # Body of session() (header elided): recreate the connection after a
     # fork so child processes don't share the parent's engine.
3313         # reinitialize DBConn in new processes
3314         if self.pid != os.getpid():
3317         return self.db_smaker()
3319 __all__.append('DBConn')