5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
37 from os.path import normpath
48 import simplejson as json
50 from datetime import datetime, timedelta
51 from errno import ENOENT
52 from tempfile import mkstemp, mkdtemp
53 from subprocess import Popen, PIPE
54 from tarfile import TarFile
56 from inspect import getargspec
59 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
61 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
62 backref, MapperExtension, EXT_CONTINUE, object_mapper, clear_mappers
63 from sqlalchemy import types as sqltypes
64 from sqlalchemy.orm.collections import attribute_mapped_collection
65 from sqlalchemy.ext.associationproxy import association_proxy
67 # Don't remove this, we re-export the exceptions to scripts which import us
68 from sqlalchemy.exc import *
69 from sqlalchemy.orm.exc import NoResultFound
71 # Only import Config until Queue stuff is changed to store its config
73 from config import Config
74 from textutils import fix_maintainer
75 from dak_exceptions import DBUpdateError, NoSourceFieldError
77 # suppress some deprecation warnings in squeeze related to sqlalchemy
79 warnings.filterwarnings('ignore', \
80 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
84 ################################################################################
# Patch in support for the debversion field type so that it works during
# reflection.
# sqlalchemy 0.6 provides UserDefinedType; 0.5 only has TypeEngine, so fall
# back when the attribute lookup fails.
try:
    # that is for sqlalchemy 0.6
    UserDefinedType = sqltypes.UserDefinedType
except AttributeError:
    # this one for sqlalchemy 0.5
    UserDefinedType = sqltypes.TypeEngine
class DebVersion(UserDefinedType):
    """Column type mapping PostgreSQL's 'debversion' type.

    Values pass through unchanged in both directions; only the column
    spec is special.
    """

    def get_col_spec(self):
        return 'debversion'

    def bind_processor(self, dialect):
        # No conversion needed when sending values to the database.
        return None

    # ' = None' is needed for sqlalchemy 0.5:
    def result_processor(self, dialect, coltype = None):
        # No conversion needed when reading values back.
        return None
# Register the debversion type with the postgres dialect so reflection
# works; only SQLA 0.5/0.6 are supported.
sa_major_version = sqlalchemy.__version__[0:3]
if sa_major_version in ["0.5", "0.6"]:
    from sqlalchemy.databases import postgres
    postgres.ischema_names['debversion'] = DebVersion
else:
    raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
114 ################################################################################
116 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
118 ################################################################################
def session_wrapper(fn):
    """
    Wrapper around common ".., session=None):" handling. If the wrapped
    function is called without passing 'session', we create a local one
    and destroy it when the function ends.

    Also attaches a commit_or_flush method to the session; if we created a
    local session, this is a synonym for session.commit(), otherwise it is a
    synonym for session.flush().
    """

    def wrapped(*args, **kwargs):
        private_transaction = False

        # Find the session object
        session = kwargs.get('session')

        if session is None:
            if len(args) <= len(getargspec(fn)[0]) - 1:
                # No session specified as last argument or in kwargs
                private_transaction = True
                session = kwargs['session'] = DBConn().session()
            else:
                # Session is last argument in args
                session = args[-1]
                if session is None:
                    args = list(args)
                    session = args[-1] = DBConn().session()
                    private_transaction = True

        if private_transaction:
            session.commit_or_flush = session.commit
        else:
            session.commit_or_flush = session.flush

        try:
            return fn(*args, **kwargs)
        finally:
            if private_transaction:
                # We created a session; close it.
                session.close()

    wrapped.__doc__ = fn.__doc__
    # __name__ is the same attribute as func_name in Python 2 and also
    # works under Python 3.
    wrapped.__name__ = fn.__name__

    return wrapped
167 __all__.append('session_wrapper')
169 ################################################################################
171 class ORMObject(object):
173 ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
174 derived classes must implement the properties() method.
177 def properties(self):
179 This method should be implemented by all derived classes and returns a
180 list of the important properties. The properties 'created' and
181 'modified' will be added automatically. A suffix '_count' should be
182 added to properties that are lists or query objects. The most important
183 property name should be returned as the first element in the list
184 because it is used by repr().
190 Returns a JSON representation of the object based on the properties
191 returned from the properties() method.
194 # add created and modified
195 all_properties = self.properties() + ['created', 'modified']
196 for property in all_properties:
197 # check for list or query
198 if property[-6:] == '_count':
199 real_property = property[:-6]
200 if not hasattr(self, real_property):
202 value = getattr(self, real_property)
203 if hasattr(value, '__len__'):
206 elif hasattr(value, 'count'):
207 # query (but not during validation)
208 if self.in_validation:
210 value = value.count()
212 raise KeyError('Do not understand property %s.' % property)
214 if not hasattr(self, property):
217 value = getattr(self, property)
221 elif isinstance(value, ORMObject):
222 # use repr() for ORMObject types
225 # we want a string for all other types because json cannot
228 data[property] = value
229 return json.dumps(data)
233 Returns the name of the class.
235 return type(self).__name__
239 Returns a short string representation of the object using the first
240 element from the properties() method.
242 primary_property = self.properties()[0]
243 value = getattr(self, primary_property)
244 return '<%s %s>' % (self.classname(), str(value))
248 Returns a human readable form of the object using the properties()
251 return '<%s %s>' % (self.classname(), self.json())
253 def not_null_constraints(self):
255 Returns a list of properties that must be not NULL. Derived classes
256 should override this method if needed.
260 validation_message = \
261 "Validation failed because property '%s' must not be empty in object\n%s"
263 in_validation = False
267 This function validates the not NULL constraints as returned by
268 not_null_constraints(). It raises the DBUpdateError exception if
271 for property in self.not_null_constraints():
272 # TODO: It is a bit awkward that the mapper configuration allow
273 # directly setting the numeric _id columns. We should get rid of it
275 if hasattr(self, property + '_id') and \
276 getattr(self, property + '_id') is not None:
278 if not hasattr(self, property) or getattr(self, property) is None:
279 # str() might lead to races due to a 2nd flush
280 self.in_validation = True
281 message = self.validation_message % (property, str(self))
282 self.in_validation = False
283 raise DBUpdateError(message)
287 def get(cls, primary_key, session = None):
289 This is a support function that allows getting an object by its primary
292 Architecture.get(3[, session])
294 instead of the more verbose
296 session.query(Architecture).get(3)
298 return session.query(cls).get(primary_key)
300 def session(self, replace = False):
302 Returns the current session that is associated with the object. May
303 return None is object is in detached state.
306 return object_session(self)
308 def clone(self, session = None):
310 Clones the current object in a new session and returns the new clone. A
311 fresh session is created if the optional session parameter is not
312 provided. The function will fail if a session is provided and has
315 RATIONALE: SQLAlchemy's session is not thread safe. This method clones
316 an existing object to allow several threads to work with their own
317 instances of an ORMObject.
319 WARNING: Only persistent (committed) objects can be cloned. Changes
320 made to the original object that are not committed yet will get lost.
321 The session of the new object will always be rolled back to avoid
325 if self.session() is None:
326 raise RuntimeError( \
327 'Method clone() failed for detached object:\n%s' % self)
328 self.session().flush()
329 mapper = object_mapper(self)
330 primary_key = mapper.primary_key_from_instance(self)
331 object_class = self.__class__
333 session = DBConn().session()
334 elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
335 raise RuntimeError( \
336 'Method clone() failed due to unflushed changes in session.')
337 new_object = session.query(object_class).get(primary_key)
339 if new_object is None:
340 raise RuntimeError( \
341 'Method clone() failed for non-persistent object:\n%s' % self)
344 __all__.append('ORMObject')
346 ################################################################################
class Validator(MapperExtension):
    """
    This class calls the validate() method for each instance for the
    'before_update' and 'before_insert' events. A global object validator is
    used for configuring the individual mappers.
    """

    def before_update(self, mapper, connection, instance):
        instance.validate()
        return EXT_CONTINUE

    def before_insert(self, mapper, connection, instance):
        instance.validate()
        return EXT_CONTINUE
363 validator = Validator()
365 ################################################################################
class Architecture(ORMObject):
    """ORM class for one architecture row (e.g. an arch_string like 'amd64')."""

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, other):
        # Allow comparing directly against a plain architecture-name string;
        # for anything else, defer to the normal comparison machinery.
        if not isinstance(other, str):
            return NotImplemented
        return self.arch_string == other

    def __ne__(self, other):
        if not isinstance(other, str):
            return NotImplemented
        return self.arch_string != other

    def properties(self):
        # 'arch_string' comes first because repr() uses the first property.
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']
390 __all__.append('Architecture')
@session_wrapper
def get_architecture(architecture, session=None):
    """
    Returns Architecture object for given C{architecture}.

    @type architecture: string
    @param architecture: The name of the architecture

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Architecture
    @return: Architecture object for the given arch (None if not present)
    """
    q = session.query(Architecture).filter_by(arch_string=architecture)

    try:
        return q.one()
    except NoResultFound:
        return None
417 # TODO: should be removed because the implementation is too trivial
# TODO: should be removed because the implementation is too trivial
@session_wrapper
def get_architecture_suites(architecture, session=None):
    """
    Returns list of Suite objects for given C{architecture} name

    @type architecture: str
    @param architecture: Architecture name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Suite objects for the given name (may be empty)
    """
    return get_architecture(architecture, session).suites
438 ################################################################################
class Archive(object):
    """Mapped class for a row of the 'archive' table; attributes are set
    up by the SQLAlchemy mapper."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<Archive %s>' % self.archive_name
447 __all__.append('Archive')
@session_wrapper
def get_archive(archive, session=None):
    """
    returns database row for given C{archive}.

    @type archive: string
    @param archive: the name of the archive

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Archive
    @return: Archive object for the given name (None if not present)
    """
    # Archive names are stored lower case.
    archive = archive.lower()

    q = session.query(Archive).filter_by(archive_name=archive)

    try:
        return q.one()
    except NoResultFound:
        return None
474 __all__.append('get_archive')
476 ################################################################################
class BinContents(ORMObject):
    """One (file, binary) content entry for a binary package."""

    def __init__(self, file = None, binary = None):
        self.file = file
        self.binary = binary

    def properties(self):
        return ['file', 'binary']
486 __all__.append('BinContents')
488 ################################################################################
490 class DBBinary(ORMObject):
491 def __init__(self, package = None, source = None, version = None, \
492 maintainer = None, architecture = None, poolfile = None, \
494 self.package = package
496 self.version = version
497 self.maintainer = maintainer
498 self.architecture = architecture
499 self.poolfile = poolfile
500 self.binarytype = binarytype
504 return self.binary_id
506 def properties(self):
507 return ['package', 'version', 'maintainer', 'source', 'architecture', \
508 'poolfile', 'binarytype', 'fingerprint', 'install_date', \
509 'suites_count', 'binary_id', 'contents_count', 'extra_sources']
511 def not_null_constraints(self):
512 return ['package', 'version', 'maintainer', 'source', 'poolfile', \
515 metadata = association_proxy('key', 'value')
517 def get_component_name(self):
518 return self.poolfile.location.component.component_name
520 def scan_contents(self):
522 Yields the contents of the package. Only regular files are yielded and
523 the path names are normalized after converting them from either utf-8
524 or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the
525 package does not contain any regular file.
527 fullpath = self.poolfile.fullpath
528 dpkg = Popen(['dpkg-deb', '--fsys-tarfile', fullpath], stdout = PIPE)
529 tar = TarFile.open(fileobj = dpkg.stdout, mode = 'r|')
530 for member in tar.getmembers():
531 if not member.isdir():
532 name = normpath(member.name)
533 # enforce proper utf-8 encoding
536 except UnicodeDecodeError:
537 name = name.decode('iso8859-1').encode('utf-8')
543 def read_control(self):
545 Reads the control information from a binary.
548 @return: stanza text of the control section.
551 fullpath = self.poolfile.fullpath
552 deb_file = open(fullpath, 'r')
553 stanza = apt_inst.debExtractControl(deb_file)
558 def read_control_fields(self):
560 Reads the control information from a binary and return
564 @return: fields of the control section as a dictionary.
567 stanza = self.read_control()
568 return apt_pkg.TagSection(stanza)
570 __all__.append('DBBinary')
@session_wrapper
def get_suites_binary_in(package, session=None):
    """
    Returns list of Suite objects which given C{package} name is in

    @type package: str
    @param package: DBBinary package name to search for

    @rtype: list
    @return: list of Suite objects for the given package
    """
    return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()
586 __all__.append('get_suites_binary_in')
@session_wrapper
def get_component_by_package_suite(package, suite_list, arch_list=None, session=None):
    """
    Returns the component name of the newest binary package in suite_list or
    None if no package is found. The result can be optionally filtered by a list
    of architecture names.

    @type package: str
    @param package: DBBinary package name to search for

    @type suite_list: list of str
    @param suite_list: list of suite_name items

    @type arch_list: list of str
    @param arch_list: optional list of arch_string items; None (the default)
    means no architecture filtering

    @rtype: str or NoneType
    @return: name of component or None
    """
    # None instead of a mutable [] default; semantics are unchanged.
    if arch_list is None:
        arch_list = []

    q = session.query(DBBinary).filter_by(package = package). \
        join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
    if len(arch_list) > 0:
        q = q.join(DBBinary.architecture). \
            filter(Architecture.arch_string.in_(arch_list))

    # Newest version wins.
    binary = q.order_by(desc(DBBinary.version)).first()
    if binary is None:
        return None
    return binary.get_component_name()
619 __all__.append('get_component_by_package_suite')
621 ################################################################################
class BinaryACL(object):
    """Mapped class for a row of the binary ACL table."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BinaryACL %s>' % self.binary_acl_id
630 __all__.append('BinaryACL')
632 ################################################################################
class BinaryACLMap(object):
    """Mapped class for a row of the binary ACL map table."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BinaryACLMap %s>' % self.binary_acl_map_id
641 __all__.append('BinaryACLMap')
643 ################################################################################
648 ArchiveDir "%(archivepath)s";
649 OverrideDir "%(overridedir)s";
650 CacheDir "%(cachedir)s";
655 Packages::Compress ". bzip2 gzip";
656 Sources::Compress ". bzip2 gzip";
661 bindirectory "incoming"
666 BinOverride "override.sid.all3";
667 BinCacheDB "packages-accepted.db";
669 FileList "%(filelist)s";
672 Packages::Extensions ".deb .udeb";
675 bindirectory "incoming/"
678 BinOverride "override.sid.all3";
679 SrcOverride "override.sid.all3.src";
680 FileList "%(filelist)s";
684 class BuildQueue(object):
685 def __init__(self, *args, **kwargs):
689 return '<BuildQueue %s>' % self.queue_name
691 def write_metadata(self, starttime, force=False):
692 # Do we write out metafiles?
693 if not (force or self.generate_metadata):
696 session = DBConn().session().object_session(self)
698 fl_fd = fl_name = ac_fd = ac_name = None
700 arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
701 startdir = os.getcwd()
704 # Grab files we want to include
705 newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
706 # Write file list with newer files
707 (fl_fd, fl_name) = mkstemp()
709 os.write(fl_fd, '%s\n' % n.fullpath)
714 # Write minimal apt.conf
715 # TODO: Remove hardcoding from template
716 (ac_fd, ac_name) = mkstemp()
717 os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
719 'cachedir': cnf["Dir::Cache"],
720 'overridedir': cnf["Dir::Override"],
724 # Run apt-ftparchive generate
725 os.chdir(os.path.dirname(ac_name))
726 os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
728 # Run apt-ftparchive release
729 # TODO: Eww - fix this
730 bname = os.path.basename(self.path)
734 # We have to remove the Release file otherwise it'll be included in the
737 os.unlink(os.path.join(bname, 'Release'))
741 os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
743 # Crude hack with open and append, but this whole section is and should be redone.
744 if self.notautomatic:
745 release=open("Release", "a")
746 release.write("NotAutomatic: yes")
751 keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
752 if cnf.has_key("Dinstall::SigningPubKeyring"):
753 keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
755 os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
757 # Move the files if we got this far
758 os.rename('Release', os.path.join(bname, 'Release'))
760 os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
762 # Clean up any left behind files
789 def clean_and_update(self, starttime, Logger, dryrun=False):
790 """WARNING: This routine commits for you"""
791 session = DBConn().session().object_session(self)
793 if self.generate_metadata and not dryrun:
794 self.write_metadata(starttime)
796 # Grab files older than our execution time
797 older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
803 Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
805 Logger.log(["I: Removing %s from the queue" % o.fullpath])
806 os.unlink(o.fullpath)
809 # If it wasn't there, don't worry
810 if e.errno == ENOENT:
813 # TODO: Replace with proper logging call
814 Logger.log(["E: Could not remove %s" % o.fullpath])
821 for f in os.listdir(self.path):
822 if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
826 r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
827 except NoResultFound:
828 fp = os.path.join(self.path, f)
830 Logger.log(["I: Would remove unused link %s" % fp])
832 Logger.log(["I: Removing unused link %s" % fp])
836 Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
838 def add_file_from_pool(self, poolfile):
839 """Copies a file into the pool. Assumes that the PoolFile object is
840 attached to the same SQLAlchemy session as the Queue object is.
842 The caller is responsible for committing after calling this function."""
843 poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
845 # Check if we have a file of this name or this ID already
846 for f in self.queuefiles:
847 if f.fileid is not None and f.fileid == poolfile.file_id or \
848 f.poolfile.filename == poolfile_basename:
849 # In this case, update the BuildQueueFile entry so we
850 # don't remove it too early
851 f.lastused = datetime.now()
852 DBConn().session().object_session(poolfile).add(f)
855 # Prepare BuildQueueFile object
856 qf = BuildQueueFile()
857 qf.build_queue_id = self.queue_id
858 qf.lastused = datetime.now()
859 qf.filename = poolfile_basename
861 targetpath = poolfile.fullpath
862 queuepath = os.path.join(self.path, poolfile_basename)
866 # We need to copy instead of symlink
868 utils.copy(targetpath, queuepath)
869 # NULL in the fileid field implies a copy
872 os.symlink(targetpath, queuepath)
873 qf.fileid = poolfile.file_id
877 # Get the same session as the PoolFile is using and add the qf to it
878 DBConn().session().object_session(poolfile).add(qf)
883 __all__.append('BuildQueue')
@session_wrapper
def get_build_queue(queuename, session=None):
    """
    Returns BuildQueue object for given C{queue name}.

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: BuildQueue
    @return: BuildQueue object for the given queue (None if it does not exist)
    """
    q = session.query(BuildQueue).filter_by(queue_name=queuename)

    try:
        return q.one()
    except NoResultFound:
        return None
909 __all__.append('get_build_queue')
911 ################################################################################
class BuildQueueFile(object):
    """A file that lives in a build queue directory."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)

    @property
    def fullpath(self):
        # Absolute path: queue directory + file name.
        return os.path.join(self.buildqueue.path, self.filename)
925 __all__.append('BuildQueueFile')
927 ################################################################################
class ChangePendingBinary(object):
    """Mapped class for a pending binary of an uploaded changes file."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ChangePendingBinary %s>' % self.change_pending_binary_id
936 __all__.append('ChangePendingBinary')
938 ################################################################################
class ChangePendingFile(object):
    """Mapped class for a pending file of an uploaded changes file."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ChangePendingFile %s>' % self.change_pending_file_id
947 __all__.append('ChangePendingFile')
949 ################################################################################
class ChangePendingSource(object):
    """Mapped class for a pending source of an uploaded changes file."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ChangePendingSource %s>' % self.change_pending_source_id
958 __all__.append('ChangePendingSource')
960 ################################################################################
class Component(ORMObject):
    """ORM class for an archive component row (e.g. a component_name such as 'main')."""

    def __init__(self, component_name = None):
        self.component_name = component_name

    def __eq__(self, other):
        # Allow comparing directly against a plain component-name string;
        # anything else falls back to the default comparison.
        if not isinstance(other, str):
            return NotImplemented
        return self.component_name == other

    def __ne__(self, other):
        if not isinstance(other, str):
            return NotImplemented
        return self.component_name != other

    def properties(self):
        # 'component_name' comes first because repr() uses the first property.
        return ['component_name', 'component_id', 'description', \
            'location_count', 'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        return ['component_name']
986 __all__.append('Component')
@session_wrapper
def get_component(component, session=None):
    """
    Returns database row for given C{component}.

    @type component: string
    @param component: The name of the component

    @rtype: Component
    @return: Component object for the given name (None if not present)
    """
    # Component names are stored lower case.
    component = component.lower()

    q = session.query(Component).filter_by(component_name=component)

    try:
        return q.one()
    except NoResultFound:
        return None
1009 __all__.append('get_component')
1011 ################################################################################
class DBConfig(object):
    """Mapped class for a row of the config table."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DBConfig %s>' % self.name
1020 __all__.append('DBConfig')
1022 ################################################################################
@session_wrapper
def get_or_set_contents_file_id(filename, session=None):
    """
    Returns database id for given filename.

    If no matching file is found, a row is inserted.

    @type filename: string
    @param filename: The filename
    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: int
    @return: the database id for the given filename
    """
    q = session.query(ContentFilename).filter_by(filename=filename)

    try:
        ret = q.one().cafilename_id
    except NoResultFound:
        # Not there yet: insert a new row and use its id.
        cf = ContentFilename()
        cf.filename = filename
        session.add(cf)
        session.commit_or_flush()
        ret = cf.cafilename_id

    return ret
1055 __all__.append('get_or_set_contents_file_id')
@session_wrapper
def get_contents(suite, overridetype, section=None, session=None):
    """
    Returns contents for a suite / overridetype combination, limiting
    to a section if not None.

    @type suite: Suite
    @param suite: Suite object

    @type overridetype: OverrideType
    @param overridetype: OverrideType object

    @type section: Section
    @param section: Optional section object to limit results to

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: ResultsProxy
    @return: ResultsProxy object set up to return tuples of (filename, section,
    package, arch_id)
    """

    # find me all of the contents for a given suite
    # NOTE(review): selected columns reconstructed from the documented result
    # tuple — confirm against the contents schema.
    contents_q = """SELECT (p.path||'/'||n.file) AS fn,
                            s.section,
                            b.package,
                            b.architecture
               FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
               JOIN content_file_names n ON (c.filename=n.id)
               JOIN binaries b ON (b.id=c.binary_pkg)
               JOIN override o ON (o.package=b.package)
               JOIN section s ON (s.id=o.section)
               WHERE o.suite = :suiteid AND o.type = :overridetypeid
               AND b.type=:overridetypename"""

    vals = {'suiteid': suite.suite_id,
            'overridetypeid': overridetype.overridetype_id,
            'overridetypename': overridetype.overridetype}

    if section is not None:
        contents_q += " AND s.id = :sectionid"
        vals['sectionid'] = section.section_id

    contents_q += " ORDER BY fn"

    return session.execute(contents_q, vals)
1106 __all__.append('get_contents')
1108 ################################################################################
class ContentFilepath(object):
    """Mapped class for a row of the content file paths table."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ContentFilepath %s>' % self.filepath
1117 __all__.append('ContentFilepath')
@session_wrapper
def get_or_set_contents_path_id(filepath, session=None):
    """
    Returns database id for given path.

    If no matching file is found, a row is inserted.

    @type filepath: string
    @param filepath: The filepath

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: int
    @return: the database id for the given path
    """
    q = session.query(ContentFilepath).filter_by(filepath=filepath)

    try:
        ret = q.one().cafilepath_id
    except NoResultFound:
        # Not there yet: insert a new row and use its id.
        cf = ContentFilepath()
        cf.filepath = filepath
        session.add(cf)
        session.commit_or_flush()
        ret = cf.cafilepath_id

    return ret
1151 __all__.append('get_or_set_contents_path_id')
1153 ################################################################################
class ContentAssociation(object):
    """Mapped class for a row of the content associations table."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ContentAssociation %s>' % self.ca_id
1162 __all__.append('ContentAssociation')
def insert_content_paths(binary_id, fullpaths, session=None):
    """
    Make sure given path is associated with given binary id

    @type binary_id: int
    @param binary_id: the id of the binary
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session. If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code. If not passed, a commit
    will be performed at the end of the function, otherwise the caller is
    responsible for commiting.

    @return: True upon success
    """
    privatetrans = False
    if session is None:
        session = DBConn().session()
        privatetrans = True

    try:
        # Insert paths, stripping any leading './' so stored names are
        # pool-relative.
        def generate_path_dicts():
            for fullpath in fullpaths:
                if fullpath.startswith( './' ):
                    fullpath = fullpath[2:]

                yield {'filename':fullpath, 'id': binary_id }

        for d in generate_path_dicts():
            session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
                             d )

        session.commit()
        if privatetrans:
            session.close()
        return True

    # HACK: bare except kept deliberately — any failure is reported and
    # turned into a False return for best-effort callers.
    except:
        traceback.print_exc()

        # Only rollback if we set up the session ourself
        if privatetrans:
            session.rollback()
            session.close()

        return False
1215 __all__.append('insert_content_paths')
1217 ################################################################################
class DSCFile(object):
    """Mapped class for a row of the dsc_files table."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DSCFile %s>' % self.dscfile_id
1226 __all__.append('DSCFile')
@session_wrapper
def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
    """
    Returns a list of DSCFiles which may be empty

    @type dscfile_id: int (optional)
    @param dscfile_id: the dscfile_id of the DSCFiles to find

    @type source_id: int (optional)
    @param source_id: the source id related to the DSCFiles to find

    @type poolfile_id: int (optional)
    @param poolfile_id: the poolfile id related to the DSCFiles to find

    @rtype: list
    @return: Possibly empty list of DSCFiles
    """
    q = session.query(DSCFile)

    # Each filter is optional; unset ones are skipped.
    if dscfile_id is not None:
        q = q.filter_by(dscfile_id=dscfile_id)

    if source_id is not None:
        q = q.filter_by(source_id=source_id)

    if poolfile_id is not None:
        q = q.filter_by(poolfile_id=poolfile_id)

    return q.all()
1259 __all__.append('get_dscfiles')
1261 ################################################################################
1263 class PoolFile(ORMObject):
1264 def __init__(self, filename = None, location = None, filesize = -1, \
1266 self.filename = filename
1267 self.location = location
1268 self.filesize = filesize
1269 self.md5sum = md5sum
1273 return os.path.join(self.location.path, self.filename)
1275 def is_valid(self, filesize = -1, md5sum = None):
1276 return self.filesize == long(filesize) and self.md5sum == md5sum
1278 def properties(self):
1279 return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
1280 'sha256sum', 'location', 'source', 'binary', 'last_used']
    def not_null_constraints(self):
        # Columns that must never be None for a valid pool-file row
        # (presumably enforced via ORMObject validation -- confirm there).
        return ['filename', 'md5sum', 'location']
1285 __all__.append('PoolFile')
1288 def check_poolfile(filename, filesize, md5sum, location_id, session=None):
1291 (ValidFileFound [boolean], PoolFile object or None)
1293 @type filename: string
1294 @param filename: the filename of the file to check against the DB
1297 @param filesize: the size of the file to check against the DB
1299 @type md5sum: string
1300 @param md5sum: the md5sum of the file to check against the DB
1302 @type location_id: int
1303 @param location_id: the id of the location to look in
1306 @return: Tuple of length 2.
1307 - If valid pool file found: (C{True}, C{PoolFile object})
1308 - If valid pool file not found:
1309 - (C{False}, C{None}) if no file found
1310 - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
1313 poolfile = session.query(Location).get(location_id). \
1314 files.filter_by(filename=filename).first()
1316 if poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum):
1319 return (valid, poolfile)
1321 __all__.append('check_poolfile')
1323 # TODO: the implementation can trivially be inlined at the place where the
1324 # function is called
1326 def get_poolfile_by_id(file_id, session=None):
1328 Returns a PoolFile objects or None for the given id
1331 @param file_id: the id of the file to look for
1333 @rtype: PoolFile or None
1334 @return: either the PoolFile object or None
1337 return session.query(PoolFile).get(file_id)
1339 __all__.append('get_poolfile_by_id')
1342 def get_poolfile_like_name(filename, session=None):
1344 Returns an array of PoolFile objects which are like the given name
1346 @type filename: string
1347 @param filename: the filename of the file to check against the DB
1350 @return: array of PoolFile objects
1353 # TODO: There must be a way of properly using bind parameters with %FOO%
1354 q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1358 __all__.append('get_poolfile_like_name')
1361 def add_poolfile(filename, datadict, location_id, session=None):
1363 Add a new file to the pool
1365 @type filename: string
1366 @param filename: filename
1368 @type datadict: dict
1369 @param datadict: dict with needed data
1371 @type location_id: int
1372 @param location_id: database id of the location
1375 @return: the PoolFile object created
1377 poolfile = PoolFile()
1378 poolfile.filename = filename
1379 poolfile.filesize = datadict["size"]
1380 poolfile.md5sum = datadict["md5sum"]
1381 poolfile.sha1sum = datadict["sha1sum"]
1382 poolfile.sha256sum = datadict["sha256sum"]
1383 poolfile.location_id = location_id
1385 session.add(poolfile)
1386 # Flush to get a file id (NB: This is not a commit)
1391 __all__.append('add_poolfile')
1393 ################################################################################
1395 class Fingerprint(ORMObject):
    def __init__(self, fingerprint = None):
        # fingerprint: the OpenPGP key fingerprint string this row stores.
        self.fingerprint = fingerprint
1399 def properties(self):
1400 return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
    def not_null_constraints(self):
        # Only the fingerprint string itself is mandatory.
        return ['fingerprint']
1406 __all__.append('Fingerprint')
1409 def get_fingerprint(fpr, session=None):
1411 Returns Fingerprint object for given fpr.
1414 @param fpr: The fpr to find / add
1416 @type session: SQLAlchemy
1417 @param session: Optional SQL session object (a temporary one will be
1418 generated if not supplied).
1421 @return: the Fingerprint object for the given fpr or None
1424 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1428 except NoResultFound:
1433 __all__.append('get_fingerprint')
1436 def get_or_set_fingerprint(fpr, session=None):
1438 Returns Fingerprint object for given fpr.
1440 If no matching fpr is found, a row is inserted.
1443 @param fpr: The fpr to find / add
1445 @type session: SQLAlchemy
1446 @param session: Optional SQL session object (a temporary one will be
1447 generated if not supplied). If not passed, a commit will be performed at
the end of the function, otherwise the caller is responsible for committing.
1449 A flush will be performed either way.
1452 @return: the Fingerprint object for the given fpr
1455 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1459 except NoResultFound:
1460 fingerprint = Fingerprint()
1461 fingerprint.fingerprint = fpr
1462 session.add(fingerprint)
1463 session.commit_or_flush()
1468 __all__.append('get_or_set_fingerprint')
1470 ################################################################################
1472 # Helper routine for Keyring class
1473 def get_ldap_name(entry):
1475 for k in ["cn", "mn", "sn"]:
1477 if ret and ret[0] != "" and ret[0] != "-":
1479 return " ".join(name)
1481 ################################################################################
1483 class Keyring(object):
1484 gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
1485 " --with-colons --fingerprint --fingerprint"
1490 def __init__(self, *args, **kwargs):
1494 return '<Keyring %s>' % self.keyring_name
1496 def de_escape_gpg_str(self, txt):
1497 esclist = re.split(r'(\\x..)', txt)
1498 for x in range(1,len(esclist),2):
1499 esclist[x] = "%c" % (int(esclist[x][2:],16))
1500 return "".join(esclist)
1502 def parse_address(self, uid):
1503 """parses uid and returns a tuple of real name and email address"""
1505 (name, address) = email.Utils.parseaddr(uid)
1506 name = re.sub(r"\s*[(].*[)]", "", name)
1507 name = self.de_escape_gpg_str(name)
1510 return (name, address)
1512 def load_keys(self, keyring):
1513 if not self.keyring_id:
1514 raise Exception('Must be initialized with database information')
1516 k = os.popen(self.gpg_invocation % keyring, "r")
1520 for line in k.xreadlines():
1521 field = line.split(":")
1522 if field[0] == "pub":
1525 (name, addr) = self.parse_address(field[9])
1527 self.keys[key]["email"] = addr
1528 self.keys[key]["name"] = name
1529 self.keys[key]["fingerprints"] = []
1531 elif key and field[0] == "sub" and len(field) >= 12:
1532 signingkey = ("s" in field[11])
1533 elif key and field[0] == "uid":
1534 (name, addr) = self.parse_address(field[9])
1535 if "email" not in self.keys[key] and "@" in addr:
1536 self.keys[key]["email"] = addr
1537 self.keys[key]["name"] = name
1538 elif signingkey and field[0] == "fpr":
1539 self.keys[key]["fingerprints"].append(field[9])
1540 self.fpr_lookup[field[9]] = key
1542 def import_users_from_ldap(self, session):
1546 LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
1547 LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
1549 l = ldap.open(LDAPServer)
1550 l.simple_bind_s("","")
1551 Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
1552 "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
1553 ["uid", "keyfingerprint", "cn", "mn", "sn"])
1555 ldap_fin_uid_id = {}
1562 uid = entry["uid"][0]
1563 name = get_ldap_name(entry)
1564 fingerprints = entry["keyFingerPrint"]
1566 for f in fingerprints:
1567 key = self.fpr_lookup.get(f, None)
1568 if key not in self.keys:
1570 self.keys[key]["uid"] = uid
1574 keyid = get_or_set_uid(uid, session).uid_id
1575 byuid[keyid] = (uid, name)
1576 byname[uid] = (keyid, name)
1578 return (byname, byuid)
1580 def generate_users_from_keyring(self, format, session):
1584 for x in self.keys.keys():
1585 if "email" not in self.keys[x]:
1587 self.keys[x]["uid"] = format % "invalid-uid"
1589 uid = format % self.keys[x]["email"]
1590 keyid = get_or_set_uid(uid, session).uid_id
1591 byuid[keyid] = (uid, self.keys[x]["name"])
1592 byname[uid] = (keyid, self.keys[x]["name"])
1593 self.keys[x]["uid"] = uid
1596 uid = format % "invalid-uid"
1597 keyid = get_or_set_uid(uid, session).uid_id
1598 byuid[keyid] = (uid, "ungeneratable user id")
1599 byname[uid] = (keyid, "ungeneratable user id")
1601 return (byname, byuid)
1603 __all__.append('Keyring')
1606 def get_keyring(keyring, session=None):
1608 If C{keyring} does not have an entry in the C{keyrings} table yet, return None
1609 If C{keyring} already has an entry, simply return the existing Keyring
1611 @type keyring: string
1612 @param keyring: the keyring name
1615 @return: the Keyring object for this keyring
1618 q = session.query(Keyring).filter_by(keyring_name=keyring)
1622 except NoResultFound:
1625 __all__.append('get_keyring')
1627 ################################################################################
1629 class KeyringACLMap(object):
1630 def __init__(self, *args, **kwargs):
1634 return '<KeyringACLMap %s>' % self.keyring_acl_map_id
1636 __all__.append('KeyringACLMap')
1638 ################################################################################
1640 class DBChange(object):
1641 def __init__(self, *args, **kwargs):
1645 return '<DBChange %s>' % self.changesname
1647 def clean_from_queue(self):
1648 session = DBConn().session().object_session(self)
1650 # Remove changes_pool_files entries
1653 # Remove changes_pending_files references
1656 # Clear out of queue
1657 self.in_queue = None
1658 self.approved_for_id = None
1660 __all__.append('DBChange')
1663 def get_dbchange(filename, session=None):
returns DBChange object for given C{filename}.
1667 @type filename: string
1668 @param filename: the name of the file
1670 @type session: Session
1671 @param session: Optional SQLA session object (a temporary one will be
1672 generated if not supplied)
1675 @return: DBChange object for the given filename (C{None} if not present)
1678 q = session.query(DBChange).filter_by(changesname=filename)
1682 except NoResultFound:
1685 __all__.append('get_dbchange')
1687 ################################################################################
1689 class Location(ORMObject):
1690 def __init__(self, path = None, component = None):
1692 self.component = component
1693 # the column 'type' should go away, see comment at mapper
1694 self.archive_type = 'pool'
1696 def properties(self):
1697 return ['path', 'location_id', 'archive_type', 'component', \
    def not_null_constraints(self):
        # A location always needs a filesystem path and an archive type.
        return ['path', 'archive_type']
1703 __all__.append('Location')
1706 def get_location(location, component=None, archive=None, session=None):
1708 Returns Location object for the given combination of location, component
1711 @type location: string
1712 @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}
1714 @type component: string
1715 @param component: the component name (if None, no restriction applied)
1717 @type archive: string
1718 @param archive: the archive name (if None, no restriction applied)
1720 @rtype: Location / None
1721 @return: Either a Location object or None if one can't be found
1724 q = session.query(Location).filter_by(path=location)
1726 if archive is not None:
1727 q = q.join(Archive).filter_by(archive_name=archive)
1729 if component is not None:
1730 q = q.join(Component).filter_by(component_name=component)
1734 except NoResultFound:
1737 __all__.append('get_location')
1739 ################################################################################
1741 class Maintainer(ORMObject):
1742 def __init__(self, name = None):
    def properties(self):
        # Attributes exposed via the ORMObject protocol.
        return ['name', 'maintainer_id']
1748 def not_null_constraints(self):
1751 def get_split_maintainer(self):
1752 if not hasattr(self, 'name') or self.name is None:
1753 return ('', '', '', '')
1755 return fix_maintainer(self.name.strip())
1757 __all__.append('Maintainer')
1760 def get_or_set_maintainer(name, session=None):
1762 Returns Maintainer object for given maintainer name.
1764 If no matching maintainer name is found, a row is inserted.
1767 @param name: The maintainer name to add
1769 @type session: SQLAlchemy
1770 @param session: Optional SQL session object (a temporary one will be
1771 generated if not supplied). If not passed, a commit will be performed at
the end of the function, otherwise the caller is responsible for committing.
1773 A flush will be performed either way.
1776 @return: the Maintainer object for the given maintainer
1779 q = session.query(Maintainer).filter_by(name=name)
1782 except NoResultFound:
1783 maintainer = Maintainer()
1784 maintainer.name = name
1785 session.add(maintainer)
1786 session.commit_or_flush()
1791 __all__.append('get_or_set_maintainer')
1794 def get_maintainer(maintainer_id, session=None):
1796 Return the name of the maintainer behind C{maintainer_id} or None if that
1797 maintainer_id is invalid.
1799 @type maintainer_id: int
1800 @param maintainer_id: the id of the maintainer
1803 @return: the Maintainer with this C{maintainer_id}
1806 return session.query(Maintainer).get(maintainer_id)
1808 __all__.append('get_maintainer')
1810 ################################################################################
1812 class NewComment(object):
1813 def __init__(self, *args, **kwargs):
1817 return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
1819 __all__.append('NewComment')
1822 def has_new_comment(package, version, session=None):
1824 Returns true if the given combination of C{package}, C{version} has a comment.
1826 @type package: string
1827 @param package: name of the package
1829 @type version: string
1830 @param version: package version
1832 @type session: Session
1833 @param session: Optional SQLA session object (a temporary one will be
1834 generated if not supplied)
1840 q = session.query(NewComment)
1841 q = q.filter_by(package=package)
1842 q = q.filter_by(version=version)
1844 return bool(q.count() > 0)
1846 __all__.append('has_new_comment')
1849 def get_new_comments(package=None, version=None, comment_id=None, session=None):
1851 Returns (possibly empty) list of NewComment objects for the given
1854 @type package: string (optional)
1855 @param package: name of the package
1857 @type version: string (optional)
1858 @param version: package version
1860 @type comment_id: int (optional)
1861 @param comment_id: An id of a comment
1863 @type session: Session
1864 @param session: Optional SQLA session object (a temporary one will be
1865 generated if not supplied)
1868 @return: A (possibly empty) list of NewComment objects will be returned
1871 q = session.query(NewComment)
1872 if package is not None: q = q.filter_by(package=package)
1873 if version is not None: q = q.filter_by(version=version)
1874 if comment_id is not None: q = q.filter_by(comment_id=comment_id)
1878 __all__.append('get_new_comments')
1880 ################################################################################
1882 class Override(ORMObject):
1883 def __init__(self, package = None, suite = None, component = None, overridetype = None, \
1884 section = None, priority = None):
1885 self.package = package
1887 self.component = component
1888 self.overridetype = overridetype
1889 self.section = section
1890 self.priority = priority
1892 def properties(self):
1893 return ['package', 'suite', 'component', 'overridetype', 'section', \
    def not_null_constraints(self):
        # Mandatory fields for an override entry; note that 'priority'
        # is deliberately absent from this list, i.e. may be unset.
        return ['package', 'suite', 'component', 'overridetype', 'section']
1899 __all__.append('Override')
1902 def get_override(package, suite=None, component=None, overridetype=None, session=None):
1904 Returns Override object for the given parameters
1906 @type package: string
1907 @param package: The name of the package
1909 @type suite: string, list or None
1910 @param suite: The name of the suite (or suites if a list) to limit to. If
1911 None, don't limit. Defaults to None.
1913 @type component: string, list or None
1914 @param component: The name of the component (or components if a list) to
1915 limit to. If None, don't limit. Defaults to None.
1917 @type overridetype: string, list or None
1918 @param overridetype: The name of the overridetype (or overridetypes if a list) to
1919 limit to. If None, don't limit. Defaults to None.
1921 @type session: Session
1922 @param session: Optional SQLA session object (a temporary one will be
1923 generated if not supplied)
1926 @return: A (possibly empty) list of Override objects will be returned
1929 q = session.query(Override)
1930 q = q.filter_by(package=package)
1932 if suite is not None:
1933 if not isinstance(suite, list): suite = [suite]
1934 q = q.join(Suite).filter(Suite.suite_name.in_(suite))
1936 if component is not None:
1937 if not isinstance(component, list): component = [component]
1938 q = q.join(Component).filter(Component.component_name.in_(component))
1940 if overridetype is not None:
1941 if not isinstance(overridetype, list): overridetype = [overridetype]
1942 q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))
1946 __all__.append('get_override')
1949 ################################################################################
class OverrideType(ORMObject):
    """ORM class for one row of the override type table."""

    def __init__(self, overridetype = None):
        # overridetype: the type name string for this row.
        self.overridetype = overridetype

    def properties(self):
        # Attributes exposed via the ORMObject protocol.
        return ['overridetype', 'overridetype_id', 'overrides_count']

    def not_null_constraints(self):
        # Only the type name itself is mandatory.
        return ['overridetype']

__all__.append('OverrideType')
1964 def get_override_type(override_type, session=None):
1966 Returns OverrideType object for given C{override type}.
1968 @type override_type: string
1969 @param override_type: The name of the override type
1971 @type session: Session
1972 @param session: Optional SQLA session object (a temporary one will be
1973 generated if not supplied)
1976 @return: the database id for the given override type
1979 q = session.query(OverrideType).filter_by(overridetype=override_type)
1983 except NoResultFound:
1986 __all__.append('get_override_type')
1988 ################################################################################
1990 class PolicyQueue(object):
1991 def __init__(self, *args, **kwargs):
1995 return '<PolicyQueue %s>' % self.queue_name
1997 __all__.append('PolicyQueue')
2000 def get_policy_queue(queuename, session=None):
2002 Returns PolicyQueue object for given C{queue name}
2004 @type queuename: string
2005 @param queuename: The name of the queue
2007 @type session: Session
2008 @param session: Optional SQLA session object (a temporary one will be
2009 generated if not supplied)
2012 @return: PolicyQueue object for the given queue
2015 q = session.query(PolicyQueue).filter_by(queue_name=queuename)
2019 except NoResultFound:
2022 __all__.append('get_policy_queue')
2025 def get_policy_queue_from_path(pathname, session=None):
2027 Returns PolicyQueue object for given C{path name}
@type pathname: string
@param pathname: The path
2032 @type session: Session
2033 @param session: Optional SQLA session object (a temporary one will be
2034 generated if not supplied)
2037 @return: PolicyQueue object for the given queue
2040 q = session.query(PolicyQueue).filter_by(path=pathname)
2044 except NoResultFound:
2047 __all__.append('get_policy_queue_from_path')
2049 ################################################################################
2051 class Priority(ORMObject):
2052 def __init__(self, priority = None, level = None):
2053 self.priority = priority
    def properties(self):
        # Attributes exposed via the ORMObject protocol.
        return ['priority', 'priority_id', 'level', 'overrides_count']
    def not_null_constraints(self):
        # Both the name and the numeric level are mandatory.
        return ['priority', 'level']
2062 def __eq__(self, val):
2063 if isinstance(val, str):
2064 return (self.priority == val)
2065 # This signals to use the normal comparison operator
2066 return NotImplemented
2068 def __ne__(self, val):
2069 if isinstance(val, str):
2070 return (self.priority != val)
2071 # This signals to use the normal comparison operator
2072 return NotImplemented
2074 __all__.append('Priority')
2077 def get_priority(priority, session=None):
2079 Returns Priority object for given C{priority name}.
2081 @type priority: string
2082 @param priority: The name of the priority
2084 @type session: Session
2085 @param session: Optional SQLA session object (a temporary one will be
2086 generated if not supplied)
2089 @return: Priority object for the given priority
2092 q = session.query(Priority).filter_by(priority=priority)
2096 except NoResultFound:
2099 __all__.append('get_priority')
2102 def get_priorities(session=None):
2104 Returns dictionary of priority names -> id mappings
2106 @type session: Session
2107 @param session: Optional SQL session object (a temporary one will be
2108 generated if not supplied)
2111 @return: dictionary of priority names -> id mappings
2115 q = session.query(Priority)
2117 ret[x.priority] = x.priority_id
2121 __all__.append('get_priorities')
2123 ################################################################################
2125 class Section(ORMObject):
2126 def __init__(self, section = None):
2127 self.section = section
    def properties(self):
        # Attributes exposed via the ORMObject protocol.
        return ['section', 'section_id', 'overrides_count']
2132 def not_null_constraints(self):
2135 def __eq__(self, val):
2136 if isinstance(val, str):
2137 return (self.section == val)
2138 # This signals to use the normal comparison operator
2139 return NotImplemented
2141 def __ne__(self, val):
2142 if isinstance(val, str):
2143 return (self.section != val)
2144 # This signals to use the normal comparison operator
2145 return NotImplemented
2147 __all__.append('Section')
2150 def get_section(section, session=None):
2152 Returns Section object for given C{section name}.
2154 @type section: string
2155 @param section: The name of the section
2157 @type session: Session
2158 @param session: Optional SQLA session object (a temporary one will be
2159 generated if not supplied)
2162 @return: Section object for the given section name
2165 q = session.query(Section).filter_by(section=section)
2169 except NoResultFound:
2172 __all__.append('get_section')
2175 def get_sections(session=None):
2177 Returns dictionary of section names -> id mappings
2179 @type session: Session
2180 @param session: Optional SQL session object (a temporary one will be
2181 generated if not supplied)
2184 @return: dictionary of section names -> id mappings
2188 q = session.query(Section)
2190 ret[x.section] = x.section_id
2194 __all__.append('get_sections')
2196 ################################################################################
2198 class SrcContents(ORMObject):
2199 def __init__(self, file = None, source = None):
2201 self.source = source
    def properties(self):
        # Attributes exposed via the ORMObject protocol.
        return ['file', 'source']
2206 __all__.append('SrcContents')
2208 ################################################################################
2210 from debian.debfile import Deb822
2212 # Temporary Deb822 subclass to fix bugs with : handling; see #597249
2213 class Dak822(Deb822):
2214 def _internal_parser(self, sequence, fields=None):
2215 # The key is non-whitespace, non-colon characters before any colon.
2216 key_part = r"^(?P<key>[^: \t\n\r\f\v]+)\s*:\s*"
2217 single = re.compile(key_part + r"(?P<data>\S.*?)\s*$")
2218 multi = re.compile(key_part + r"$")
2219 multidata = re.compile(r"^\s(?P<data>.+?)\s*$")
2221 wanted_field = lambda f: fields is None or f in fields
2223 if isinstance(sequence, basestring):
2224 sequence = sequence.splitlines()
2228 for line in self.gpg_stripped_paragraph(sequence):
2229 m = single.match(line)
2232 self[curkey] = content
2234 if not wanted_field(m.group('key')):
2238 curkey = m.group('key')
2239 content = m.group('data')
2242 m = multi.match(line)
2245 self[curkey] = content
2247 if not wanted_field(m.group('key')):
2251 curkey = m.group('key')
2255 m = multidata.match(line)
2257 content += '\n' + line # XXX not m.group('data')?
2261 self[curkey] = content
2264 class DBSource(ORMObject):
    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None):
        # Package identity.
        self.source = source
        self.version = version
        # People involved with the upload.
        self.maintainer = maintainer
        self.changedby = changedby
        # Pool file backing this source and the date it was installed.
        self.poolfile = poolfile
        self.install_date = install_date
2276 return self.source_id
    def properties(self):
        # Attributes exposed via the ORMObject protocol.
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \
            'install_date', 'binaries_count', 'uploaders_count']
2283 def not_null_constraints(self):
2284 return ['source', 'version', 'install_date', 'maintainer', \
2285 'changedby', 'poolfile', 'install_date']
2287 def read_control_fields(self):
2289 Reads the control information from a dsc
2292 @return: fields is the dsc information in a dictionary form
2294 fullpath = self.poolfile.fullpath
2295 fields = Dak822(open(self.poolfile.fullpath, 'r'))
2298 metadata = association_proxy('key', 'value')
2300 def scan_contents(self):
2302 Returns a set of names for non directories. The path names are
2303 normalized after converting them from either utf-8 or iso8859-1
2306 fullpath = self.poolfile.fullpath
2307 from daklib.contents import UnpackedSource
2308 unpacked = UnpackedSource(fullpath)
2310 for name in unpacked.get_all_filenames():
2311 # enforce proper utf-8 encoding
2313 name.decode('utf-8')
2314 except UnicodeDecodeError:
2315 name = name.decode('iso8859-1').encode('utf-8')
2319 __all__.append('DBSource')
2322 def source_exists(source, source_version, suites = ["any"], session=None):
2324 Ensure that source exists somewhere in the archive for the binary
2325 upload being processed.
2326 1. exact match => 1.0-3
2327 2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
2329 @type source: string
2330 @param source: source name
2332 @type source_version: string
2333 @param source_version: expected source version
2336 @param suites: list of suites to check in, default I{any}
2338 @type session: Session
2339 @param session: Optional SQLA session object (a temporary one will be
2340 generated if not supplied)
2343 @return: returns 1 if a source with expected version is found, otherwise 0
2350 from daklib.regexes import re_bin_only_nmu
2351 orig_source_version = re_bin_only_nmu.sub('', source_version)
2353 for suite in suites:
2354 q = session.query(DBSource).filter_by(source=source). \
2355 filter(DBSource.version.in_([source_version, orig_source_version]))
2357 # source must exist in suite X, or in some other suite that's
2358 # mapped to X, recursively... silent-maps are counted too,
2359 # unreleased-maps aren't.
2360 maps = cnf.ValueList("SuiteMappings")[:]
2362 maps = [ m.split() for m in maps ]
2363 maps = [ (x[1], x[2]) for x in maps
2364 if x[0] == "map" or x[0] == "silent-map" ]
2366 for (from_, to) in maps:
2367 if from_ in s and to not in s:
2370 q = q.filter(DBSource.suites.any(Suite.suite_name.in_(s)))
2375 # No source found so return not ok
2380 __all__.append('source_exists')
2383 def get_suites_source_in(source, session=None):
2385 Returns list of Suite objects which given C{source} name is in
2388 @param source: DBSource package name to search for
2391 @return: list of Suite objects for the given source
2394 return session.query(Suite).filter(Suite.sources.any(source=source)).all()
2396 __all__.append('get_suites_source_in')
2399 def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
2401 Returns list of DBSource objects for given C{source} name and other parameters
2404 @param source: DBSource package name to search for
2406 @type version: str or None
2407 @param version: DBSource version name to search for or None if not applicable
2409 @type dm_upload_allowed: bool
2410 @param dm_upload_allowed: If None, no effect. If True or False, only
2411 return packages with that dm_upload_allowed setting
2413 @type session: Session
2414 @param session: Optional SQL session object (a temporary one will be
2415 generated if not supplied)
2418 @return: list of DBSource objects for the given name (may be empty)
2421 q = session.query(DBSource).filter_by(source=source)
2423 if version is not None:
2424 q = q.filter_by(version=version)
2426 if dm_upload_allowed is not None:
2427 q = q.filter_by(dm_upload_allowed=dm_upload_allowed)
2431 __all__.append('get_sources_from_name')
2433 # FIXME: This function fails badly if it finds more than 1 source package and
2434 # its implementation is trivial enough to be inlined.
2436 def get_source_in_suite(source, suite, session=None):
2438 Returns a DBSource object for a combination of C{source} and C{suite}.
2440 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2441 - B{suite} - a suite name, eg. I{unstable}
2443 @type source: string
2444 @param source: source package name
2447 @param suite: the suite name
2450 @return: the version for I{source} in I{suite}
2454 q = get_suite(suite, session).get_sources(source)
2457 except NoResultFound:
2460 __all__.append('get_source_in_suite')
2463 def import_metadata_into_db(obj, session=None):
2465 This routine works on either DBBinary or DBSource objects and imports
2466 their metadata into the database
2468 fields = obj.read_control_fields()
2469 for k in fields.keys():
2472 val = str(fields[k])
2473 except UnicodeEncodeError:
2474 # Fall back to UTF-8
2476 val = fields[k].encode('utf-8')
2477 except UnicodeEncodeError:
2478 # Finally try iso8859-1
2479 val = fields[k].encode('iso8859-1')
2480 # Otherwise we allow the exception to percolate up and we cause
2481 # a reject as someone is playing silly buggers
2483 obj.metadata[get_or_set_metadatakey(k, session)] = val
2485 session.commit_or_flush()
2487 __all__.append('import_metadata_into_db')
2490 ################################################################################
2493 def add_dsc_to_db(u, filename, session=None):
2494 entry = u.pkg.files[filename]
2498 source.source = u.pkg.dsc["source"]
2499 source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
2500 source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
2501 source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
2502 source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
2503 source.install_date = datetime.now().date()
2505 dsc_component = entry["component"]
2506 dsc_location_id = entry["location id"]
2508 source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")
2510 # Set up a new poolfile if necessary
2511 if not entry.has_key("files id") or not entry["files id"]:
2512 filename = entry["pool name"] + filename
2513 poolfile = add_poolfile(filename, entry, dsc_location_id, session)
2515 pfs.append(poolfile)
2516 entry["files id"] = poolfile.file_id
2518 source.poolfile_id = entry["files id"]
2521 suite_names = u.pkg.changes["distribution"].keys()
2522 source.suites = session.query(Suite). \
2523 filter(Suite.suite_name.in_(suite_names)).all()
2525 # Add the source files to the DB (files and dsc_files)
2527 dscfile.source_id = source.source_id
2528 dscfile.poolfile_id = entry["files id"]
2529 session.add(dscfile)
2531 for dsc_file, dentry in u.pkg.dsc_files.items():
2533 df.source_id = source.source_id
2535 # If the .orig tarball is already in the pool, it's
2536 # files id is stored in dsc_files by check_dsc().
2537 files_id = dentry.get("files id", None)
2539 # Find the entry in the files hash
2540 # TODO: Bail out here properly
2542 for f, e in u.pkg.files.items():
2547 if files_id is None:
2548 filename = dfentry["pool name"] + dsc_file
2550 (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
2551 # FIXME: needs to check for -1/-2 and or handle exception
2552 if found and obj is not None:
2553 files_id = obj.file_id
2556 # If still not found, add it
2557 if files_id is None:
2558 # HACK: Force sha1sum etc into dentry
2559 dentry["sha1sum"] = dfentry["sha1sum"]
2560 dentry["sha256sum"] = dfentry["sha256sum"]
2561 poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
2562 pfs.append(poolfile)
2563 files_id = poolfile.file_id
2565 poolfile = get_poolfile_by_id(files_id, session)
2566 if poolfile is None:
2567 utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
2568 pfs.append(poolfile)
2570 df.poolfile_id = files_id
2573 # Add the src_uploaders to the DB
2574 source.uploaders = [source.maintainer]
2575 if u.pkg.dsc.has_key("uploaders"):
2576 for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
2578 source.uploaders.append(get_or_set_maintainer(up, session))
2582 return source, dsc_component, dsc_location_id, pfs
__all__.append('add_dsc_to_db')  # export add_dsc_to_db (defined above)
def add_deb_to_db(u, filename, session=None):
    """
    Add a binary package from upload *u* to the database.

    Contrary to what you might expect, this routine deals with both
    debs and udebs.  That info is in 'dbtype', whilst 'type' is
    'deb' for both of them.

    @type u: Upload
    @param u: upload whose .pkg carries the parsed changes/files data

    @type filename: string
    @param filename: key into u.pkg.files for the binary being added

    @type session: SQLA Session
    @param session: database session to use

    @return: tuple of (DBBinary object, PoolFile object)
    @raise NoSourceFieldError: if no unique source package can be found
    """
    cnf = Config()
    entry = u.pkg.files[filename]

    bin = DBBinary()
    bin.package = entry["package"]
    bin.version = entry["version"]
    bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
    bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    bin.arch_id = get_architecture(entry["architecture"], session).arch_id
    bin.binarytype = entry["dbtype"]

    # Find poolfile id
    filename = entry["pool name"] + filename
    if not entry.get("location id", None):
        entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id

    if entry.get("files id", None):
        # BUGFIX: this used to call get_poolfile_by_id(bin.poolfile_id),
        # but bin.poolfile_id has not been assigned yet at this point;
        # look the poolfile up by the known files id instead.
        poolfile = get_poolfile_by_id(entry["files id"], session)
        bin.poolfile_id = entry["files id"]
    else:
        poolfile = add_poolfile(filename, entry, entry["location id"], session)
        bin.poolfile_id = entry["files id"] = poolfile.file_id

    # Find source id
    bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
    if len(bin_sources) != 1:
        raise NoSourceFieldError("Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
                                  (bin.package, bin.version, entry["architecture"],
                                   filename, bin.binarytype, u.pkg.changes["fingerprint"]))

    bin.source_id = bin_sources[0].source_id

    if "built-using" in entry:
        for srcname, version in entry["built-using"]:
            exsources = get_sources_from_name(srcname, version, session=session)
            if len(exsources) != 1:
                raise NoSourceFieldError("Unable to find source package (%s = %s) in Built-Using for %s (%s), %s, file %s, type %s, signed by %s" % \
                                          (srcname, version, bin.package, bin.version, entry["architecture"],
                                           filename, bin.binarytype, u.pkg.changes["fingerprint"]))

            bin.extra_sources.append(exsources[0])

    # Add and flush object so it has an ID
    session.add(bin)

    suite_names = u.pkg.changes["distribution"].keys()
    bin.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    session.flush()

    # Deal with contents - disabled for now
    #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
    #if not contents:
    #    print "REJECT\nCould not determine contents of package %s" % bin.package
    #    session.rollback()
    #    raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)

    return bin, poolfile

__all__.append('add_deb_to_db')
2656 ################################################################################
class SourceACL(object):
    """ACL row controlling source-upload permissions for a fingerprint."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # BUGFIX: this repr expression was attached directly to __init__
        # (a constructor must not return a value); it belongs in __repr__.
        return '<SourceACL %s>' % self.source_acl_id
__all__.append('SourceACL')  # export SourceACL (defined above)
2667 ################################################################################
class SrcFormat(object):
    """A source package format name (e.g. '1.0', '3.0 (quilt)')."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # BUGFIX: this repr expression was attached directly to __init__;
        # it belongs in __repr__.
        return '<SrcFormat %s>' % (self.format_name)
__all__.append('SrcFormat')  # export SrcFormat (defined above)
2678 ################################################################################
# Mapping of human-readable field labels to Suite attribute names, used
# when pretty-printing suite details.
SUITE_FIELDS = [
    ('SuiteName',        'suite_name'),
    ('SuiteID',          'suite_id'),
    ('Version',          'version'),
    ('Origin',           'origin'),
    ('Description',      'description'),
    ('Untouchable',      'untouchable'),
    ('Announce',         'announce'),
    ('Codename',         'codename'),
    ('OverrideCodename', 'overridecodename'),
    ('ValidTime',        'validtime'),
    ('Priority',         'priority'),
    ('NotAutomatic',     'notautomatic'),
    ('CopyChanges',      'copychanges'),
    ('OverrideSuite',    'overridesuite'),
]
2696 # Why the heck don't we have any UNIQUE constraints in table suite?
2697 # TODO: Add UNIQUE constraints for appropriate columns.
class Suite(ORMObject):
    # ORM class for a row of table 'suite'; see SUITE_FIELDS above for
    # the printable attribute names.
    def __init__(self, suite_name = None, version = None):
        self.suite_name = suite_name
        self.version = version

    # NOTE(review): the return below ends in a line continuation whose
    # remaining property names are not visible in this view — confirm
    # against the original file.
    def properties(self):
        return ['suite_name', 'version', 'sources_count', 'binaries_count', \

    def not_null_constraints(self):
        return ['suite_name']

    def __eq__(self, val):
        # Allow comparing a Suite directly against its name string.
        if isinstance(val, str):
            return (self.suite_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.suite_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

        # NOTE(review): the header of the method enclosing this loop (a
        # details()-style formatter) is not visible in this view; it renders
        # each SUITE_FIELDS attribute as "Label: value".  'ret' is presumably
        # initialised to [] in the missing part — TODO confirm.
        for disp, field in SUITE_FIELDS:
            val = getattr(self, field, None)
            ret.append("%s: %s" % (disp, val))
        return "\n".join(ret)

    def get_architectures(self, skipsrc=False, skipall=False):
        """
        Returns list of Architecture objects

        @type skipsrc: boolean
        @param skipsrc: Whether to skip returning the 'source' architecture entry

        @type skipall: boolean
        @param skipall: Whether to skip returning the 'all' architecture entry

        @return: list of Architecture objects for the given name (may be empty)
        """
        q = object_session(self).query(Architecture).with_parent(self)
        # NOTE(review): the guards that should make these two filters
        # conditional on skipsrc/skipall are not visible in this view — as
        # written both filters always apply; confirm against the original.
        q = q.filter(Architecture.arch_string != 'source')
        q = q.filter(Architecture.arch_string != 'all')
        return q.order_by(Architecture.arch_string).all()

    def get_sources(self, source):
        """
        Returns a query object representing DBSource that is part of C{suite}.

        - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}

        @type source: string
        @param source: source package name

        @rtype: sqlalchemy.orm.query.Query
        @return: a query of DBSource
        """
        session = object_session(self)
        # NOTE(review): the continuation line completing this query (the
        # suite-membership filter) is not visible in this view.
        return session.query(DBSource).filter_by(source = source). \

__all__.append('Suite')
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Suite
    @return: Suite object for the requested suite name (None if not present)
    """

    q = session.query(Suite).filter_by(suite_name=suite)

    try:
        return q.one()
    except NoResultFound:
        # Unknown suite: signal "not found" rather than raising.
        return None

__all__.append('get_suite')
2799 ################################################################################
2801 # TODO: should be removed because the implementation is too trivial
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name

    @type suite: str
    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Architecture objects for the given name (may be empty)
    """

    return get_suite(suite, session).get_architectures(skipsrc, skipall)

__all__.append('get_suite_architectures')
2830 ################################################################################
class SuiteSrcFormat(object):
    """Association row: which source formats a suite accepts."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # BUGFIX: this repr expression was attached directly to __init__;
        # it belongs in __repr__.
        return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
__all__.append('SuiteSrcFormat')  # export SuiteSrcFormat (defined above)
def get_suite_src_formats(suite, session=None):
    """
    Returns list of allowed SrcFormat for C{suite}.

    @type suite: str
    @param suite: Suite name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: the list of allowed source formats for I{suite}
    """

    q = session.query(SrcFormat)
    q = q.join(SuiteSrcFormat)
    q = q.join(Suite).filter_by(suite_name=suite)
    q = q.order_by('format_name')

    # BUGFIX(review): the built-up query was never executed/returned.
    return q.all()

__all__.append('get_suite_src_formats')
2866 ################################################################################
class Uid(ORMObject):
    """A GPG key user id (uid) known to the archive."""

    def __init__(self, uid = None, name = None):
        # BUGFIX: the constructor had no body — store its arguments.
        self.uid = uid
        self.name = name

    def __eq__(self, val):
        # Allow comparing a Uid directly against a plain uid string.
        if isinstance(val, str):
            return (self.uid == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.uid != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        return ['uid', 'name', 'fingerprint']

    def not_null_constraints(self):
        # BUGFIX: this method had no body — only 'uid' is NOT NULL.
        return ['uid']

__all__.append('Uid')
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: Uid
    @return: the uid object for the given uidname
    """

    q = session.query(Uid).filter_by(uid=uidname)

    try:
        ret = q.one()
    except NoResultFound:
        # Not known yet: insert a new row and hand that back.
        uid = Uid()
        uid.uid = uidname
        session.add(uid)
        session.commit_or_flush()
        ret = uid

    return ret

__all__.append('get_or_set_uid')
def get_uid_from_fingerprint(fpr, session=None):
    """
    Return the Uid owning the fingerprint C{fpr}, or None if unknown.

    @type fpr: string
    @param fpr: full fingerprint to look up

    @type session: Session
    @param session: Optional SQL session object
    """
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_uid_from_fingerprint')
2939 ################################################################################
class UploadBlock(object):
    """Row blocking uploads of a given source package."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # BUGFIX: this repr expression was attached directly to __init__;
        # it belongs in __repr__.
        return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
__all__.append('UploadBlock')  # export UploadBlock (defined above)
2950 ################################################################################
class MetadataKey(ORMObject):
    """Key name for per-package metadata entries."""

    def __init__(self, key = None):
        # BUGFIX: the constructor had no body — store the key.
        self.key = key

    def properties(self):
        # BUGFIX: this method had no body.
        return ['key']

    def not_null_constraints(self):
        # BUGFIX: this method had no body.
        return ['key']

__all__.append('MetadataKey')
def get_or_set_metadatakey(keyname, session=None):
    """
    Returns MetadataKey object for given keyname.

    If no matching keyname is found, a row is inserted.

    @type keyname: string
    @param keyname: The keyname to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: MetadataKey
    @return: the metadatakey object for the given keyname
    """

    q = session.query(MetadataKey).filter_by(key=keyname)

    try:
        ret = q.one()
    except NoResultFound:
        # Not known yet: insert a new row and hand that back.
        ret = MetadataKey(keyname)
        session.add(ret)
        session.commit_or_flush()

    return ret

__all__.append('get_or_set_metadatakey')
2996 ################################################################################
class BinaryMetadata(ORMObject):
    """A (key, value) metadata entry attached to a binary package."""

    def __init__(self, key = None, value = None, binary = None):
        # BUGFIX: only `binary` was stored; `key` and `value` were dropped.
        self.key = key
        self.value = value
        self.binary = binary

    def properties(self):
        return ['binary', 'key', 'value']

    def not_null_constraints(self):
        # BUGFIX: this method had no body.
        return ['value']

__all__.append('BinaryMetadata')
3012 ################################################################################
class SourceMetadata(ORMObject):
    """A (key, value) metadata entry attached to a source package."""

    def __init__(self, key = None, value = None, source = None):
        # BUGFIX: only `source` was stored; `key` and `value` were dropped.
        self.key = key
        self.value = value
        self.source = source

    def properties(self):
        return ['source', 'key', 'value']

    def not_null_constraints(self):
        # BUGFIX: this method had no body.
        return ['value']

__all__.append('SourceMetadata')
3028 ################################################################################
class VersionCheck(ORMObject):
    """Version ordering constraint between a suite and a reference suite."""

    def __init__(self, *args, **kwargs):
        # BUGFIX: the constructor had no body.
        pass

    def properties(self):
        # BUGFIX: this method had no body.
        #return ['suite_id', 'check', 'reference_id']
        return ['check']

    def not_null_constraints(self):
        return ['suite', 'check', 'reference']

__all__.append('VersionCheck')
def get_version_checks(suite_name, check = None, session = None):
    """
    Return VersionCheck rows for C{suite_name}, optionally limited to a
    single check type.

    @type suite_name: string
    @param suite_name: name of the suite to look up

    @type check: string
    @param check: optional check type to filter on

    @rtype: list
    @return: list of VersionCheck objects (empty if the suite is unknown)
    """

    suite = get_suite(suite_name, session)
    if suite is None:
        # Unknown suite: return an empty, iterable result rather than
        # letting the query below fail.
        return []

    q = session.query(VersionCheck).filter_by(suite=suite)
    if check:
        q = q.filter_by(check=check)
    return q.all()

__all__.append('get_version_checks')
3055 ################################################################################
class DBConn(object):
    """
    database module init.
    """
    # Borg pattern: all instances share state via __shared_state, so the
    # engine, metadata, reflected tables and mappers are only set up once
    # per process.
    def __init__(self, *args, **kwargs):
        self.__dict__ = self.__shared_state

        if not getattr(self, 'initialised', False):
            self.initialised = True
            self.debug = kwargs.has_key('debug')

    def __setuptables(self):
        # Reflect each table named below from the live database and expose
        # it as self.tbl_<name>; same for views as self.view_<name>.
        # NOTE(review): the opening of the `tables` sequence (and its first
        # entries) is not visible in this view — TODO confirm.
        'binaries_metadata',
        'build_queue_files',
        'changes_pending_binaries',
        'changes_pending_files',
        'changes_pending_source',
        'changes_pending_files_map',
        'changes_pending_source_files',
        'changes_pool_files',
        'extra_src_references',
        # TODO: the maintainer column in table override should be removed.
        'suite_architectures',
        'suite_build_queue_copy',
        'suite_src_formats',
        # NOTE(review): the opening of the `views` sequence is likewise not
        # visible in this view.
        'almost_obsolete_all_associations',
        'almost_obsolete_src_associations',
        'any_associations_source',
        'bin_assoc_by_arch',
        'bin_associations_binaries',
        'binaries_suite_arch',
        'binfiles_suite_component_arch',
        'newest_all_associations',
        'newest_any_associations',
        'newest_src_association',
        'obsolete_all_associations',
        'obsolete_any_associations',
        'obsolete_any_by_all_associations',
        'obsolete_src_associations',
        'src_associations_bin',
        'src_associations_src',
        'suite_arch_by_name',

        for table_name in tables:
            table = Table(table_name, self.db_meta, \
                autoload=True, useexisting=True)
            setattr(self, 'tbl_%s' % table_name, table)

        for view_name in views:
            view = Table(view_name, self.db_meta, autoload=True)
            setattr(self, 'view_%s' % view_name, view)

    def __setupmappers(self):
        # Map each ORM class onto its reflected table, renaming raw column
        # names (e.g. 'id') to the attribute names the rest of dak uses.
        mapper(Architecture, self.tbl_architecture,
            properties = dict(arch_id = self.tbl_architecture.c.id,
               suites = relation(Suite, secondary=self.tbl_suite_architectures,
                   order_by='suite_name',
                   backref=backref('architectures', order_by='arch_string'))),
            extension = validator)

        mapper(Archive, self.tbl_archive,
               properties = dict(archive_id = self.tbl_archive.c.id,
                                 archive_name = self.tbl_archive.c.name))

        mapper(BuildQueue, self.tbl_build_queue,
               properties = dict(queue_id = self.tbl_build_queue.c.id))

        mapper(BuildQueueFile, self.tbl_build_queue_files,
               properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
                                 poolfile = relation(PoolFile, backref='buildqueueinstances')))

        mapper(DBBinary, self.tbl_binaries,
               properties = dict(binary_id = self.tbl_binaries.c.id,
                                 package = self.tbl_binaries.c.package,
                                 version = self.tbl_binaries.c.version,
                                 maintainer_id = self.tbl_binaries.c.maintainer,
                                 maintainer = relation(Maintainer),
                                 source_id = self.tbl_binaries.c.source,
                                 source = relation(DBSource, backref='binaries'),
                                 arch_id = self.tbl_binaries.c.architecture,
                                 architecture = relation(Architecture),
                                 poolfile_id = self.tbl_binaries.c.file,
                                 poolfile = relation(PoolFile, backref=backref('binary', uselist = False)),
                                 binarytype = self.tbl_binaries.c.type,
                                 fingerprint_id = self.tbl_binaries.c.sig_fpr,
                                 fingerprint = relation(Fingerprint),
                                 install_date = self.tbl_binaries.c.install_date,
                                 suites = relation(Suite, secondary=self.tbl_bin_associations,
                                     backref=backref('binaries', lazy='dynamic')),
                                 extra_sources = relation(DBSource, secondary=self.tbl_extra_src_references,
                                     backref=backref('extra_binary_references', lazy='dynamic')),
                                 key = relation(BinaryMetadata, cascade='all',
                                     collection_class=attribute_mapped_collection('key'))),
               extension = validator)

        mapper(BinaryACL, self.tbl_binary_acl,
               properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))

        mapper(BinaryACLMap, self.tbl_binary_acl_map,
               properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
                                 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
                                 architecture = relation(Architecture)))

        mapper(Component, self.tbl_component,
               properties = dict(component_id = self.tbl_component.c.id,
                                 component_name = self.tbl_component.c.name),
               extension = validator)

        mapper(DBConfig, self.tbl_config,
               properties = dict(config_id = self.tbl_config.c.id))

        mapper(DSCFile, self.tbl_dsc_files,
               properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
                                 source_id = self.tbl_dsc_files.c.source,
                                 source = relation(DBSource),
                                 poolfile_id = self.tbl_dsc_files.c.file,
                                 poolfile = relation(PoolFile)))

        mapper(PoolFile, self.tbl_files,
               properties = dict(file_id = self.tbl_files.c.id,
                                 filesize = self.tbl_files.c.size,
                                 location_id = self.tbl_files.c.location,
                                 location = relation(Location,
                                     # using lazy='dynamic' in the back
                                     # reference because we have A LOT of
                                     # files in one location
                                     backref=backref('files', lazy='dynamic'))),
               extension = validator)

        mapper(Fingerprint, self.tbl_fingerprint,
               properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
                                 uid_id = self.tbl_fingerprint.c.uid,
                                 uid = relation(Uid),
                                 keyring_id = self.tbl_fingerprint.c.keyring,
                                 keyring = relation(Keyring),
                                 source_acl = relation(SourceACL),
                                 binary_acl = relation(BinaryACL)),
               extension = validator)

        mapper(Keyring, self.tbl_keyrings,
               properties = dict(keyring_name = self.tbl_keyrings.c.name,
                                 keyring_id = self.tbl_keyrings.c.id))

        mapper(DBChange, self.tbl_changes,
               properties = dict(change_id = self.tbl_changes.c.id,
                                 poolfiles = relation(PoolFile,
                                                      secondary=self.tbl_changes_pool_files,
                                                      backref="changeslinks"),
                                 seen = self.tbl_changes.c.seen,
                                 source = self.tbl_changes.c.source,
                                 binaries = self.tbl_changes.c.binaries,
                                 architecture = self.tbl_changes.c.architecture,
                                 distribution = self.tbl_changes.c.distribution,
                                 urgency = self.tbl_changes.c.urgency,
                                 maintainer = self.tbl_changes.c.maintainer,
                                 changedby = self.tbl_changes.c.changedby,
                                 date = self.tbl_changes.c.date,
                                 version = self.tbl_changes.c.version,
                                 files = relation(ChangePendingFile,
                                                  secondary=self.tbl_changes_pending_files_map,
                                                  backref="changesfile"),
                                 in_queue_id = self.tbl_changes.c.in_queue,
                                 in_queue = relation(PolicyQueue,
                                                     primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
                                 approved_for_id = self.tbl_changes.c.approved_for))

        mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
               properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))

        mapper(ChangePendingFile, self.tbl_changes_pending_files,
               properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
                                 filename = self.tbl_changes_pending_files.c.filename,
                                 size = self.tbl_changes_pending_files.c.size,
                                 md5sum = self.tbl_changes_pending_files.c.md5sum,
                                 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
                                 sha256sum = self.tbl_changes_pending_files.c.sha256sum))

        mapper(ChangePendingSource, self.tbl_changes_pending_source,
               properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
                                 change = relation(DBChange),
                                 maintainer = relation(Maintainer,
                                                       primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
                                 changedby = relation(Maintainer,
                                                      primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
                                 fingerprint = relation(Fingerprint),
                                 source_files = relation(ChangePendingFile,
                                                         secondary=self.tbl_changes_pending_source_files,
                                                         backref="pending_sources")))

        mapper(KeyringACLMap, self.tbl_keyring_acl_map,
               properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
                                 keyring = relation(Keyring, backref="keyring_acl_map"),
                                 architecture = relation(Architecture)))

        mapper(Location, self.tbl_location,
               properties = dict(location_id = self.tbl_location.c.id,
                                 component_id = self.tbl_location.c.component,
                                 component = relation(Component, backref='location'),
                                 archive_id = self.tbl_location.c.archive,
                                 archive = relation(Archive),
                                 # FIXME: the 'type' column is old cruft and
                                 # should be removed in the future.
                                 archive_type = self.tbl_location.c.type),
               extension = validator)

        mapper(Maintainer, self.tbl_maintainer,
               properties = dict(maintainer_id = self.tbl_maintainer.c.id,
                   maintains_sources = relation(DBSource, backref='maintainer',
                       primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
                   changed_sources = relation(DBSource, backref='changedby',
                       primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
               extension = validator)

        mapper(NewComment, self.tbl_new_comments,
               properties = dict(comment_id = self.tbl_new_comments.c.id))

        mapper(Override, self.tbl_override,
               properties = dict(suite_id = self.tbl_override.c.suite,
                                 suite = relation(Suite, \
                                    backref=backref('overrides', lazy='dynamic')),
                                 package = self.tbl_override.c.package,
                                 component_id = self.tbl_override.c.component,
                                 component = relation(Component, \
                                    backref=backref('overrides', lazy='dynamic')),
                                 priority_id = self.tbl_override.c.priority,
                                 priority = relation(Priority, \
                                    backref=backref('overrides', lazy='dynamic')),
                                 section_id = self.tbl_override.c.section,
                                 section = relation(Section, \
                                    backref=backref('overrides', lazy='dynamic')),
                                 overridetype_id = self.tbl_override.c.type,
                                 overridetype = relation(OverrideType, \
                                    backref=backref('overrides', lazy='dynamic'))))

        mapper(OverrideType, self.tbl_override_type,
               properties = dict(overridetype = self.tbl_override_type.c.type,
                                 overridetype_id = self.tbl_override_type.c.id))

        mapper(PolicyQueue, self.tbl_policy_queue,
               properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))

        mapper(Priority, self.tbl_priority,
               properties = dict(priority_id = self.tbl_priority.c.id))

        mapper(Section, self.tbl_section,
               properties = dict(section_id = self.tbl_section.c.id,
                                 section=self.tbl_section.c.section))

        mapper(DBSource, self.tbl_source,
               properties = dict(source_id = self.tbl_source.c.id,
                                 version = self.tbl_source.c.version,
                                 maintainer_id = self.tbl_source.c.maintainer,
                                 poolfile_id = self.tbl_source.c.file,
                                 poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
                                 fingerprint_id = self.tbl_source.c.sig_fpr,
                                 fingerprint = relation(Fingerprint),
                                 changedby_id = self.tbl_source.c.changedby,
                                 srcfiles = relation(DSCFile,
                                     primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
                                 suites = relation(Suite, secondary=self.tbl_src_associations,
                                     backref=backref('sources', lazy='dynamic')),
                                 uploaders = relation(Maintainer,
                                     secondary=self.tbl_src_uploaders),
                                 key = relation(SourceMetadata, cascade='all',
                                     collection_class=attribute_mapped_collection('key'))),
               extension = validator)

        mapper(SourceACL, self.tbl_source_acl,
               properties = dict(source_acl_id = self.tbl_source_acl.c.id))

        mapper(SrcFormat, self.tbl_src_format,
               properties = dict(src_format_id = self.tbl_src_format.c.id,
                                 format_name = self.tbl_src_format.c.format_name))

        mapper(Suite, self.tbl_suite,
               properties = dict(suite_id = self.tbl_suite.c.id,
                                 policy_queue = relation(PolicyQueue),
                                 copy_queues = relation(BuildQueue,
                                     secondary=self.tbl_suite_build_queue_copy)),
               extension = validator)

        mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
               properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
                                 suite = relation(Suite, backref='suitesrcformats'),
                                 src_format_id = self.tbl_suite_src_formats.c.src_format,
                                 src_format = relation(SrcFormat)))

        mapper(Uid, self.tbl_uid,
               properties = dict(uid_id = self.tbl_uid.c.id,
                                 fingerprint = relation(Fingerprint)),
               extension = validator)

        mapper(UploadBlock, self.tbl_upload_blocks,
               properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
                                 fingerprint = relation(Fingerprint, backref="uploadblocks"),
                                 uid = relation(Uid, backref="uploadblocks")))

        # NOTE(review): the 'properties = dict(' opener for each of the
        # mapper calls below is not visible in this view — TODO confirm
        # against the original file (the trailing parens assume it).
        mapper(BinContents, self.tbl_bin_contents,
            binary = relation(DBBinary,
                backref=backref('contents', lazy='dynamic', cascade='all')),
            file = self.tbl_bin_contents.c.file))

        mapper(SrcContents, self.tbl_src_contents,
            source = relation(DBSource,
                backref=backref('contents', lazy='dynamic', cascade='all')),
            file = self.tbl_src_contents.c.file))

        mapper(MetadataKey, self.tbl_metadata_keys,
            key_id = self.tbl_metadata_keys.c.key_id,
            key = self.tbl_metadata_keys.c.key))

        mapper(BinaryMetadata, self.tbl_binaries_metadata,
            binary_id = self.tbl_binaries_metadata.c.bin_id,
            binary = relation(DBBinary),
            key_id = self.tbl_binaries_metadata.c.key_id,
            key = relation(MetadataKey),
            value = self.tbl_binaries_metadata.c.value))

        mapper(SourceMetadata, self.tbl_source_metadata,
            source_id = self.tbl_source_metadata.c.src_id,
            source = relation(DBSource),
            key_id = self.tbl_source_metadata.c.key_id,
            key = relation(MetadataKey),
            value = self.tbl_source_metadata.c.value))

        mapper(VersionCheck, self.tbl_version_check,
            suite_id = self.tbl_version_check.c.suite,
            suite = relation(Suite, primaryjoin=self.tbl_version_check.c.suite==self.tbl_suite.c.id),
            reference_id = self.tbl_version_check.c.reference,
            reference = relation(Suite, primaryjoin=self.tbl_version_check.c.reference==self.tbl_suite.c.id, lazy='joined')))

    ## Connection functions
    def __createconn(self):
        # Build the postgres connection string from dak configuration and
        # set up engine, metadata, reflected tables and mappers.
        from config import Config
        # NOTE(review): `cnf` is presumably Config() — the assignment is
        # not visible in this view; TODO confirm.
        if cnf.has_key("DB::Service"):
            # Connect via a pg service definition (see monkey patch below).
            connstr = "postgresql://service=%s" % cnf["DB::Service"]
        elif cnf.has_key("DB::Host"):
            # TCP/IP connection.
            connstr = "postgresql://%s" % cnf["DB::Host"]
            if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
                connstr += ":%s" % cnf["DB::Port"]
            connstr += "/%s" % cnf["DB::Name"]
            # NOTE(review): the `else:` branch header (unix-socket case)
            # for the three lines below is not visible in this view.
            connstr = "postgresql:///%s" % cnf["DB::Name"]
            if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
                connstr += "?port=%s" % cnf["DB::Port"]

        engine_args = { 'echo': self.debug }
        if cnf.has_key('DB::PoolSize'):
            engine_args['pool_size'] = int(cnf['DB::PoolSize'])
        if cnf.has_key('DB::MaxOverflow'):
            engine_args['max_overflow'] = int(cnf['DB::MaxOverflow'])
        if sa_major_version == '0.6' and cnf.has_key('DB::Unicode') and \
            cnf['DB::Unicode'] == 'false':
            engine_args['use_native_unicode'] = False

        # Monkey patch a new dialect in in order to support service= syntax
        import sqlalchemy.dialects.postgresql
        from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
        class PGDialect_psycopg2_dak(PGDialect_psycopg2):
            def create_connect_args(self, url):
                if str(url).startswith('postgresql://service='):
                    # Strip the scheme prefix; the remainder names the
                    # pg service definition.
                    servicename = str(url)[21:]
                    return (['service=%s' % servicename], {})
                return PGDialect_psycopg2.create_connect_args(self, url)

        sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak

        self.db_pg = create_engine(connstr, **engine_args)
        self.db_meta = MetaData()
        self.db_meta.bind = self.db_pg
        # NOTE(review): the remaining sessionmaker() arguments are not
        # visible in this view (the call below is truncated).
        self.db_smaker = sessionmaker(bind=self.db_pg,

        self.__setuptables()
        self.__setupmappers()
        self.pid = os.getpid()

    # NOTE(review): the header of the enclosing method (a session()
    # accessor) is not visible in this view — TODO confirm.
        # reinitialize DBConn in new processes
        if self.pid != os.getpid():
            return self.db_smaker()

__all__.append('DBConn')