5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
37 import daklib.daksubprocess
39 from os.path import normpath
50 import simplejson as json
52 from datetime import datetime, timedelta
53 from errno import ENOENT
54 from tempfile import mkstemp, mkdtemp
55 from tarfile import TarFile
57 from inspect import getargspec
60 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
62 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
63 backref, MapperExtension, EXT_CONTINUE, object_mapper, clear_mappers
64 from sqlalchemy import types as sqltypes
65 from sqlalchemy.orm.collections import attribute_mapped_collection
66 from sqlalchemy.ext.associationproxy import association_proxy
68 # Don't remove this, we re-export the exceptions to scripts which import us
69 from sqlalchemy.exc import *
70 from sqlalchemy.orm.exc import NoResultFound
72 # Only import Config until Queue stuff is changed to store its config
74 from config import Config
75 from textutils import fix_maintainer
76 from dak_exceptions import DBUpdateError, NoSourceFieldError, FileExistsError
# suppress some deprecation warnings in squeeze related to sqlalchemy
# NOTE(review): the category arguments / closing parentheses of these
# filterwarnings() calls are not visible in this excerpt — confirm against
# the full file before editing.
warnings.filterwarnings('ignore', \
    "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
warnings.filterwarnings('ignore', \
    "Predicate of partial index .* ignored during reflection", \
88 ################################################################################
# Patch in support for the debversion field type so that it works during
# reflection.
# NOTE(review): the try/except that selects exactly one of the two
# assignments below (depending on the installed SQLAlchemy version) is not
# visible in this excerpt.
# that is for sqlalchemy 0.6
UserDefinedType = sqltypes.UserDefinedType
# this one for sqlalchemy 0.5
UserDefinedType = sqltypes.TypeEngine
class DebVersion(UserDefinedType):
    """Column type mapping PostgreSQL's 'debversion' type for SQLAlchemy.

    NOTE(review): the method bodies are not visible in this excerpt.
    """
    def get_col_spec(self):

    def bind_processor(self, dialect):

    # ' = None' is needed for sqlalchemy 0.5:
    def result_processor(self, dialect, coltype = None):
# Register the debversion type for reflection on supported SQLAlchemy
# versions; refuse to run on anything newer/older.
sa_major_version = sqlalchemy.__version__[0:3]
if sa_major_version in ["0.5", "0.6", "0.7", "0.8", "0.9"]:
    from sqlalchemy.databases import postgres
    postgres.ischema_names['debversion'] = DebVersion
# NOTE(review): the 'else:' header introducing the following raise is not
# visible in this excerpt.
    raise Exception("dak only ported to SQLA versions 0.5 to 0.9. See daklib/dbconn.py")
118 ################################################################################
120 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
122 ################################################################################
def session_wrapper(fn):
    """
    Wrapper around common ".., session=None):" handling. If the wrapped
    function is called without passing 'session', we create a local one
    and destroy it when the function ends.

    Also attaches a commit_or_flush method to the session; if we created a
    local session, this is a synonym for session.commit(), otherwise it is a
    synonym for session.flush().
    """
    # NOTE(review): several lines of this decorator are not visible in this
    # excerpt (branch guards, the try/finally cleanup closing the private
    # session, and the final 'return wrapped').
    def wrapped(*args, **kwargs):
        private_transaction = False

        # Find the session object
        session = kwargs.get('session')

        if len(args) <= len(getargspec(fn)[0]) - 1:
            # No session specified as last argument or in kwargs
            private_transaction = True
            session = kwargs['session'] = DBConn().session()
        # Session is last argument in args
            session = args[-1] = DBConn().session()
            private_transaction = True

        if private_transaction:
            session.commit_or_flush = session.commit
            session.commit_or_flush = session.flush

        return fn(*args, **kwargs)

        if private_transaction:
            # We created a session; close it.

    wrapped.__doc__ = fn.__doc__
    wrapped.func_name = fn.func_name
171 __all__.append('session_wrapper')
173 ################################################################################
class ORMObject(object):
    """
    ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
    derived classes must implement the properties() method.

    NOTE(review): many physical lines of this class (docstring delimiters,
    some method headers like json()/classname()/__repr__()/__str__()/
    validate(), else branches and 'continue' statements) are not visible in
    this excerpt; the code below reproduces only the visible lines.
    """

    def properties(self):
        """
        This method should be implemented by all derived classes and returns a
        list of the important properties. The properties 'created' and
        'modified' will be added automatically. A suffix '_count' should be
        added to properties that are lists or query objects. The most important
        property name should be returned as the first element in the list
        because it is used by repr().
        """

        # json(): serialize the object based on properties().
        Returns a JSON representation of the object based on the properties
        returned from the properties() method.

        # add created and modified
        all_properties = self.properties() + ['created', 'modified']
        for property in all_properties:
            # check for list or query
            if property[-6:] == '_count':
                real_property = property[:-6]
                if not hasattr(self, real_property):
                value = getattr(self, real_property)
                if hasattr(value, '__len__'):
                elif hasattr(value, 'count'):
                    # query (but not during validation)
                    if self.in_validation:
                    value = value.count()
                    raise KeyError('Do not understand property %s.' % property)
            if not hasattr(self, property):
                value = getattr(self, property)
            elif isinstance(value, ORMObject):
                # use repr() for ORMObject types
                # we want a string for all other types because json cannot
                data[property] = value
        return json.dumps(data)

        # classname(): name of the concrete class, used by repr()/str().
        Returns the name of the class.
        return type(self).__name__

        # __repr__(): short representation from the first properties() entry.
        Returns a short string representation of the object using the first
        element from the properties() method.
        primary_property = self.properties()[0]
        value = getattr(self, primary_property)
        return '<%s %s>' % (self.classname(), str(value))

        # __str__(): long representation including the JSON dump.
        Returns a human readable form of the object using the properties()
        return '<%s %s>' % (self.classname(), self.json())

    def not_null_constraints(self):
        """
        Returns a list of properties that must be not NULL. Derived classes
        should override this method if needed.
        """

    # Template for the error raised by validate() below.
    validation_message = \
        "Validation failed because property '%s' must not be empty in object\n%s"

    in_validation = False

        # validate(): enforce the not-NULL contract before flush.
        This function validates the not NULL constraints as returned by
        not_null_constraints(). It raises the DBUpdateError exception if
        for property in self.not_null_constraints():
            # TODO: It is a bit awkward that the mapper configuration allow
            # directly setting the numeric _id columns. We should get rid of it
            if hasattr(self, property + '_id') and \
                getattr(self, property + '_id') is not None:
            if not hasattr(self, property) or getattr(self, property) is None:
                # str() might lead to races due to a 2nd flush
                self.in_validation = True
                message = self.validation_message % (property, str(self))
                self.in_validation = False
                raise DBUpdateError(message)

    def get(cls, primary_key, session = None):
        """
        This is a support function that allows getting an object by its primary
        Architecture.get(3[, session])
        instead of the more verbose
        session.query(Architecture).get(3)
        """
        return session.query(cls).get(primary_key)

    def session(self, replace = False):
        # Returns the current session that is associated with the object. May
        # return None if the object is in detached state.
        return object_session(self)

    def clone(self, session = None):
        """
        Clones the current object in a new session and returns the new clone. A
        fresh session is created if the optional session parameter is not
        provided. The function will fail if a session is provided and has

        RATIONALE: SQLAlchemy's session is not thread safe. This method clones
        an existing object to allow several threads to work with their own
        instances of an ORMObject.

        WARNING: Only persistent (committed) objects can be cloned. Changes
        made to the original object that are not committed yet will get lost.
        The session of the new object will always be rolled back to avoid
        """
        if self.session() is None:
            raise RuntimeError( \
                'Method clone() failed for detached object:\n%s' % self)
        self.session().flush()
        mapper = object_mapper(self)
        primary_key = mapper.primary_key_from_instance(self)
        object_class = self.__class__
            session = DBConn().session()
        elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
            raise RuntimeError( \
                'Method clone() failed due to unflushed changes in session.')
        new_object = session.query(object_class).get(primary_key)
        if new_object is None:
            raise RuntimeError( \
                'Method clone() failed for non-persistent object:\n%s' % self)
348 __all__.append('ORMObject')
350 ################################################################################
class Validator(MapperExtension):
    """
    This class calls the validate() method for each instance for the
    'before_update' and 'before_insert' events. A global object validator is
    used for configuring the individual mappers.

    NOTE(review): the method bodies are not visible in this excerpt.
    """

    def before_update(self, mapper, connection, instance):

    def before_insert(self, mapper, connection, instance):

# Shared validator instance used when configuring mappers.
validator = Validator()
369 ################################################################################
class ACL(ORMObject):
    # NOTE(review): the 'def __repr__(self):' header for the following line
    # is not visible in this excerpt.
        return "<ACL {0}>".format(self.name)

__all__.append('ACL')
class ACLPerSource(ORMObject):
    # NOTE(review): the 'def __repr__(self):' header for the following line
    # is not visible in this excerpt.
        return "<ACLPerSource acl={0} fingerprint={1} source={2} reason={3}>".format(self.acl.name, self.fingerprint.fingerprint, self.source, self.reason)

__all__.append('ACLPerSource')
383 ################################################################################
class Architecture(ORMObject):
    """An architecture known to the archive (e.g. 'amd64' or 'source')."""

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Support comparing directly against a plain architecture string.
        if not isinstance(val, str):
            # This signals to use the normal comparison operator
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        # Mirror of __eq__ for the inequality operator.
        if not isinstance(val, str):
            # This signals to use the normal comparison operator
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # The first entry is used by ORMObject.__repr__().
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']

__all__.append('Architecture')
def get_architecture(architecture, session=None):
    """
    Returns database id for given C{architecture}.

    @type architecture: string
    @param architecture: The name of the architecture

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Architecture object for the given arch (None if not present)
    """
    # NOTE(review): the try/return statements around the query result are not
    # visible in this excerpt.
    q = session.query(Architecture).filter_by(arch_string=architecture)
    except NoResultFound:

__all__.append('get_architecture')
435 ################################################################################
class Archive(object):
    # An archive (e.g. ftp-master) known to dak.
    # NOTE(review): the __init__ body and the '__repr__' header for the
    # return line below are not visible in this excerpt.
    def __init__(self, *args, **kwargs):

        return '<Archive %s>' % self.archive_name

__all__.append('Archive')
def get_archive(archive, session=None):
    """
    returns database id for given C{archive}.

    @type archive: string
    @param archive: the name of the archive

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Archive object for the given name (None if not present)
    """
    # Archive names are stored lowercase.
    # NOTE(review): the try/return statements around the query result are not
    # visible in this excerpt.
    archive = archive.lower()

    q = session.query(Archive).filter_by(archive_name=archive)
    except NoResultFound:

__all__.append('get_archive')
473 ################################################################################
class ArchiveFile(object):
    # Association of a pool file with an archive and component.
    def __init__(self, archive=None, component=None, file=None):
        self.archive = archive
        self.component = component
        # NOTE(review): 'self.file = file' and the 'path' property header for
        # the return line below are not visible in this excerpt.
        return os.path.join(self.archive.path, 'pool', self.component.component_name, self.file.filename)

__all__.append('ArchiveFile')
486 ################################################################################
class BinContents(ORMObject):
    # One path name contained in a binary package.
    def __init__(self, file = None, binary = None):
        # NOTE(review): the attribute assignments are not visible in this
        # excerpt.

    def properties(self):
        return ['file', 'binary']

__all__.append('BinContents')
498 ################################################################################
class DBBinary(ORMObject):
    """A binary package known to the database.

    NOTE(review): several physical lines of this class are not visible in
    this excerpt ('self.source = source' in __init__, the pkid/metadata
    property headers, parts of scan_contents() and of the property list
    continuations); only the visible lines are reproduced below.
    """
    def __init__(self, package = None, source = None, version = None, \
        maintainer = None, architecture = None, poolfile = None, \
        binarytype = 'deb', fingerprint=None):
        self.package = package
        self.version = version
        self.maintainer = maintainer
        self.architecture = architecture
        self.poolfile = poolfile
        self.binarytype = binarytype
        self.fingerprint = fingerprint

        return self.binary_id

    def properties(self):
        return ['package', 'version', 'maintainer', 'source', 'architecture', \
            'poolfile', 'binarytype', 'fingerprint', 'install_date', \
            'suites_count', 'binary_id', 'contents_count', 'extra_sources']

    def not_null_constraints(self):
        return ['package', 'version', 'maintainer', 'source', 'poolfile', \

    # Proxy giving dict-like access to the binary's metadata key/value rows.
    metadata = association_proxy('key', 'value')

    def scan_contents(self):
        """
        Yields the contents of the package. Only regular files are yielded and
        the path names are normalized after converting them from either utf-8
        or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the
        package does not contain any regular file.
        """
        fullpath = self.poolfile.fullpath
        dpkg_cmd = ('dpkg-deb', '--fsys-tarfile', fullpath)
        dpkg = daklib.daksubprocess.Popen(dpkg_cmd, stdout=subprocess.PIPE)
        tar = TarFile.open(fileobj = dpkg.stdout, mode = 'r|')
        for member in tar.getmembers():
            if not member.isdir():
                name = normpath(member.name)
                # enforce proper utf-8 encoding
                except UnicodeDecodeError:
                    name = name.decode('iso8859-1').encode('utf-8')

    def read_control(self):
        """
        Reads the control information from a binary.

        @return: stanza text of the control section.
        """
        fullpath = self.poolfile.fullpath
        with open(fullpath, 'r') as deb_file:
            return utils.deb_extract_control(deb_file)

    def read_control_fields(self):
        """
        Reads the control information from a binary and return

        @return: fields of the control section as a dictionary.
        """
        stanza = self.read_control()
        return apt_pkg.TagSection(stanza)

        # metadata proxy helper: query this binary's BinaryMetadata rows.
        session = object_session(self)
        query = session.query(BinaryMetadata).filter_by(binary=self)
        return MetadataProxy(session, query)

__all__.append('DBBinary')
def get_suites_binary_in(package, session=None):
    """
    Returns list of Suite objects which given C{package} name is in

    @param package: DBBinary package name to search for

    @return: list of Suite objects for the given package
    """
    return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()

__all__.append('get_suites_binary_in')
def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
    """
    Returns the component name of the newest binary package in suite_list or
    None if no package is found. The result can be optionally filtered by a list
    of architecture names.

    @param package: DBBinary package name to search for

    @type suite_list: list of str
    @param suite_list: list of suite_name items

    @type arch_list: list of str
    @param arch_list: optional list of arch_string items that defaults to []

    @rtype: str or NoneType
    @return: name of component or None
    """
    # NOTE(review): the 'binary is None' early-return branch is not visible
    # in this excerpt.  Also note the mutable default 'arch_list=[]' — only
    # read here, so harmless, but worth confirming in the full file.
    q = session.query(DBBinary).filter_by(package = package). \
        join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
    if len(arch_list) > 0:
        q = q.join(DBBinary.architecture). \
            filter(Architecture.arch_string.in_(arch_list))
    binary = q.order_by(desc(DBBinary.version)).first()
    return binary.poolfile.component.component_name

__all__.append('get_component_by_package_suite')
632 ################################################################################
class BuildQueue(object):
    # A build queue (e.g. buildd incoming) known to dak.
    # NOTE(review): the __init__ body and the '__repr__' header for the
    # return line below are not visible in this excerpt.
    def __init__(self, *args, **kwargs):

        return '<BuildQueue %s>' % self.queue_name

__all__.append('BuildQueue')
643 ################################################################################
class Component(ORMObject):
    """An archive component such as 'main', 'contrib' or 'non-free'."""

    def __init__(self, component_name = None):
        self.component_name = component_name

    def __eq__(self, val):
        # Support comparing directly against a plain component-name string.
        if not isinstance(val, str):
            # This signals to use the normal comparison operator
            return NotImplemented
        return self.component_name == val

    def __ne__(self, val):
        # Mirror of __eq__ for the inequality operator.
        if not isinstance(val, str):
            # This signals to use the normal comparison operator
            return NotImplemented
        return self.component_name != val

    def properties(self):
        # The first entry is used by ORMObject.__repr__().
        return ['component_name', 'component_id', 'description',
                'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        return ['component_name']

__all__.append('Component')
def get_component(component, session=None):
    """
    Returns database id for given C{component}.

    @type component: string
    @param component: The name of the override type

    @return: the database id for the given component
    """
    # Component names are stored lowercase.
    # NOTE(review): the try/return statements around the query result are not
    # visible in this excerpt.
    component = component.lower()

    q = session.query(Component).filter_by(component_name=component)
    except NoResultFound:

__all__.append('get_component')
def get_mapped_component_name(component_name):
    # Apply ComponentMappings from dak.conf to a component name; returns the
    # (possibly rewritten) name.
    # NOTE(review): the 'cnf = Config()' initialisation and the assignment
    # performed when a mapping matches are not visible in this excerpt.
    for m in cnf.value_list("ComponentMappings"):
        (src, dst) = m.split()
        if component_name == src:
    return component_name

__all__.append('get_mapped_component_name')
def get_mapped_component(component_name, session=None):
    """get component after mappings

    Evaluate component mappings from ComponentMappings in dak.conf for the
    given component name.

    @todo: ansgar wants to get rid of this. It's currently only used for

    @type component_name: str
    @param component_name: component name

    @param session: database session

    @rtype: L{daklib.dbconn.Component} or C{None}
    @return: component after applying maps or C{None}
    """
    # NOTE(review): the final 'return component' is not visible in this
    # excerpt.
    component_name = get_mapped_component_name(component_name)
    component = session.query(Component).filter_by(component_name=component_name).first()

__all__.append('get_mapped_component')
def get_component_names(session=None):
    """
    Returns list of strings of component names.

    @return: list of strings of component names
    """
    return [ x.component_name for x in session.query(Component).all() ]

__all__.append('get_component_names')
741 ################################################################################
class DBConfig(object):
    # A key/value configuration row stored in the database.
    # NOTE(review): the __init__ body and the '__repr__' header for the
    # return line below are not visible in this excerpt.
    def __init__(self, *args, **kwargs):

        return '<DBConfig %s>' % self.name

__all__.append('DBConfig')
752 ################################################################################
class DSCFile(object):
    # A file referenced from a source package's .dsc.
    # NOTE(review): the __init__ body and the '__repr__' header for the
    # return line below are not visible in this excerpt.
    def __init__(self, *args, **kwargs):

        return '<DSCFile %s>' % self.dscfile_id

__all__.append('DSCFile')
def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
    """
    Returns a list of DSCFiles which may be empty

    @type dscfile_id: int (optional)
    @param dscfile_id: the dscfile_id of the DSCFiles to find

    @type source_id: int (optional)
    @param source_id: the source id related to the DSCFiles to find

    @type poolfile_id: int (optional)
    @param poolfile_id: the poolfile id related to the DSCFiles to find

    @return: Possibly empty list of DSCFiles
    """
    # NOTE(review): the final 'return q.all()' is not visible in this excerpt.
    q = session.query(DSCFile)

    if dscfile_id is not None:
        q = q.filter_by(dscfile_id=dscfile_id)

    if source_id is not None:
        q = q.filter_by(source_id=source_id)

    if poolfile_id is not None:
        q = q.filter_by(poolfile_id=poolfile_id)

__all__.append('get_dscfiles')
796 ################################################################################
class ExternalOverride(ORMObject):
    # An externally-maintained override (per-package key/value).
    # NOTE(review): the __init__ body and the '__repr__' header for the
    # return line below are not visible in this excerpt.
    def __init__(self, *args, **kwargs):

        return '<ExternalOverride %s = %s: %s>' % (self.package, self.key, self.value)

__all__.append('ExternalOverride')
807 ################################################################################
class PoolFile(ORMObject):
    """A file stored in the pool.

    NOTE(review): several physical lines are not visible in this excerpt
    (the __init__ signature continuation, the property headers for
    fullpath/component/basename, and the early-return branches in
    identical_to()); only the visible lines are reproduced below.
    """
    def __init__(self, filename = None, filesize = -1, \
        self.filename = filename
        self.filesize = filesize

        # fullpath helper: prefer the archive-file entry of a tainted archive.
        session = DBConn().session().object_session(self)
        af = session.query(ArchiveFile).join(Archive) \
            .filter(ArchiveFile.file == self) \
            .order_by(Archive.tainted.desc()).first()

        # component helper: a pool file must belong to exactly one component.
        session = DBConn().session().object_session(self)
        component_id = session.query(ArchiveFile.component_id).filter(ArchiveFile.file == self) \
            .group_by(ArchiveFile.component_id).one()
        return session.query(Component).get(component_id)

        return os.path.basename(self.filename)

    def is_valid(self, filesize = -1, md5sum = None):
        return self.filesize == long(filesize) and self.md5sum == md5sum

    def properties(self):
        return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
            'sha256sum', 'source', 'binary', 'last_used']

    def not_null_constraints(self):
        return ['filename', 'md5sum']

    def identical_to(self, filename):
        """
        compare size and hash with the given file

        @return: true if the given file has the same size and hash as this object; false otherwise
        """
        st = os.stat(filename)
        if self.filesize != st.st_size:

        f = open(filename, "r")
        sha256sum = apt_pkg.sha256sum(f)
        if sha256sum != self.sha256sum:

__all__.append('PoolFile')
865 ################################################################################
class Fingerprint(ORMObject):
    # A GPG key fingerprint known to the archive.
    def __init__(self, fingerprint = None):
        self.fingerprint = fingerprint

    def properties(self):
        # NOTE(review): the continuation of this list is not visible in this
        # excerpt.
        return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \

    def not_null_constraints(self):
        return ['fingerprint']

__all__.append('Fingerprint')
def get_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).

    @return: the Fingerprint object for the given fpr or None
    """
    # NOTE(review): the try/return statements around the query result are not
    # visible in this excerpt.
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)
    except NoResultFound:

__all__.append('get_fingerprint')
def get_or_set_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    If no matching fpr is found, a row is inserted.

    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @return: the Fingerprint object for the given fpr
    """
    # NOTE(review): the try/return statements and the final return of the
    # newly created Fingerprint are not visible in this excerpt.
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)
    except NoResultFound:
        fingerprint = Fingerprint()
        fingerprint.fingerprint = fpr
        session.add(fingerprint)
        session.commit_or_flush()

__all__.append('get_or_set_fingerprint')
942 ################################################################################
# Helper routine for Keyring class
def get_ldap_name(entry):
    # Build a display name from the cn/mn/sn LDAP attributes.
    # NOTE(review): the initialisation of 'name' and the per-key lookup that
    # sets 'ret' are not visible in this excerpt.
    for k in ["cn", "mn", "sn"]:
        if ret and ret[0] != "" and ret[0] != "-":
    return " ".join(name)
953 ################################################################################
class Keyring(object):
    """A GPG keyring tracked by dak.

    NOTE(review): several physical lines of this class are not visible in
    this excerpt (class-level key dictionaries, the __repr__/__init__
    bodies, key-id extraction in load_keys(), the LDAP result loop header in
    import_users_from_ldap() and several else/continue branches); only the
    visible lines are reproduced below.
    """

    def __init__(self, *args, **kwargs):

        return '<Keyring %s>' % self.keyring_name

    def de_escape_gpg_str(self, txt):
        # Undo gpg's \xNN escaping in user-id strings.
        esclist = re.split(r'(\\x..)', txt)
        for x in range(1,len(esclist),2):
            esclist[x] = "%c" % (int(esclist[x][2:],16))
        return "".join(esclist)

    def parse_address(self, uid):
        """parses uid and returns a tuple of real name and email address"""
        (name, address) = email.Utils.parseaddr(uid)
        name = re.sub(r"\s*[(].*[)]", "", name)
        name = self.de_escape_gpg_str(name)
        return (name, address)

    def load_keys(self, keyring):
        # Populate self.keys / self.fpr_lookup from a keyring file via gpg.
        if not self.keyring_id:
            raise Exception('Must be initialized with database information')

        cmd = ["gpg", "--no-default-keyring", "--keyring", keyring,
               "--with-colons", "--fingerprint", "--fingerprint"]
        p = daklib.daksubprocess.Popen(cmd, stdout=subprocess.PIPE)

        need_fingerprint = False

        for line in p.stdout:
            field = line.split(":")
            if field[0] == "pub":
                (name, addr) = self.parse_address(field[9])
                self.keys[key]["email"] = addr
                self.keys[key]["name"] = name
                need_fingerprint = True
            elif key and field[0] == "uid":
                (name, addr) = self.parse_address(field[9])
                if "email" not in self.keys[key] and "@" in addr:
                    self.keys[key]["email"] = addr
                    self.keys[key]["name"] = name
            elif need_fingerprint and field[0] == "fpr":
                self.keys[key]["fingerprints"] = [field[9]]
                self.fpr_lookup[field[9]] = key
                need_fingerprint = False

        # Propagate a non-zero gpg exit status to the caller.
        raise subprocess.CalledProcessError(r, cmd)

    def import_users_from_ldap(self, session):
        # Map key fingerprints to Debian uids via the configured LDAP server.
        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
        ca_cert_file = cnf.get('Import-LDAP-Fingerprints::CACertFile')

        l = ldap.open(LDAPServer)

        l.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_HARD)
        l.set_option(ldap.OPT_X_TLS_CACERTFILE, ca_cert_file)
        l.set_option(ldap.OPT_X_TLS_NEWCTX, True)

        l.simple_bind_s("","")
        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
               ["uid", "keyfingerprint", "cn", "mn", "sn"])

        ldap_fin_uid_id = {}

            uid = entry["uid"][0]
            name = get_ldap_name(entry)
            fingerprints = entry["keyFingerPrint"]

            for f in fingerprints:
                key = self.fpr_lookup.get(f, None)
                if key not in self.keys:
                self.keys[key]["uid"] = uid

                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, name)
                byname[uid] = (keyid, name)

        return (byname, byuid)

    def generate_users_from_keyring(self, format, session):
        # Synthesise uids from key email addresses using the given format.
        for x in self.keys.keys():
            if "email" not in self.keys[x]:
                self.keys[x]["uid"] = format % "invalid-uid"
                uid = format % self.keys[x]["email"]
                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, self.keys[x]["name"])
                byname[uid] = (keyid, self.keys[x]["name"])
                self.keys[x]["uid"] = uid

            uid = format % "invalid-uid"
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, "ungeneratable user id")
            byname[uid] = (keyid, "ungeneratable user id")

        return (byname, byuid)

__all__.append('Keyring')
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @return: the Keyring object for this keyring
    """
    # NOTE(review): the try/return statements around the query result are not
    # visible in this excerpt.
    q = session.query(Keyring).filter_by(keyring_name=keyring)
    except NoResultFound:

__all__.append('get_keyring')
def get_active_keyring_paths(session=None):
    """
    Return the paths of all active keyrings, highest priority first.

    @return: list of active keyring paths
    """
    return [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).order_by(desc(Keyring.priority)).all() ]

__all__.append('get_active_keyring_paths')
1119 ################################################################################
class DBChange(object):
    # A .changes file known to the database.
    # NOTE(review): the __init__ body and the '__repr__' header for the
    # return line below are not visible in this excerpt.
    def __init__(self, *args, **kwargs):

        return '<DBChange %s>' % self.changesname

__all__.append('DBChange')
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: DBChange object for the given filename (C{None} if not present)
    """
    # NOTE(review): the try/return statements around the query result are not
    # visible in this excerpt.
    q = session.query(DBChange).filter_by(changesname=filename)
    except NoResultFound:

__all__.append('get_dbchange')
1155 ################################################################################
class Maintainer(ORMObject):
    # A package maintainer (RFC822 name/address string).
    def __init__(self, name = None):
        # NOTE(review): 'self.name = name' is not visible in this excerpt.

    def properties(self):
        return ['name', 'maintainer_id']

    def not_null_constraints(self):
        # NOTE(review): the return value of this method is not visible in
        # this excerpt.

    def get_split_maintainer(self):
        # Split the stored maintainer string into its component parts.
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())

__all__.append('Maintainer')
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @return: the Maintainer object for the given maintainer
    """
    # NOTE(review): the try/return statements and the final return of the
    # newly created Maintainer are not visible in this excerpt.
    q = session.query(Maintainer).filter_by(name=name)
    except NoResultFound:
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()

__all__.append('get_or_set_maintainer')
def get_maintainer(maintainer_id, session=None):
    """
    Return the name of the maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @return: the Maintainer with this C{maintainer_id}
    """
    return session.query(Maintainer).get(maintainer_id)

__all__.append('get_maintainer')
1226 ################################################################################
class NewComment(object):
    # An ftp-team comment on a package sitting in a policy (NEW) queue.
    # NOTE(review): the __init__ body and the '__repr__' header for the
    # return line below are not visible in this excerpt.
    def __init__(self, *args, **kwargs):

        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)

__all__.append('NewComment')
def has_new_comment(policy_queue, package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)
    """
    q = session.query(NewComment).filter_by(policy_queue=policy_queue)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    return bool(q.count() > 0)

__all__.append('has_new_comment')
def get_new_comments(policy_queue, package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: A (possibly empty) list of NewComment objects will be returned
    """
    # NOTE(review): the final 'return q.all()' is not visible in this excerpt.
    q = session.query(NewComment).filter_by(policy_queue=policy_queue)
    if package is not None: q = q.filter_by(package=package)
    if version is not None: q = q.filter_by(version=version)
    if comment_id is not None: q = q.filter_by(comment_id=comment_id)

__all__.append('get_new_comments')
1296 ################################################################################
# ORM class for the override table.  NOTE(review): __init__'s
# `self.suite = suite` assignment falls on an elided line (1302).
1298 class Override(ORMObject):
1299 def __init__(self, package = None, suite = None, component = None, overridetype = None, \
1300 section = None, priority = None):
1301 self.package = package
1303 self.component = component
1304 self.overridetype = overridetype
1305 self.section = section
1306 self.priority = priority
1308 def properties(self):
# The list continues onto an elided continuation line.
1309 return ['package', 'suite', 'component', 'overridetype', 'section', \
1312 def not_null_constraints(self):
1313 return ['package', 'suite', 'component', 'overridetype', 'section']
1315 __all__.append('Override')
# Query override entries for a package, optionally narrowed by suite,
# component and override type; each narrower accepts one name or a list.
1318 def get_override(package, suite=None, component=None, overridetype=None, session=None):
1320 Returns Override object for the given parameters
1322 @type package: string
1323 @param package: The name of the package
1325 @type suite: string, list or None
1326 @param suite: The name of the suite (or suites if a list) to limit to. If
1327 None, don't limit. Defaults to None.
1329 @type component: string, list or None
1330 @param component: The name of the component (or components if a list) to
1331 limit to. If None, don't limit. Defaults to None.
1333 @type overridetype: string, list or None
1334 @param overridetype: The name of the overridetype (or overridetypes if a list) to
1335 limit to. If None, don't limit. Defaults to None.
1337 @type session: Session
1338 @param session: Optional SQLA session object (a temporary one will be
1339 generated if not supplied)
1342 @return: A (possibly empty) list of Override objects will be returned
1345 q = session.query(Override)
1346 q = q.filter_by(package=package)
# Scalars are normalised to one-element lists so IN (...) works uniformly.
1348 if suite is not None:
1349 if not isinstance(suite, list): suite = [suite]
1350 q = q.join(Suite).filter(Suite.suite_name.in_(suite))
1352 if component is not None:
1353 if not isinstance(component, list): component = [component]
1354 q = q.join(Component).filter(Component.component_name.in_(component))
1356 if overridetype is not None:
1357 if not isinstance(overridetype, list): overridetype = [overridetype]
1358 q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))
# The final return (presumably `return q.all()`) is on an elided line.
1362 __all__.append('get_override')
1365 ################################################################################
class OverrideType(ORMObject):
    """An override type name, mapped to the override_type table below."""

    def __init__(self, overridetype=None):
        # The type name (column `type` in the mapper configuration).
        self.overridetype = overridetype

    def properties(self):
        """Attribute names exposed for ORMObject introspection."""
        exposed = ['overridetype', 'overridetype_id', 'overrides_count']
        return exposed

    def not_null_constraints(self):
        """Attributes that must be set before the row may be stored."""
        return ['overridetype']

__all__.append('OverrideType')
# Look up an OverrideType by name; follows the module's usual
# try/q.one()/except NoResultFound pattern whose other lines are elided here.
1380 def get_override_type(override_type, session=None):
1382 Returns OverrideType object for given C{override type}.
1384 @type override_type: string
1385 @param override_type: The name of the override type
1387 @type session: Session
1388 @param session: Optional SQLA session object (a temporary one will be
1389 generated if not supplied)
1392 @return: the OverrideType object for the given override type
1395 q = session.query(OverrideType).filter_by(overridetype=override_type)
1399 except NoResultFound:
1402 __all__.append('get_override_type')
1404 ################################################################################
# ORM class for the policy_queue table.
1406 class PolicyQueue(object):
1407 def __init__(self, *args, **kwargs):
# __repr__ body; its def line is elided from this listing.
1411 return '<PolicyQueue %s>' % self.queue_name
1413 __all__.append('PolicyQueue')
# Look up a PolicyQueue by its queue name (same elided try/except pattern).
1416 def get_policy_queue(queuename, session=None):
1418 Returns PolicyQueue object for given C{queue name}
1420 @type queuename: string
1421 @param queuename: The name of the queue
1423 @type session: Session
1424 @param session: Optional SQLA session object (a temporary one will be
1425 generated if not supplied)
1428 @return: PolicyQueue object for the given queue
1431 q = session.query(PolicyQueue).filter_by(queue_name=queuename)
1435 except NoResultFound:
1438 __all__.append('get_policy_queue')
1440 ################################################################################
# Python-2 style total ordering for queue uploads: by source name, then
# version, then source-vs-binary, then .changes file name.  The
# `if ret == 0:` guards and final `return ret` fall on elided lines.
1442 class PolicyQueueUpload(object):
1443 def __cmp__(self, other):
1444 ret = cmp(self.changes.source, other.changes.source)
1446 ret = apt_pkg.version_compare(self.changes.version, other.changes.version)
1448 if self.source is not None and other.source is None:
1450 elif self.source is None and other.source is not None:
1453 ret = cmp(self.changes.changesname, other.changes.changesname)
1456 __all__.append('PolicyQueueUpload')
1458 ################################################################################
# Plain mapped class; behaviour comes entirely from the mapper further down.
1460 class PolicyQueueByhandFile(object):
1463 __all__.append('PolicyQueueByhandFile')
1465 ################################################################################
# Priority ORM class head; __init__'s `self.level = level` line is elided.
1467 class Priority(ORMObject):
1468 def __init__(self, priority = None, level = None):
1469 self.priority = priority
1472 def properties(self):
1473 return ['priority', 'priority_id', 'level', 'overrides_count']
1475 def not_null_constraints(self):
1476 return ['priority', 'level']
def __eq__(self, val):
    """Support comparing a Priority directly against its name string."""
    if not isinstance(val, str):
        # Anything but a string: fall back to the default comparison machinery.
        return NotImplemented
    return self.priority == val
def __ne__(self, val):
    """Inverse of __eq__ for priority-name string comparisons."""
    if not isinstance(val, str):
        # Anything but a string: fall back to the default comparison machinery.
        return NotImplemented
    return self.priority != val
1490 __all__.append('Priority')
# Look up a Priority by name (elided try/q.one()/except NoResultFound pattern).
1493 def get_priority(priority, session=None):
1495 Returns Priority object for given C{priority name}.
1497 @type priority: string
1498 @param priority: The name of the priority
1500 @type session: Session
1501 @param session: Optional SQLA session object (a temporary one will be
1502 generated if not supplied)
1505 @return: Priority object for the given priority
1508 q = session.query(Priority).filter_by(priority=priority)
1512 except NoResultFound:
1515 __all__.append('get_priority')
# Build a name -> id dict over all priorities; the dict initialisation,
# loop header and `return ret` fall on elided lines.
1518 def get_priorities(session=None):
1520 Returns dictionary of priority names -> id mappings
1522 @type session: Session
1523 @param session: Optional SQL session object (a temporary one will be
1524 generated if not supplied)
1527 @return: dictionary of priority names -> id mappings
1531 q = session.query(Priority)
1533 ret[x.priority] = x.priority_id
1537 __all__.append('get_priorities')
1539 ################################################################################
# Section ORM class head; not_null_constraints' return line is elided.
1541 class Section(ORMObject):
1542 def __init__(self, section = None):
1543 self.section = section
1545 def properties(self):
1546 return ['section', 'section_id', 'overrides_count']
1548 def not_null_constraints(self):
def __eq__(self, val):
    """A Section compares equal to the plain string naming it."""
    if not isinstance(val, str):
        # Defer to the default comparison machinery for other types.
        return NotImplemented
    return self.section == val
def __ne__(self, val):
    """Inverse of __eq__ for section-name string comparisons."""
    if not isinstance(val, str):
        # Defer to the default comparison machinery for other types.
        return NotImplemented
    return self.section != val
1563 __all__.append('Section')
# Look up a Section by name (elided try/q.one()/except NoResultFound pattern).
1566 def get_section(section, session=None):
1568 Returns Section object for given C{section name}.
1570 @type section: string
1571 @param section: The name of the section
1573 @type session: Session
1574 @param session: Optional SQLA session object (a temporary one will be
1575 generated if not supplied)
1578 @return: Section object for the given section name
1581 q = session.query(Section).filter_by(section=section)
1585 except NoResultFound:
1588 __all__.append('get_section')
# Build a name -> id dict over all sections; the dict initialisation,
# loop header and `return ret` fall on elided lines.
1591 def get_sections(session=None):
1593 Returns dictionary of section names -> id mappings
1595 @type session: Session
1596 @param session: Optional SQL session object (a temporary one will be
1597 generated if not supplied)
1600 @return: dictionary of section names -> id mappings
1604 q = session.query(Section)
1606 ret[x.section] = x.section_id
1610 __all__.append('get_sections')
1612 ################################################################################
# SignatureHistory head.  NOTE(review): from_signed_file is presumably a
# @classmethod whose decorator, instance creation and `return self` lines
# are elided from this listing -- confirm against the full source.
1614 class SignatureHistory(ORMObject):
1616 def from_signed_file(cls, signed_file):
1617 """signature history entry from signed file
1619 @type signed_file: L{daklib.gpg.SignedFile}
1620 @param signed_file: signed file
1622 @rtype: L{SignatureHistory}
1625 self.fingerprint = signed_file.primary_fingerprint
1626 self.signature_timestamp = signed_file.signature_timestamp
1627 self.contents_sha1 = signed_file.contents_sha1()
def query(self, session):
    """Return the stored SignatureHistory row matching this entry, or None."""
    criteria = dict(
        fingerprint=self.fingerprint,
        signature_timestamp=self.signature_timestamp,
        contents_sha1=self.contents_sha1,
    )
    return session.query(SignatureHistory).filter_by(**criteria).first()
1633 __all__.append('SignatureHistory')
1635 ################################################################################
# Source-contents ORM class; `self.file = file` is on an elided line.
1637 class SrcContents(ORMObject):
1638 def __init__(self, file = None, source = None):
1640 self.source = source
1642 def properties(self):
1643 return ['file', 'source']
1645 __all__.append('SrcContents')
1647 ################################################################################
1649 from debian.debfile import Deb822
1651 # Temporary Deb822 subclass to fix bugs with : handling; see #597249
# Hand-rolled RFC822-style parser (Python 2: note `basestring`).  The match
# branches (`if m:`, `continue`, etc.) fall on elided lines, so only the
# regexes and the surviving statements are visible here.
1652 class Dak822(Deb822):
1653 def _internal_parser(self, sequence, fields=None):
1654 # The key is non-whitespace, non-colon characters before any colon.
1655 key_part = r"^(?P<key>[^: \t\n\r\f\v]+)\s*:\s*"
1656 single = re.compile(key_part + r"(?P<data>\S.*?)\s*$")
1657 multi = re.compile(key_part + r"$")
1658 multidata = re.compile(r"^\s(?P<data>.+?)\s*$")
1660 wanted_field = lambda f: fields is None or f in fields
1662 if isinstance(sequence, basestring):
1663 sequence = sequence.splitlines()
1667 for line in self.gpg_stripped_paragraph(sequence):
1668 m = single.match(line)
1671 self[curkey] = content
1673 if not wanted_field(m.group('key')):
1677 curkey = m.group('key')
1678 content = m.group('data')
1681 m = multi.match(line)
1684 self[curkey] = content
1686 if not wanted_field(m.group('key')):
1690 curkey = m.group('key')
1694 m = multidata.match(line)
1696 content += '\n' + line # XXX not m.group('data')?
1700 self[curkey] = content
# ORM class for source packages (table `source`, mapped further down).
1703 class DBSource(ORMObject):
def __init__(self, source=None, version=None, maintainer=None,
             changedby=None, poolfile=None, install_date=None, fingerprint=None):
    """Initialise a DBSource; every constructor argument is stored verbatim."""
    values = dict(source=source, version=version, maintainer=maintainer,
                  changedby=changedby, poolfile=poolfile,
                  install_date=install_date, fingerprint=fingerprint)
    # Copy each argument onto the instance under the same name.
    for attribute, value in values.items():
        setattr(self, attribute, value)
# pkid/primary-key accessor body; its def line is elided from this listing.
1716 return self.source_id
1718 def properties(self):
1719 return ['source', 'source_id', 'maintainer', 'changedby', \
1720 'fingerprint', 'poolfile', 'version', 'suites_count', \
1721 'install_date', 'binaries_count', 'uploaders_count']
1723 def not_null_constraints(self):
1724 return ['source', 'version', 'install_date', 'maintainer', \
1725 'changedby', 'poolfile']
1727 def read_control_fields(self):
1729 Reads the control information from a dsc
1732 @return: fields is the dsc information in a dictionary form
1734 fullpath = self.poolfile.fullpath
# NOTE(review): the file handle opened here is never explicitly closed in
# the visible lines; the `return fields` line is also elided.
1735 fields = Dak822(open(self.poolfile.fullpath, 'r'))
1738 metadata = association_proxy('key', 'value')
1740 def scan_contents(self):
1742 Returns a set of names for non directories. The path names are
1743 normalized after converting them from either utf-8 or iso8859-1
# The set initialisation, `try:` and result accumulation/return lines are
# elided from this listing.
1746 fullpath = self.poolfile.fullpath
1747 from daklib.contents import UnpackedSource
1748 unpacked = UnpackedSource(fullpath)
1750 for name in unpacked.get_all_filenames():
1751 # enforce proper utf-8 encoding
1753 name.decode('utf-8')
1754 except UnicodeDecodeError:
1755 name = name.decode('iso8859-1').encode('utf-8')
# Body of a metadata-proxy accessor; its def line is elided.
1761 session = object_session(self)
1762 query = session.query(SourceMetadata).filter_by(source=self)
1763 return MetadataProxy(session, query)
1765 __all__.append('DBSource')
# All suites that contain a source package of the given name.
1768 def get_suites_source_in(source, session=None):
1770 Returns list of Suite objects which given C{source} name is in
1773 @param source: DBSource package name to search for
1776 @return: list of Suite objects for the given source
1779 return session.query(Suite).filter(Suite.sources.any(source=source)).all()
1781 __all__.append('get_suites_source_in')
1783 # FIXME: This function fails badly if it finds more than 1 source package and
1784 # its implementation is trivial enough to be inlined.
# Resolve (source, suite_name) to a single DBSource; the `try:` and the
# `return None` branches fall on elided lines.
1786 def get_source_in_suite(source, suite_name, session=None):
1788 Returns a DBSource object for a combination of C{source} and C{suite_name}.
1790 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
1791 - B{suite_name} - a suite name, eg. I{unstable}
1793 @type source: string
1794 @param source: source package name
1796 @type suite_name: string
1797 @param suite_name: the suite name
1800 @return: the version for I{source} in I{suite}
1803 suite = get_suite(suite_name, session)
1807 return suite.get_sources(source).one()
1808 except NoResultFound:
1811 __all__.append('get_source_in_suite')
# Import control fields as metadata rows; the nested `try:` lines of the
# str/utf-8/iso8859-1 fallback chain are elided from this listing.
1814 def import_metadata_into_db(obj, session=None):
1816 This routine works on either DBBinary or DBSource objects and imports
1817 their metadata into the database
1819 fields = obj.read_control_fields()
1820 for k in fields.keys():
1823 val = str(fields[k])
1824 except UnicodeEncodeError:
1825 # Fall back to UTF-8
1827 val = fields[k].encode('utf-8')
1828 except UnicodeEncodeError:
1829 # Finally try iso8859-1
1830 val = fields[k].encode('iso8859-1')
1831 # Otherwise we allow the exception to percolate up and we cause
1832 # a reject as someone is playing silly buggers
1834 obj.metadata[get_or_set_metadatakey(k, session)] = val
1836 session.commit_or_flush()
1838 __all__.append('import_metadata_into_db')
1840 ################################################################################
# ORM class for source formats; the __repr__ def line is elided.
1842 class SrcFormat(object):
1843 def __init__(self, *args, **kwargs):
1847 return '<SrcFormat %s>' % (self.format_name)
1849 __all__.append('SrcFormat')
1851 ################################################################################
# (Display label, attribute) pairs used by Suite.details() below; at least
# one entry (original line 1857) is elided from this listing.
1853 SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
1854 ('SuiteID', 'suite_id'),
1855 ('Version', 'version'),
1856 ('Origin', 'origin'),
1858 ('Description', 'description'),
1859 ('Untouchable', 'untouchable'),
1860 ('Announce', 'announce'),
1861 ('Codename', 'codename'),
1862 ('OverrideCodename', 'overridecodename'),
1863 ('ValidTime', 'validtime'),
1864 ('Priority', 'priority'),
1865 ('NotAutomatic', 'notautomatic'),
1866 ('CopyChanges', 'copychanges'),
1867 ('OverrideSuite', 'overridesuite')]
1869 # Why the heck don't we have any UNIQUE constraints in table suite?
1870 # TODO: Add UNIQUE constraints for appropriate columns.
# Suite ORM class head; the properties list continues on an elided line.
1871 class Suite(ORMObject):
1872 def __init__(self, suite_name = None, version = None):
1873 self.suite_name = suite_name
1874 self.version = version
1876 def properties(self):
1877 return ['suite_name', 'version', 'sources_count', 'binaries_count', \
1880 def not_null_constraints(self):
1881 return ['suite_name']
def __eq__(self, val):
    """A Suite compares equal to the plain string of its suite name."""
    if not isinstance(val, str):
        # Defer to the default comparison machinery for other types.
        return NotImplemented
    return self.suite_name == val
def __ne__(self, val):
    """Inverse of __eq__ for suite-name string comparisons."""
    if not isinstance(val, str):
        # Defer to the default comparison machinery for other types.
        return NotImplemented
    return self.suite_name != val
# details() body fragment: its def line and `ret = []` are elided; renders
# one "Label: value" line per SUITE_FIELDS entry.
1897 for disp, field in SUITE_FIELDS:
1898 val = getattr(self, field, None)
1900 ret.append("%s: %s" % (disp, val))
1902 return "\n".join(ret)
1904 def get_architectures(self, skipsrc=False, skipall=False):
1906 Returns list of Architecture objects
1908 @type skipsrc: boolean
1909 @param skipsrc: Whether to skip returning the 'source' architecture entry
1912 @type skipall: boolean
1913 @param skipall: Whether to skip returning the 'all' architecture entry
1917 @return: list of Architecture objects for the given name (may be empty)
1920 q = object_session(self).query(Architecture).with_parent(self)
# NOTE(review): per the parameter docs and the numbering gaps, the next two
# filters are presumably guarded by elided `if skipsrc:` / `if skipall:`
# lines -- they are NOT unconditional.  Confirm against the full source.
1922 q = q.filter(Architecture.arch_string != 'source')
1924 q = q.filter(Architecture.arch_string != 'all')
1925 return q.order_by(Architecture.arch_string).all()
1927 def get_sources(self, source):
1929 Returns a query object representing DBSource that is part of C{suite}.
1931 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
1933 @type source: string
1934 @param source: source package name
1936 @rtype: sqlalchemy.orm.query.Query
1937 @return: a query of DBSource
# The query's suite restriction continues on an elided continuation line.
1941 session = object_session(self)
1942 return session.query(DBSource).filter_by(source = source). \
# Resolve the override suite: falls through (elided branch) when
# self.overridesuite is None, else looks the named Suite up by name.
1945 def get_overridesuite(self):
1946 if self.overridesuite is None:
1949 return object_session(self).query(Suite).filter_by(suite_name=self.overridesuite).one()
# path accessor body; its def line (and any decorator) is elided.
1953 return os.path.join(self.archive.path, 'dists', self.suite_name)
def release_suite_output(self):
    """Name to publish for this suite: release_suite override if set, else suite_name."""
    preferred = self.release_suite
    return preferred if preferred is not None else self.suite_name
1961 __all__.append('Suite')
# Three-stage suite lookup: by internal suite_name, then codename, then
# release_suite.  The try/`return q.one()` lines of each stage are elided.
1964 def get_suite(suite, session=None):
1966 Returns Suite object for given C{suite name}.
1969 @param suite: The name of the suite
1971 @type session: Session
1972 @param session: Optional SQLA session object (a temporary one will be
1973 generated if not supplied)
1976 @return: Suite object for the requested suite name (None if not present)
1979 # Start by looking for the dak internal name
1980 q = session.query(Suite).filter_by(suite_name=suite)
1983 except NoResultFound:
1987 q = session.query(Suite).filter_by(codename=suite)
1990 except NoResultFound:
1993 # Finally give release_suite a try
1994 q = session.query(Suite).filter_by(release_suite=suite)
1997 except NoResultFound:
2000 __all__.append('get_suite')
2002 ################################################################################
# Convenience wrapper: AttributeError (get_suite returned None) yields an
# empty result; the `try:` and the fallback return line are elided.
2005 def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
2007 Returns list of Architecture objects for given C{suite} name. The list is
2008 empty if suite does not exist.
2011 @param suite: Suite name to search for
2013 @type skipsrc: boolean
2014 @param skipsrc: Whether to skip returning the 'source' architecture entry
2017 @type skipall: boolean
2018 @param skipall: Whether to skip returning the 'all' architecture entry
2021 @type session: Session
2022 @param session: Optional SQL session object (a temporary one will be
2023 generated if not supplied)
2026 @return: list of Architecture objects for the given name (may be empty)
2030 return get_suite(suite, session).get_architectures(skipsrc, skipall)
2031 except AttributeError:
2034 __all__.append('get_suite_architectures')
2036 ################################################################################
# Uid ORM class head; __init__'s attribute assignments are elided.
2038 class Uid(ORMObject):
2039 def __init__(self, uid = None, name = None):
def __eq__(self, val):
    """A Uid compares equal to the plain string of its uid."""
    if not isinstance(val, str):
        # Defer to the default comparison machinery for other types.
        return NotImplemented
    return self.uid == val
def __ne__(self, val):
    """Inverse of __eq__ for uid string comparisons."""
    if not isinstance(val, str):
        # Defer to the default comparison machinery for other types.
        return NotImplemented
    return self.uid != val
2055 def properties(self):
2056 return ['uid', 'name', 'fingerprint']
# not_null_constraints' return line is elided from this listing.
2058 def not_null_constraints(self):
2061 __all__.append('Uid')
# Get-or-create for Uid rows; the try/one()/insert lines are elided.
2064 def get_or_set_uid(uidname, session=None):
2066 Returns uid object for given uidname.
2068 If no matching uidname is found, a row is inserted.
2070 @type uidname: string
2071 @param uidname: The uid to add
2073 @type session: SQLAlchemy
2074 @param session: Optional SQL session object (a temporary one will be
2075 generated if not supplied). If not passed, a commit will be performed at
2076 the end of the function, otherwise the caller is responsible for commiting.
2079 @return: the uid object for the given uidname
2082 q = session.query(Uid).filter_by(uid=uidname)
2086 except NoResultFound:
2090 session.commit_or_flush()
2095 __all__.append('get_or_set_uid')
# Resolve a Uid through its Fingerprint; try/return lines are elided.
2098 def get_uid_from_fingerprint(fpr, session=None):
2099 q = session.query(Uid)
2100 q = q.join(Fingerprint).filter_by(fingerprint=fpr)
2104 except NoResultFound:
2107 __all__.append('get_uid_from_fingerprint')
2109 ################################################################################
# Metadata key ORM class; method bodies fall on elided lines.
2111 class MetadataKey(ORMObject):
2112 def __init__(self, key = None):
2115 def properties(self):
2118 def not_null_constraints(self):
2121 __all__.append('MetadataKey')
# Get-or-create for MetadataKey rows; the try/one()/session.add lines are
# elided.  (Docstring below previously said "uidname" -- a copy-paste from
# get_or_set_uid; corrected to "keyname".)
2124 def get_or_set_metadatakey(keyname, session=None):
2126 Returns MetadataKey object for given keyname.
2128 If no matching keyname is found, a row is inserted.
2130 @type keyname: string
2131 @param keyname: The keyname to add
2133 @type session: SQLAlchemy
2134 @param session: Optional SQL session object (a temporary one will be
2135 generated if not supplied). If not passed, a commit will be performed at
2136 the end of the function, otherwise the caller is responsible for commiting.
2139 @return: the metadatakey object for the given keyname
2142 q = session.query(MetadataKey).filter_by(key=keyname)
2146 except NoResultFound:
2147 ret = MetadataKey(keyname)
2149 session.commit_or_flush()
2153 __all__.append('get_or_set_metadatakey')
2155 ################################################################################
# Binary metadata ORM class; key/value assignments fall on elided lines.
2157 class BinaryMetadata(ORMObject):
2158 def __init__(self, key = None, value = None, binary = None):
2161 self.binary = binary
2163 def properties(self):
2164 return ['binary', 'key', 'value']
2166 def not_null_constraints(self):
2169 __all__.append('BinaryMetadata')
2171 ################################################################################
# Source metadata ORM class; key/value assignments fall on elided lines.
2173 class SourceMetadata(ORMObject):
2174 def __init__(self, key = None, value = None, source = None):
2177 self.source = source
2179 def properties(self):
2180 return ['source', 'key', 'value']
2182 def not_null_constraints(self):
2185 __all__.append('SourceMetadata')
2187 ################################################################################
# Dict-like read-only view over a metadata query; several return/raise
# lines are elided from this listing.
2189 class MetadataProxy(object):
2190 def __init__(self, session, query):
2191 self.session = session
2194 def _get(self, key):
2195 metadata_key = self.session.query(MetadataKey).filter_by(key=key).first()
2196 if metadata_key is None:
2198 metadata = self.query.filter_by(key=metadata_key).first()
2201 def __contains__(self, key):
2202 if self._get(key) is not None:
2206 def __getitem__(self, key):
2207 metadata = self._get(key)
2208 if metadata is None:
2210 return metadata.value
2212 def get(self, key, default=None):
2218 ################################################################################
# Version-check ORM class (suite/check/reference triples).
2220 class VersionCheck(ORMObject):
2221 def __init__(self, *args, **kwargs):
2224 def properties(self):
2225 #return ['suite_id', 'check', 'reference_id']
2228 def not_null_constraints(self):
2229 return ['suite', 'check', 'reference']
2231 __all__.append('VersionCheck')
# Fetch version checks for a suite, optionally limited to one check kind;
# the final return (always iterable) falls on an elided line.
2234 def get_version_checks(suite_name, check = None, session = None):
2235 suite = get_suite(suite_name, session)
2237 # Make sure that what we return is iterable so that list comprehensions
2238 # involving this don't cause a traceback
2240 q = session.query(VersionCheck).filter_by(suite=suite)
2242 q = q.filter_by(check=check)
2245 __all__.append('get_version_checks')
2247 ################################################################################
2249 class DBConn(object):
2251 database module init.
2255 def __init__(self, *args, **kwargs):
2256 self.__dict__ = self.__shared_state
2258 if not getattr(self, 'initialised', False):
2259 self.initialised = True
2260 self.debug = kwargs.has_key('debug')
2263 def __setuptables(self):
2266 'acl_architecture_map',
2267 'acl_fingerprint_map',
2274 'binaries_metadata',
2282 'external_overrides',
2283 'extra_src_references',
2285 'files_archive_map',
2291 # TODO: the maintainer column in table override should be removed.
2295 'policy_queue_upload',
2296 'policy_queue_upload_binaries_map',
2297 'policy_queue_byhand_file',
2300 'signature_history',
2309 'suite_architectures',
2310 'suite_build_queue_copy',
2311 'suite_src_formats',
2317 'almost_obsolete_all_associations',
2318 'almost_obsolete_src_associations',
2319 'any_associations_source',
2320 'bin_associations_binaries',
2321 'binaries_suite_arch',
2324 'newest_all_associations',
2325 'newest_any_associations',
2327 'newest_src_association',
2328 'obsolete_all_associations',
2329 'obsolete_any_associations',
2330 'obsolete_any_by_all_associations',
2331 'obsolete_src_associations',
2334 'src_associations_bin',
2335 'src_associations_src',
2336 'suite_arch_by_name',
2339 for table_name in tables:
2340 table = Table(table_name, self.db_meta, \
2341 autoload=True, useexisting=True)
2342 setattr(self, 'tbl_%s' % table_name, table)
2344 for view_name in views:
2345 view = Table(view_name, self.db_meta, autoload=True)
2346 setattr(self, 'view_%s' % view_name, view)
2348 def __setupmappers(self):
2349 mapper(Architecture, self.tbl_architecture,
2350 properties = dict(arch_id = self.tbl_architecture.c.id,
2351 suites = relation(Suite, secondary=self.tbl_suite_architectures,
2352 order_by=self.tbl_suite.c.suite_name,
2353 backref=backref('architectures', order_by=self.tbl_architecture.c.arch_string))),
2354 extension = validator)
2356 mapper(ACL, self.tbl_acl,
2358 architectures = relation(Architecture, secondary=self.tbl_acl_architecture_map, collection_class=set),
2359 fingerprints = relation(Fingerprint, secondary=self.tbl_acl_fingerprint_map, collection_class=set),
2360 match_keyring = relation(Keyring, primaryjoin=(self.tbl_acl.c.match_keyring_id == self.tbl_keyrings.c.id)),
2361 per_source = relation(ACLPerSource, collection_class=set),
2364 mapper(ACLPerSource, self.tbl_acl_per_source,
2366 acl = relation(ACL),
2367 fingerprint = relation(Fingerprint, primaryjoin=(self.tbl_acl_per_source.c.fingerprint_id == self.tbl_fingerprint.c.id)),
2368 created_by = relation(Fingerprint, primaryjoin=(self.tbl_acl_per_source.c.created_by_id == self.tbl_fingerprint.c.id)),
2371 mapper(Archive, self.tbl_archive,
2372 properties = dict(archive_id = self.tbl_archive.c.id,
2373 archive_name = self.tbl_archive.c.name))
2375 mapper(ArchiveFile, self.tbl_files_archive_map,
2376 properties = dict(archive = relation(Archive, backref='files'),
2377 component = relation(Component),
2378 file = relation(PoolFile, backref='archives')))
2380 mapper(BuildQueue, self.tbl_build_queue,
2381 properties = dict(queue_id = self.tbl_build_queue.c.id,
2382 suite = relation(Suite, primaryjoin=(self.tbl_build_queue.c.suite_id==self.tbl_suite.c.id))))
2384 mapper(DBBinary, self.tbl_binaries,
2385 properties = dict(binary_id = self.tbl_binaries.c.id,
2386 package = self.tbl_binaries.c.package,
2387 version = self.tbl_binaries.c.version,
2388 maintainer_id = self.tbl_binaries.c.maintainer,
2389 maintainer = relation(Maintainer),
2390 source_id = self.tbl_binaries.c.source,
2391 source = relation(DBSource, backref='binaries'),
2392 arch_id = self.tbl_binaries.c.architecture,
2393 architecture = relation(Architecture),
2394 poolfile_id = self.tbl_binaries.c.file,
2395 poolfile = relation(PoolFile),
2396 binarytype = self.tbl_binaries.c.type,
2397 fingerprint_id = self.tbl_binaries.c.sig_fpr,
2398 fingerprint = relation(Fingerprint),
2399 install_date = self.tbl_binaries.c.install_date,
2400 suites = relation(Suite, secondary=self.tbl_bin_associations,
2401 backref=backref('binaries', lazy='dynamic')),
2402 extra_sources = relation(DBSource, secondary=self.tbl_extra_src_references,
2403 backref=backref('extra_binary_references', lazy='dynamic')),
2404 key = relation(BinaryMetadata, cascade='all',
2405 collection_class=attribute_mapped_collection('key'))),
2406 extension = validator)
2408 mapper(Component, self.tbl_component,
2409 properties = dict(component_id = self.tbl_component.c.id,
2410 component_name = self.tbl_component.c.name),
2411 extension = validator)
2413 mapper(DBConfig, self.tbl_config,
2414 properties = dict(config_id = self.tbl_config.c.id))
2416 mapper(DSCFile, self.tbl_dsc_files,
2417 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
2418 source_id = self.tbl_dsc_files.c.source,
2419 source = relation(DBSource),
2420 poolfile_id = self.tbl_dsc_files.c.file,
2421 poolfile = relation(PoolFile)))
2423 mapper(ExternalOverride, self.tbl_external_overrides,
2425 suite_id = self.tbl_external_overrides.c.suite,
2426 suite = relation(Suite),
2427 component_id = self.tbl_external_overrides.c.component,
2428 component = relation(Component)))
2430 mapper(PoolFile, self.tbl_files,
2431 properties = dict(file_id = self.tbl_files.c.id,
2432 filesize = self.tbl_files.c.size),
2433 extension = validator)
2435 mapper(Fingerprint, self.tbl_fingerprint,
2436 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
2437 uid_id = self.tbl_fingerprint.c.uid,
2438 uid = relation(Uid),
2439 keyring_id = self.tbl_fingerprint.c.keyring,
2440 keyring = relation(Keyring),
2441 acl = relation(ACL)),
2442 extension = validator)
2444 mapper(Keyring, self.tbl_keyrings,
2445 properties = dict(keyring_name = self.tbl_keyrings.c.name,
2446 keyring_id = self.tbl_keyrings.c.id,
2447 acl = relation(ACL, primaryjoin=(self.tbl_keyrings.c.acl_id == self.tbl_acl.c.id)))),
2449 mapper(DBChange, self.tbl_changes,
2450 properties = dict(change_id = self.tbl_changes.c.id,
2451 seen = self.tbl_changes.c.seen,
2452 source = self.tbl_changes.c.source,
2453 binaries = self.tbl_changes.c.binaries,
2454 architecture = self.tbl_changes.c.architecture,
2455 distribution = self.tbl_changes.c.distribution,
2456 urgency = self.tbl_changes.c.urgency,
2457 maintainer = self.tbl_changes.c.maintainer,
2458 changedby = self.tbl_changes.c.changedby,
2459 date = self.tbl_changes.c.date,
2460 version = self.tbl_changes.c.version))
2462 mapper(Maintainer, self.tbl_maintainer,
2463 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
2464 maintains_sources = relation(DBSource, backref='maintainer',
2465 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
2466 changed_sources = relation(DBSource, backref='changedby',
2467 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
2468 extension = validator)
2470 mapper(NewComment, self.tbl_new_comments,
2471 properties = dict(comment_id = self.tbl_new_comments.c.id,
2472 policy_queue = relation(PolicyQueue)))
2474 mapper(Override, self.tbl_override,
2475 properties = dict(suite_id = self.tbl_override.c.suite,
2476 suite = relation(Suite, \
2477 backref=backref('overrides', lazy='dynamic')),
2478 package = self.tbl_override.c.package,
2479 component_id = self.tbl_override.c.component,
2480 component = relation(Component, \
2481 backref=backref('overrides', lazy='dynamic')),
2482 priority_id = self.tbl_override.c.priority,
2483 priority = relation(Priority, \
2484 backref=backref('overrides', lazy='dynamic')),
2485 section_id = self.tbl_override.c.section,
2486 section = relation(Section, \
2487 backref=backref('overrides', lazy='dynamic')),
2488 overridetype_id = self.tbl_override.c.type,
2489 overridetype = relation(OverrideType, \
2490 backref=backref('overrides', lazy='dynamic'))))
2492 mapper(OverrideType, self.tbl_override_type,
2493 properties = dict(overridetype = self.tbl_override_type.c.type,
2494 overridetype_id = self.tbl_override_type.c.id))
2496 mapper(PolicyQueue, self.tbl_policy_queue,
2497 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id,
2498 suite = relation(Suite, primaryjoin=(self.tbl_policy_queue.c.suite_id == self.tbl_suite.c.id))))
2500 mapper(PolicyQueueUpload, self.tbl_policy_queue_upload,
2502 changes = relation(DBChange),
2503 policy_queue = relation(PolicyQueue, backref='uploads'),
2504 target_suite = relation(Suite),
2505 source = relation(DBSource),
2506 binaries = relation(DBBinary, secondary=self.tbl_policy_queue_upload_binaries_map),
2509 mapper(PolicyQueueByhandFile, self.tbl_policy_queue_byhand_file,
2511 upload = relation(PolicyQueueUpload, backref='byhand'),
2515 mapper(Priority, self.tbl_priority,
2516 properties = dict(priority_id = self.tbl_priority.c.id))
2518 mapper(Section, self.tbl_section,
2519 properties = dict(section_id = self.tbl_section.c.id,
2520 section=self.tbl_section.c.section))
2522 mapper(SignatureHistory, self.tbl_signature_history)
2524 mapper(DBSource, self.tbl_source,
2525 properties = dict(source_id = self.tbl_source.c.id,
2526 version = self.tbl_source.c.version,
2527 maintainer_id = self.tbl_source.c.maintainer,
2528 poolfile_id = self.tbl_source.c.file,
2529 poolfile = relation(PoolFile),
2530 fingerprint_id = self.tbl_source.c.sig_fpr,
2531 fingerprint = relation(Fingerprint),
2532 changedby_id = self.tbl_source.c.changedby,
2533 srcfiles = relation(DSCFile,
2534 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
2535 suites = relation(Suite, secondary=self.tbl_src_associations,
2536 backref=backref('sources', lazy='dynamic')),
2537 uploaders = relation(Maintainer,
2538 secondary=self.tbl_src_uploaders),
2539 key = relation(SourceMetadata, cascade='all',
2540 collection_class=attribute_mapped_collection('key'))),
2541 extension = validator)
2543 mapper(SrcFormat, self.tbl_src_format,
2544 properties = dict(src_format_id = self.tbl_src_format.c.id,
2545 format_name = self.tbl_src_format.c.format_name))
2547 mapper(Suite, self.tbl_suite,
2548 properties = dict(suite_id = self.tbl_suite.c.id,
2549 policy_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.policy_queue_id == self.tbl_policy_queue.c.id)),
2550 new_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.new_queue_id == self.tbl_policy_queue.c.id)),
2551 copy_queues = relation(BuildQueue,
2552 secondary=self.tbl_suite_build_queue_copy),
2553 srcformats = relation(SrcFormat, secondary=self.tbl_suite_src_formats,
2554 backref=backref('suites', lazy='dynamic')),
2555 archive = relation(Archive, backref='suites'),
2556 acls = relation(ACL, secondary=self.tbl_suite_acl_map, collection_class=set),
2557 components = relation(Component, secondary=self.tbl_component_suite,
2558 order_by=self.tbl_component.c.ordering,
2559 backref=backref('suites'))),
2560 extension = validator)
2562 mapper(Uid, self.tbl_uid,
2563 properties = dict(uid_id = self.tbl_uid.c.id,
2564 fingerprint = relation(Fingerprint)),
2565 extension = validator)
2567 mapper(BinContents, self.tbl_bin_contents,
2569 binary = relation(DBBinary,
2570 backref=backref('contents', lazy='dynamic', cascade='all')),
2571 file = self.tbl_bin_contents.c.file))
2573 mapper(SrcContents, self.tbl_src_contents,
2575 source = relation(DBSource,
2576 backref=backref('contents', lazy='dynamic', cascade='all')),
2577 file = self.tbl_src_contents.c.file))
2579 mapper(MetadataKey, self.tbl_metadata_keys,
2581 key_id = self.tbl_metadata_keys.c.key_id,
2582 key = self.tbl_metadata_keys.c.key))
2584 mapper(BinaryMetadata, self.tbl_binaries_metadata,
2586 binary_id = self.tbl_binaries_metadata.c.bin_id,
2587 binary = relation(DBBinary),
2588 key_id = self.tbl_binaries_metadata.c.key_id,
2589 key = relation(MetadataKey),
2590 value = self.tbl_binaries_metadata.c.value))
2592 mapper(SourceMetadata, self.tbl_source_metadata,
2594 source_id = self.tbl_source_metadata.c.src_id,
2595 source = relation(DBSource),
2596 key_id = self.tbl_source_metadata.c.key_id,
2597 key = relation(MetadataKey),
2598 value = self.tbl_source_metadata.c.value))
2600 mapper(VersionCheck, self.tbl_version_check,
2602 suite_id = self.tbl_version_check.c.suite,
2603 suite = relation(Suite, primaryjoin=self.tbl_version_check.c.suite==self.tbl_suite.c.id),
2604 reference_id = self.tbl_version_check.c.reference,
2605 reference = relation(Suite, primaryjoin=self.tbl_version_check.c.reference==self.tbl_suite.c.id, lazy='joined')))
2607 ## Connection functions
2608     def __createconn(self):
        # Build a PostgreSQL connection string from the dak configuration
        # (DB::Service / DB::Host / DB::Port / DB::Name), create the
        # SQLAlchemy engine and session factory, then declare tables and
        # mappers.  The creating PID is recorded so session() can detect
        # use after fork().
        # NOTE(review): several original lines are elided in this excerpt
        # (the "cnf = Config()" assignment, the unix-socket "else:" branch
        # header, and a "try:" around engine creation are not visible) —
        # do not read the visible control flow as complete.
2609         from config import Config
2611         if cnf.has_key("DB::Service"):
            # Connect via a libpq service definition (pg_service.conf).
2612             connstr = "postgresql://service=%s" % cnf["DB::Service"]
2613         elif cnf.has_key("DB::Host"):
            # TCP/IP connection to an explicit host; port only if set and
            # not the "-1" sentinel.
2615             connstr = "postgresql://%s" % cnf["DB::Host"]
2616             if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
2617                 connstr += ":%s" % cnf["DB::Port"]
2618             connstr += "/%s" % cnf["DB::Name"]
            # Local (socket) form: the port travels as a URL query parameter.
            # NOTE(review): the "else:" introducing this branch is elided here.
2621             connstr = "postgresql:///%s" % cnf["DB::Name"]
2622             if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
2623                 connstr += "?port=%s" % cnf["DB::Port"]
        # Optional engine tuning taken straight from the configuration.
2625         engine_args = { 'echo': self.debug }
2626         if cnf.has_key('DB::PoolSize'):
2627             engine_args['pool_size'] = int(cnf['DB::PoolSize'])
2628         if cnf.has_key('DB::MaxOverflow'):
2629             engine_args['max_overflow'] = int(cnf['DB::MaxOverflow'])
        # use_native_unicode can only be disabled on SQLAlchemy newer
        # than the 0.5 series.
2630         if sa_major_version != '0.5' and cnf.has_key('DB::Unicode') and \
2631             cnf['DB::Unicode'] == 'false':
2632             engine_args['use_native_unicode'] = False
2634         # Monkey patch a new dialect in in order to support service= syntax
2635         import sqlalchemy.dialects.postgresql
2636         from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
2637         class PGDialect_psycopg2_dak(PGDialect_psycopg2):
2638             def create_connect_args(self, url):
                # Strip the 21-char "postgresql://service=" prefix and pass
                # the bare service name through to psycopg2/libpq as a DSN.
2639                 if str(url).startswith('postgresql://service='):
2641                     servicename = str(url)[21:]
2642                     return (['service=%s' % servicename], {})
2644                 return PGDialect_psycopg2.create_connect_args(self, url)
2646         sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak
        # Create engine, bind metadata, build the session factory, then set
        # up table and mapper definitions.
        # NOTE(review): the original wraps this in try/except (the "try:"
        # is elided in this excerpt); OperationalError aborts via fubar().
2649             self.db_pg = create_engine(connstr, **engine_args)
2650             self.db_meta = MetaData()
2651             self.db_meta.bind = self.db_pg
2652             self.db_smaker = sessionmaker(bind=self.db_pg,
2656             self.__setuptables()
2657             self.__setupmappers()
2659         except OperationalError as e:
2661             utils.fubar("Cannot connect to database (%s)" % str(e))
        # Remember which process created this connection (fork detection).
2663         self.pid = os.getpid()
2665     def session(self, work_mem = 0):
2667         Returns a new session object. If a work_mem parameter is provided a new
2668         transaction is started and the work_mem parameter is set for this
2669         transaction. The work_mem parameter is measured in MB. A default value
2670         will be used if the parameter is not set.
2672         # reinitialize DBConn in new processes
2673         if self.pid != os.getpid():
            # NOTE(review): the body of this fork-guard is elided in this
            # excerpt (presumably it re-runs connection setup — confirm
            # against the full source), as are the guard around the
            # SET LOCAL below and the final "return session".
2676         session = self.db_smaker()
            # SET LOCAL scopes the work_mem override to the current
            # transaction only; %d formatting keeps the value an integer.
2678             session.execute("SET LOCAL work_mem TO '%d MB'" % work_mem)
# Export DBConn as part of this module's public API.
2681 __all__.append('DBConn')