5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
37 import daklib.daksubprocess
39 from os.path import normpath
50 import simplejson as json
52 from datetime import datetime, timedelta
53 from errno import ENOENT
54 from tempfile import mkstemp, mkdtemp
55 from tarfile import TarFile
57 from inspect import getargspec
60 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
62 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
63 backref, MapperExtension, EXT_CONTINUE, object_mapper, clear_mappers
64 from sqlalchemy import types as sqltypes
65 from sqlalchemy.orm.collections import attribute_mapped_collection
66 from sqlalchemy.ext.associationproxy import association_proxy
68 # Don't remove this, we re-export the exceptions to scripts which import us
69 from sqlalchemy.exc import *
70 from sqlalchemy.orm.exc import NoResultFound
72 # Only import Config until Queue stuff is changed to store its config
74 from config import Config
75 from textutils import fix_maintainer
76 from dak_exceptions import DBUpdateError, NoSourceFieldError, FileExistsError
78 # suppress some deprecation warnings in squeeze related to sqlalchemy
80 warnings.filterwarnings('ignore', \
81 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
83 warnings.filterwarnings('ignore', \
84 "Predicate of partial index .* ignored during reflection", \
88 ################################################################################
90 # Patch in support for the debversion field type so that it works during
94 # that is for sqlalchemy 0.6
95 UserDefinedType = sqltypes.UserDefinedType
97 # this one for sqlalchemy 0.5
98 UserDefinedType = sqltypes.TypeEngine
100 class DebVersion(UserDefinedType):
101 def get_col_spec(self):
104 def bind_processor(self, dialect):
107 # ' = None' is needed for sqlalchemy 0.5:
108 def result_processor(self, dialect, coltype = None):
111 sa_major_version = sqlalchemy.__version__[0:3]
112 if sa_major_version in ["0.5", "0.6", "0.7", "0.8", "0.9"]:
113 from sqlalchemy.databases import postgres
114 postgres.ischema_names['debversion'] = DebVersion
116 raise Exception("dak only ported to SQLA versions 0.5 to 0.9. See daklib/dbconn.py")
118 ################################################################################
120 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
122 ################################################################################
124 def session_wrapper(fn):
126 Wrapper around common ".., session=None):" handling. If the wrapped
127 function is called without passing 'session', we create a local one
128 and destroy it when the function ends.
130 Also attaches a commit_or_flush method to the session; if we created a
131 local session, this is a synonym for session.commit(), otherwise it is a
132 synonym for session.flush().
135 def wrapped(*args, **kwargs):
136 private_transaction = False
138 # Find the session object
139 session = kwargs.get('session')
142 if len(args) <= len(getargspec(fn)[0]) - 1:
143 # No session specified as last argument or in kwargs
144 private_transaction = True
145 session = kwargs['session'] = DBConn().session()
147 # Session is last argument in args
151 session = args[-1] = DBConn().session()
152 private_transaction = True
154 if private_transaction:
155 session.commit_or_flush = session.commit
157 session.commit_or_flush = session.flush
160 return fn(*args, **kwargs)
162 if private_transaction:
163 # We created a session; close it.
166 wrapped.__doc__ = fn.__doc__
167 wrapped.func_name = fn.func_name
171 __all__.append('session_wrapper')
173 ################################################################################
175 class ORMObject(object):
177 ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
178 derived classes must implement the properties() method.
181 def properties(self):
183 This method should be implemented by all derived classes and returns a
184 list of the important properties. The properties 'created' and
185 'modified' will be added automatically. A suffix '_count' should be
186 added to properties that are lists or query objects. The most important
187 property name should be returned as the first element in the list
188 because it is used by repr().
194 Returns a JSON representation of the object based on the properties
195 returned from the properties() method.
198 # add created and modified
199 all_properties = self.properties() + ['created', 'modified']
200 for property in all_properties:
201 # check for list or query
202 if property[-6:] == '_count':
203 real_property = property[:-6]
204 if not hasattr(self, real_property):
206 value = getattr(self, real_property)
207 if hasattr(value, '__len__'):
210 elif hasattr(value, 'count'):
211 # query (but not during validation)
212 if self.in_validation:
214 value = value.count()
216 raise KeyError('Do not understand property %s.' % property)
218 if not hasattr(self, property):
221 value = getattr(self, property)
225 elif isinstance(value, ORMObject):
226 # use repr() for ORMObject types
229 # we want a string for all other types because json cannot
232 data[property] = value
233 return json.dumps(data)
237 Returns the name of the class.
239 return type(self).__name__
243 Returns a short string representation of the object using the first
244 element from the properties() method.
246 primary_property = self.properties()[0]
247 value = getattr(self, primary_property)
248 return '<%s %s>' % (self.classname(), str(value))
252 Returns a human readable form of the object using the properties()
255 return '<%s %s>' % (self.classname(), self.json())
257 def not_null_constraints(self):
259 Returns a list of properties that must be not NULL. Derived classes
260 should override this method if needed.
264 validation_message = \
265 "Validation failed because property '%s' must not be empty in object\n%s"
267 in_validation = False
271 This function validates the not NULL constraints as returned by
272 not_null_constraints(). It raises the DBUpdateError exception if
275 for property in self.not_null_constraints():
276 # TODO: It is a bit awkward that the mapper configuration allow
277 # directly setting the numeric _id columns. We should get rid of it
279 if hasattr(self, property + '_id') and \
280 getattr(self, property + '_id') is not None:
282 if not hasattr(self, property) or getattr(self, property) is None:
283 # str() might lead to races due to a 2nd flush
284 self.in_validation = True
285 message = self.validation_message % (property, str(self))
286 self.in_validation = False
287 raise DBUpdateError(message)
291 def get(cls, primary_key, session = None):
293 This is a support function that allows getting an object by its primary
296 Architecture.get(3[, session])
298 instead of the more verbose
300 session.query(Architecture).get(3)
302 return session.query(cls).get(primary_key)
304 def session(self, replace = False):
306 Returns the current session that is associated with the object. May
307 return None if the object is in detached state.
310 return object_session(self)
312 def clone(self, session = None):
314 Clones the current object in a new session and returns the new clone. A
315 fresh session is created if the optional session parameter is not
316 provided. The function will fail if a session is provided and has
319 RATIONALE: SQLAlchemy's session is not thread safe. This method clones
320 an existing object to allow several threads to work with their own
321 instances of an ORMObject.
323 WARNING: Only persistent (committed) objects can be cloned. Changes
324 made to the original object that are not committed yet will get lost.
325 The session of the new object will always be rolled back to avoid
329 if self.session() is None:
330 raise RuntimeError( \
331 'Method clone() failed for detached object:\n%s' % self)
332 self.session().flush()
333 mapper = object_mapper(self)
334 primary_key = mapper.primary_key_from_instance(self)
335 object_class = self.__class__
337 session = DBConn().session()
338 elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
339 raise RuntimeError( \
340 'Method clone() failed due to unflushed changes in session.')
341 new_object = session.query(object_class).get(primary_key)
343 if new_object is None:
344 raise RuntimeError( \
345 'Method clone() failed for non-persistent object:\n%s' % self)
348 __all__.append('ORMObject')
350 ################################################################################
352 class Validator(MapperExtension):
354 This class calls the validate() method for each instance for the
355 'before_update' and 'before_insert' events. A global object validator is
356 used for configuring the individual mappers.
359 def before_update(self, mapper, connection, instance):
363 def before_insert(self, mapper, connection, instance):
367 validator = Validator()
369 ################################################################################
371 class ACL(ORMObject):
373 return "<ACL {0}>".format(self.name)
375 __all__.append('ACL')
377 class ACLPerSource(ORMObject):
379 return "<ACLPerSource acl={0} fingerprint={1} source={2} reason={3}>".format(self.acl.name, self.fingerprint.fingerprint, self.source, self.reason)
381 __all__.append('ACLPerSource')
383 ################################################################################
class Architecture(ORMObject):
    """ORM class for a row of the 'architecture' table (e.g. 'amd64')."""

    def __init__(self, arch_string=None, description=None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Allow comparing an Architecture directly against a plain
        # architecture name string.
        if not isinstance(val, str):
            # Defer to the normal comparison machinery for other types.
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        if not isinstance(val, str):
            # Defer to the normal comparison machinery for other types.
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # 'arch_string' comes first: ORMObject.__repr__() uses the leading
        # element of this list.
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']
408 __all__.append('Architecture')
411 def get_architecture(architecture, session=None):
413 Returns database id for given C{architecture}.
415 @type architecture: string
416 @param architecture: The name of the architecture
418 @type session: Session
419 @param session: Optional SQLA session object (a temporary one will be
420 generated if not supplied)
423 @return: Architecture object for the given arch (None if not present)
426 q = session.query(Architecture).filter_by(arch_string=architecture)
430 except NoResultFound:
433 __all__.append('get_architecture')
435 ################################################################################
437 class Archive(object):
438 def __init__(self, *args, **kwargs):
442 return '<Archive %s>' % self.archive_name
444 __all__.append('Archive')
447 def get_archive(archive, session=None):
449 returns database id for given C{archive}.
451 @type archive: string
452 @param archive: the name of the archive
454 @type session: Session
455 @param session: Optional SQLA session object (a temporary one will be
456 generated if not supplied)
459 @return: Archive object for the given name (None if not present)
462 archive = archive.lower()
464 q = session.query(Archive).filter_by(archive_name=archive)
468 except NoResultFound:
471 __all__.append('get_archive')
473 ################################################################################
475 class ArchiveFile(object):
476 def __init__(self, archive=None, component=None, file=None):
477 self.archive = archive
478 self.component = component
482 return os.path.join(self.archive.path, 'pool', self.component.component_name, self.file.filename)
484 __all__.append('ArchiveFile')
486 ################################################################################
488 class BinContents(ORMObject):
489 def __init__(self, file = None, binary = None):
493 def properties(self):
494 return ['file', 'binary']
496 __all__.append('BinContents')
498 ################################################################################
500 class DBBinary(ORMObject):
501 def __init__(self, package = None, source = None, version = None, \
502 maintainer = None, architecture = None, poolfile = None, \
503 binarytype = 'deb', fingerprint=None):
504 self.package = package
506 self.version = version
507 self.maintainer = maintainer
508 self.architecture = architecture
509 self.poolfile = poolfile
510 self.binarytype = binarytype
511 self.fingerprint = fingerprint
515 return self.binary_id
def properties(self):
    """Return the property names serialized by ORMObject.json();
    'package' is first because ORMObject.__repr__() uses the leading element."""
    return [
        'package', 'version', 'maintainer', 'source', 'architecture',
        'poolfile', 'binarytype', 'fingerprint', 'install_date',
        'suites_count', 'binary_id', 'contents_count', 'extra_sources',
    ]
522 def not_null_constraints(self):
523 return ['package', 'version', 'maintainer', 'source', 'poolfile', \
526 metadata = association_proxy('key', 'value')
528 def scan_contents(self):
530 Yields the contents of the package. Only regular files are yielded and
531 the path names are normalized after converting them from either utf-8
532 or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the
533 package does not contain any regular file.
535 fullpath = self.poolfile.fullpath
536 dpkg_cmd = ('dpkg-deb', '--fsys-tarfile', fullpath)
537 dpkg = daklib.daksubprocess.Popen(dpkg_cmd, stdout=subprocess.PIPE)
538 tar = TarFile.open(fileobj = dpkg.stdout, mode = 'r|')
539 for member in tar.getmembers():
540 if not member.isdir():
541 name = normpath(member.name)
542 # enforce proper utf-8 encoding
545 except UnicodeDecodeError:
546 name = name.decode('iso8859-1').encode('utf-8')
def read_control(self):
    """
    Reads the control information from a binary.

    Opens the .deb from the pool (via the associated poolfile) and
    extracts the raw control stanza with utils.deb_extract_control().

    @rtype: text
    @return: stanza text of the control section.
    """
    fullpath = self.poolfile.fullpath
    with open(fullpath, 'r') as deb_file:
        return utils.deb_extract_control(deb_file)
def read_control_fields(self):
    """
    Reads the control information from a binary and parses it into
    individual fields.

    @rtype: dict-like (apt_pkg.TagSection)
    @return: fields of the control section as a dictionary.
    """
    return apt_pkg.TagSection(self.read_control())
577 session = object_session(self)
578 query = session.query(BinaryMetadata).filter_by(binary=self)
579 return MetadataProxy(session, query)
581 __all__.append('DBBinary')
584 def get_suites_binary_in(package, session=None):
586 Returns list of Suite objects which given C{package} name is in
589 @param package: DBBinary package name to search for
592 @return: list of Suite objects for the given package
595 return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()
597 __all__.append('get_suites_binary_in')
600 def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
602 Returns the component name of the newest binary package in suite_list or
603 None if no package is found. The result can be optionally filtered by a list
604 of architecture names.
607 @param package: DBBinary package name to search for
609 @type suite_list: list of str
610 @param suite_list: list of suite_name items
612 @type arch_list: list of str
613 @param arch_list: optional list of arch_string items that defaults to []
615 @rtype: str or NoneType
616 @return: name of component or None
619 q = session.query(DBBinary).filter_by(package = package). \
620 join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
621 if len(arch_list) > 0:
622 q = q.join(DBBinary.architecture). \
623 filter(Architecture.arch_string.in_(arch_list))
624 binary = q.order_by(desc(DBBinary.version)).first()
628 return binary.poolfile.component.component_name
630 __all__.append('get_component_by_package_suite')
632 ################################################################################
634 class BuildQueue(object):
635 def __init__(self, *args, **kwargs):
639 return '<BuildQueue %s>' % self.queue_name
641 __all__.append('BuildQueue')
643 ################################################################################
class Component(ORMObject):
    """ORM class for a row of the 'component' table (e.g. 'main')."""

    def __init__(self, component_name=None):
        self.component_name = component_name

    def __eq__(self, val):
        # Allow comparing a Component directly against a plain
        # component name string.
        if not isinstance(val, str):
            # Defer to the normal comparison machinery for other types.
            return NotImplemented
        return self.component_name == val

    def __ne__(self, val):
        if not isinstance(val, str):
            # Defer to the normal comparison machinery for other types.
            return NotImplemented
        return self.component_name != val

    def properties(self):
        # 'component_name' first: ORMObject.__repr__() uses the leading
        # element of this list.
        return ['component_name', 'component_id', 'description',
                'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        return ['component_name']
669 __all__.append('Component')
672 def get_component(component, session=None):
674 Returns database id for given C{component}.
676 @type component: string
677 @param component: The name of the override type
680 @return: the database id for the given component
683 component = component.lower()
685 q = session.query(Component).filter_by(component_name=component)
689 except NoResultFound:
692 __all__.append('get_component')
695 def get_mapped_component(component_name, session=None):
696 """get component after mappings
698 Evaluate component mappings from ComponentMappings in dak.conf for the
699 given component name.
701 @todo: ansgar wants to get rid of this. It's currently only used for
704 @type component_name: str
705 @param component_name: component name
707 @param session: database session
709 @rtype: L{daklib.dbconn.Component} or C{None}
710 @return: component after applying maps or C{None}
713 for m in cnf.value_list("ComponentMappings"):
714 (src, dst) = m.split()
715 if component_name == src:
717 component = session.query(Component).filter_by(component_name=component_name).first()
720 __all__.append('get_mapped_component')
723 def get_component_names(session=None):
725 Returns list of strings of component names.
728 @return: list of strings of component names
731 return [ x.component_name for x in session.query(Component).all() ]
733 __all__.append('get_component_names')
735 ################################################################################
737 class DBConfig(object):
738 def __init__(self, *args, **kwargs):
742 return '<DBConfig %s>' % self.name
744 __all__.append('DBConfig')
746 ################################################################################
748 class DSCFile(object):
749 def __init__(self, *args, **kwargs):
753 return '<DSCFile %s>' % self.dscfile_id
755 __all__.append('DSCFile')
758 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
760 Returns a list of DSCFiles which may be empty
762 @type dscfile_id: int (optional)
763 @param dscfile_id: the dscfile_id of the DSCFiles to find
765 @type source_id: int (optional)
766 @param source_id: the source id related to the DSCFiles to find
768 @type poolfile_id: int (optional)
769 @param poolfile_id: the poolfile id related to the DSCFiles to find
772 @return: Possibly empty list of DSCFiles
775 q = session.query(DSCFile)
777 if dscfile_id is not None:
778 q = q.filter_by(dscfile_id=dscfile_id)
780 if source_id is not None:
781 q = q.filter_by(source_id=source_id)
783 if poolfile_id is not None:
784 q = q.filter_by(poolfile_id=poolfile_id)
788 __all__.append('get_dscfiles')
790 ################################################################################
792 class ExternalOverride(ORMObject):
793 def __init__(self, *args, **kwargs):
797 return '<ExternalOverride %s = %s: %s>' % (self.package, self.key, self.value)
799 __all__.append('ExternalOverride')
801 ################################################################################
803 class PoolFile(ORMObject):
804 def __init__(self, filename = None, filesize = -1, \
806 self.filename = filename
807 self.filesize = filesize
812 session = DBConn().session().object_session(self)
813 af = session.query(ArchiveFile).join(Archive) \
814 .filter(ArchiveFile.file == self) \
815 .order_by(Archive.tainted.desc()).first()
820 session = DBConn().session().object_session(self)
821 component_id = session.query(ArchiveFile.component_id).filter(ArchiveFile.file == self) \
822 .group_by(ArchiveFile.component_id).one()
823 return session.query(Component).get(component_id)
827 return os.path.basename(self.filename)
def is_valid(self, filesize = -1, md5sum = None):
    """
    Checks whether the given metadata matches this pool file.

    @type filesize: int or string
    @param filesize: expected file size in bytes

    @type md5sum: string
    @param md5sum: expected MD5 checksum

    @rtype: bool
    @return: True if both size and checksum match the stored values
    """
    # int() instead of the Python-2-only long(): Python 2's int()
    # auto-promotes to arbitrary precision, so behaviour is unchanged
    # there, and the code also runs under Python 3.
    return self.filesize == int(filesize) and self.md5sum == md5sum
832 def properties(self):
833 return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
834 'sha256sum', 'source', 'binary', 'last_used']
836 def not_null_constraints(self):
837 return ['filename', 'md5sum']
839 def identical_to(self, filename):
841 compare size and hash with the given file
844 @return: true if the given file has the same size and hash as this object; false otherwise
846 st = os.stat(filename)
847 if self.filesize != st.st_size:
850 f = open(filename, "r")
851 sha256sum = apt_pkg.sha256sum(f)
852 if sha256sum != self.sha256sum:
857 __all__.append('PoolFile')
859 ################################################################################
861 class Fingerprint(ORMObject):
862 def __init__(self, fingerprint = None):
863 self.fingerprint = fingerprint
865 def properties(self):
866 return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
869 def not_null_constraints(self):
870 return ['fingerprint']
872 __all__.append('Fingerprint')
875 def get_fingerprint(fpr, session=None):
877 Returns Fingerprint object for given fpr.
880 @param fpr: The fpr to find / add
882 @type session: SQLAlchemy
883 @param session: Optional SQL session object (a temporary one will be
884 generated if not supplied).
887 @return: the Fingerprint object for the given fpr or None
890 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
894 except NoResultFound:
899 __all__.append('get_fingerprint')
902 def get_or_set_fingerprint(fpr, session=None):
904 Returns Fingerprint object for given fpr.
906 If no matching fpr is found, a row is inserted.
909 @param fpr: The fpr to find / add
911 @type session: SQLAlchemy
912 @param session: Optional SQL session object (a temporary one will be
913 generated if not supplied). If not passed, a commit will be performed at
914 the end of the function, otherwise the caller is responsible for committing.
915 A flush will be performed either way.
918 @return: the Fingerprint object for the given fpr
921 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
925 except NoResultFound:
926 fingerprint = Fingerprint()
927 fingerprint.fingerprint = fpr
928 session.add(fingerprint)
929 session.commit_or_flush()
934 __all__.append('get_or_set_fingerprint')
936 ################################################################################
938 # Helper routine for Keyring class
939 def get_ldap_name(entry):
941 for k in ["cn", "mn", "sn"]:
943 if ret and ret[0] != "" and ret[0] != "-":
945 return " ".join(name)
947 ################################################################################
949 class Keyring(object):
953 def __init__(self, *args, **kwargs):
957 return '<Keyring %s>' % self.keyring_name
959 def de_escape_gpg_str(self, txt):
960 esclist = re.split(r'(\\x..)', txt)
961 for x in range(1,len(esclist),2):
962 esclist[x] = "%c" % (int(esclist[x][2:],16))
963 return "".join(esclist)
965 def parse_address(self, uid):
966 """parses uid and returns a tuple of real name and email address"""
968 (name, address) = email.Utils.parseaddr(uid)
969 name = re.sub(r"\s*[(].*[)]", "", name)
970 name = self.de_escape_gpg_str(name)
973 return (name, address)
975 def load_keys(self, keyring):
976 if not self.keyring_id:
977 raise Exception('Must be initialized with database information')
979 cmd = ["gpg", "--no-default-keyring", "--keyring", keyring,
980 "--with-colons", "--fingerprint", "--fingerprint"]
981 p = daklib.daksubprocess.Popen(cmd, stdout=subprocess.PIPE)
984 need_fingerprint = False
986 for line in p.stdout:
987 field = line.split(":")
988 if field[0] == "pub":
991 (name, addr) = self.parse_address(field[9])
993 self.keys[key]["email"] = addr
994 self.keys[key]["name"] = name
995 need_fingerprint = True
996 elif key and field[0] == "uid":
997 (name, addr) = self.parse_address(field[9])
998 if "email" not in self.keys[key] and "@" in addr:
999 self.keys[key]["email"] = addr
1000 self.keys[key]["name"] = name
1001 elif need_fingerprint and field[0] == "fpr":
1002 self.keys[key]["fingerprints"] = [field[9]]
1003 self.fpr_lookup[field[9]] = key
1004 need_fingerprint = False
1008 raise subprocess.CalledProcessError(r, cmd)
1010 def import_users_from_ldap(self, session):
1014 LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
1015 LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
1016 ca_cert_file = cnf.get('Import-LDAP-Fingerprints::CACertFile')
1018 l = ldap.open(LDAPServer)
1021 l.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_HARD)
1022 l.set_option(ldap.OPT_X_TLS_CACERTFILE, ca_cert_file)
1023 l.set_option(ldap.OPT_X_TLS_NEWCTX, True)
1026 l.simple_bind_s("","")
1027 Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
1028 "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
1029 ["uid", "keyfingerprint", "cn", "mn", "sn"])
1031 ldap_fin_uid_id = {}
1038 uid = entry["uid"][0]
1039 name = get_ldap_name(entry)
1040 fingerprints = entry["keyFingerPrint"]
1042 for f in fingerprints:
1043 key = self.fpr_lookup.get(f, None)
1044 if key not in self.keys:
1046 self.keys[key]["uid"] = uid
1050 keyid = get_or_set_uid(uid, session).uid_id
1051 byuid[keyid] = (uid, name)
1052 byname[uid] = (keyid, name)
1054 return (byname, byuid)
1056 def generate_users_from_keyring(self, format, session):
1060 for x in self.keys.keys():
1061 if "email" not in self.keys[x]:
1063 self.keys[x]["uid"] = format % "invalid-uid"
1065 uid = format % self.keys[x]["email"]
1066 keyid = get_or_set_uid(uid, session).uid_id
1067 byuid[keyid] = (uid, self.keys[x]["name"])
1068 byname[uid] = (keyid, self.keys[x]["name"])
1069 self.keys[x]["uid"] = uid
1072 uid = format % "invalid-uid"
1073 keyid = get_or_set_uid(uid, session).uid_id
1074 byuid[keyid] = (uid, "ungeneratable user id")
1075 byname[uid] = (keyid, "ungeneratable user id")
1077 return (byname, byuid)
1079 __all__.append('Keyring')
1082 def get_keyring(keyring, session=None):
1084 If C{keyring} does not have an entry in the C{keyrings} table yet, return None
1085 If C{keyring} already has an entry, simply return the existing Keyring
1087 @type keyring: string
1088 @param keyring: the keyring name
1091 @return: the Keyring object for this keyring
1094 q = session.query(Keyring).filter_by(keyring_name=keyring)
1098 except NoResultFound:
1101 __all__.append('get_keyring')
1104 def get_active_keyring_paths(session=None):
1107 @return: list of active keyring paths
1109 return [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).order_by(desc(Keyring.priority)).all() ]
1111 __all__.append('get_active_keyring_paths')
1113 ################################################################################
1115 class DBChange(object):
1116 def __init__(self, *args, **kwargs):
1120 return '<DBChange %s>' % self.changesname
1122 __all__.append('DBChange')
1125 def get_dbchange(filename, session=None):
1127 returns DBChange object for given C{filename}.
1129 @type filename: string
1130 @param filename: the name of the file
1132 @type session: Session
1133 @param session: Optional SQLA session object (a temporary one will be
1134 generated if not supplied)
1137 @return: DBChange object for the given filename (C{None} if not present)
1140 q = session.query(DBChange).filter_by(changesname=filename)
1144 except NoResultFound:
1147 __all__.append('get_dbchange')
1149 ################################################################################
1151 class Maintainer(ORMObject):
1152 def __init__(self, name = None):
1155 def properties(self):
1156 return ['name', 'maintainer_id']
1158 def not_null_constraints(self):
def get_split_maintainer(self):
    """Return the maintainer name split up via fix_maintainer(), or a
    tuple of four empty strings when no name is set."""
    name = getattr(self, 'name', None)
    if name is None:
        return ('', '', '', '')
    return fix_maintainer(name.strip())
1167 __all__.append('Maintainer')
1170 def get_or_set_maintainer(name, session=None):
1172 Returns Maintainer object for given maintainer name.
1174 If no matching maintainer name is found, a row is inserted.
1177 @param name: The maintainer name to add
1179 @type session: SQLAlchemy
1180 @param session: Optional SQL session object (a temporary one will be
1181 generated if not supplied). If not passed, a commit will be performed at
1182 the end of the function, otherwise the caller is responsible for committing.
1183 A flush will be performed either way.
1186 @return: the Maintainer object for the given maintainer
1189 q = session.query(Maintainer).filter_by(name=name)
1192 except NoResultFound:
1193 maintainer = Maintainer()
1194 maintainer.name = name
1195 session.add(maintainer)
1196 session.commit_or_flush()
1201 __all__.append('get_or_set_maintainer')
1204 def get_maintainer(maintainer_id, session=None):
1206 Return the name of the maintainer behind C{maintainer_id} or None if that
1207 maintainer_id is invalid.
1209 @type maintainer_id: int
1210 @param maintainer_id: the id of the maintainer
1213 @return: the Maintainer with this C{maintainer_id}
1216 return session.query(Maintainer).get(maintainer_id)
1218 __all__.append('get_maintainer')
1220 ################################################################################
1222 class NewComment(object):
1223 def __init__(self, *args, **kwargs):
1227 return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
1229 __all__.append('NewComment')
1232 def has_new_comment(policy_queue, package, version, session=None):
1234 Returns true if the given combination of C{package}, C{version} has a comment.
1236 @type package: string
1237 @param package: name of the package
1239 @type version: string
1240 @param version: package version
1242 @type session: Session
1243 @param session: Optional SQLA session object (a temporary one will be
1244 generated if not supplied)
1250 q = session.query(NewComment).filter_by(policy_queue=policy_queue)
1251 q = q.filter_by(package=package)
1252 q = q.filter_by(version=version)
1254 return bool(q.count() > 0)
1256 __all__.append('has_new_comment')
def get_new_comments(policy_queue, package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters.

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """
    q = session.query(NewComment).filter_by(policy_queue=policy_queue)
    # Each filter is optional; only narrow the query when the caller asked.
    if package is not None:
        q = q.filter_by(package=package)
    if version is not None:
        q = q.filter_by(version=version)
    if comment_id is not None:
        q = q.filter_by(comment_id=comment_id)

    return q.all()
1288 __all__.append('get_new_comments')
1290 ################################################################################
class Override(ORMObject):
    """A single entry in the override table (package -> section/priority)."""

    def __init__(self, package = None, suite = None, component = None, overridetype = None, \
        section = None, priority = None):
        self.package = package
        # BUG FIX: 'suite' was accepted but never stored, although it is
        # listed in both properties() and not_null_constraints().
        self.suite = suite
        self.component = component
        self.overridetype = overridetype
        self.section = section
        self.priority = priority

    def properties(self):
        # Attributes exposed through the ORMObject machinery.
        return ['package', 'suite', 'component', 'overridetype', 'section', \
            'priority']

    def not_null_constraints(self):
        # Columns that must never be NULL in the override table.
        return ['package', 'suite', 'component', 'overridetype', 'section']
1309 __all__.append('Override')
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to.  If
                  None, don't limit.  Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
                      limit to.  If None, don't limit.  Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a
                         list) to limit to.  If None, don't limit.  Defaults
                         to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """
    q = session.query(Override)
    q = q.filter_by(package=package)

    # Each of the following accepts a scalar or a list; normalize to a list
    # and restrict with IN (...) via a join on the related table.
    if suite is not None:
        if not isinstance(suite, list):
            suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list):
            component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list):
            overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

    return q.all()
1356 __all__.append('get_override')
1359 ################################################################################
class OverrideType(ORMObject):
    """ORM class mapped onto the override_type table."""

    def __init__(self, overridetype = None):
        self.overridetype = overridetype

    def properties(self):
        # Attributes exposed through the ORMObject machinery.
        props = ['overridetype', 'overridetype_id', 'overrides_count']
        return props

    def not_null_constraints(self):
        # The type name itself is the only mandatory column.
        return ['overridetype']
1371 __all__.append('OverrideType')
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: OverrideType
    @return: the database id for the given override type
    """
    q = session.query(OverrideType).filter_by(overridetype=override_type)

    # EAFP: a missing row simply yields None.
    try:
        return q.one()
    except NoResultFound:
        return None
1396 __all__.append('get_override_type')
1398 ################################################################################
1400 class PolicyQueue(object):
1401 def __init__(self, *args, **kwargs):
1405 return '<PolicyQueue %s>' % self.queue_name
1407 __all__.append('PolicyQueue')
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(queue_name=queuename)

    try:
        return q.one()
    except NoResultFound:
        return None
1432 __all__.append('get_policy_queue')
1434 ################################################################################
1436 class PolicyQueueUpload(object):
1437 def __cmp__(self, other):
1438 ret = cmp(self.changes.source, other.changes.source)
1440 ret = apt_pkg.version_compare(self.changes.version, other.changes.version)
1442 if self.source is not None and other.source is None:
1444 elif self.source is None and other.source is not None:
1447 ret = cmp(self.changes.changesname, other.changes.changesname)
1450 __all__.append('PolicyQueueUpload')
1452 ################################################################################
1454 class PolicyQueueByhandFile(object):
1457 __all__.append('PolicyQueueByhandFile')
1459 ################################################################################
class Priority(ORMObject):
    """ORM class mapped onto the priority table."""

    def __init__(self, priority = None, level = None):
        self.priority = priority
        # BUG FIX: 'level' was accepted but never stored, although it is
        # listed in both properties() and not_null_constraints().
        self.level = level

    def properties(self):
        return ['priority', 'priority_id', 'level', 'overrides_count']

    def not_null_constraints(self):
        return ['priority', 'level']

    def __eq__(self, val):
        # Allow comparing a Priority directly against a plain name string.
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented
1484 __all__.append('Priority')
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority
    """
    q = session.query(Priority).filter_by(priority=priority)

    try:
        return q.one()
    except NoResultFound:
        return None
1509 __all__.append('get_priority')
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """
    ret = {}
    q = session.query(Priority)
    for x in q.all():
        ret[x.priority] = x.priority_id

    return ret
1531 __all__.append('get_priorities')
1533 ################################################################################
class Section(ORMObject):
    """ORM class mapped onto the section table."""

    def __init__(self, section = None):
        self.section = section

    def properties(self):
        return ['section', 'section_id', 'overrides_count']

    def not_null_constraints(self):
        # BUG FIX: the method had no return statement; the section name is
        # the mandatory column (mirrors properties above).
        return ['section']

    def __eq__(self, val):
        # Allow comparing a Section directly against a plain name string.
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented
1557 __all__.append('Section')
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name
    """
    q = session.query(Section).filter_by(section=section)

    try:
        return q.one()
    except NoResultFound:
        return None
1582 __all__.append('get_section')
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """
    ret = {}
    q = session.query(Section)
    for x in q.all():
        ret[x.section] = x.section_id

    return ret
1604 __all__.append('get_sections')
1606 ################################################################################
class SignatureHistory(ORMObject):
    """Records one observed signature (fingerprint, timestamp, content hash)."""

    @classmethod
    def from_signed_file(cls, signed_file):
        """signature history entry from signed file

        @type signed_file: L{daklib.gpg.SignedFile}
        @param signed_file: signed file

        @rtype: L{SignatureHistory}
        """
        # BUG FIX: the method took 'cls' but assigned to an undefined
        # 'self'; it is an alternate constructor, so build and return a
        # new instance.
        self = cls()
        self.fingerprint = signed_file.primary_fingerprint
        self.signature_timestamp = signed_file.signature_timestamp
        self.contents_sha1 = signed_file.contents_sha1()
        return self

    def query(self, session):
        # Look up a previously recorded identical signature, if any.
        return session.query(SignatureHistory).filter_by(fingerprint=self.fingerprint, signature_timestamp=self.signature_timestamp, contents_sha1=self.contents_sha1).first()
1627 __all__.append('SignatureHistory')
1629 ################################################################################
class SrcContents(ORMObject):
    """One (file, source package) row of the source contents table."""

    def __init__(self, file = None, source = None):
        # NOTE: parameter name 'file' shadows the builtin but is kept for
        # interface compatibility.
        # BUG FIX: 'file' was accepted but never stored, although it is
        # listed in properties().
        self.file = file
        self.source = source

    def properties(self):
        return ['file', 'source']
1639 __all__.append('SrcContents')
1641 ################################################################################
1643 from debian.debfile import Deb822
1645 # Temporary Deb822 subclass to fix bugs with : handling; see #597249
1646 class Dak822(Deb822):
1647 def _internal_parser(self, sequence, fields=None):
1648 # The key is non-whitespace, non-colon characters before any colon.
1649 key_part = r"^(?P<key>[^: \t\n\r\f\v]+)\s*:\s*"
1650 single = re.compile(key_part + r"(?P<data>\S.*?)\s*$")
1651 multi = re.compile(key_part + r"$")
1652 multidata = re.compile(r"^\s(?P<data>.+?)\s*$")
1654 wanted_field = lambda f: fields is None or f in fields
1656 if isinstance(sequence, basestring):
1657 sequence = sequence.splitlines()
1661 for line in self.gpg_stripped_paragraph(sequence):
1662 m = single.match(line)
1665 self[curkey] = content
1667 if not wanted_field(m.group('key')):
1671 curkey = m.group('key')
1672 content = m.group('data')
1675 m = multi.match(line)
1678 self[curkey] = content
1680 if not wanted_field(m.group('key')):
1684 curkey = m.group('key')
1688 m = multidata.match(line)
1690 content += '\n' + line # XXX not m.group('data')?
1694 self[curkey] = content
1697 class DBSource(ORMObject):
1698 def __init__(self, source = None, version = None, maintainer = None, \
1699 changedby = None, poolfile = None, install_date = None, fingerprint = None):
1700 self.source = source
1701 self.version = version
1702 self.maintainer = maintainer
1703 self.changedby = changedby
1704 self.poolfile = poolfile
1705 self.install_date = install_date
1706 self.fingerprint = fingerprint
1710 return self.source_id
1712 def properties(self):
1713 return ['source', 'source_id', 'maintainer', 'changedby', \
1714 'fingerprint', 'poolfile', 'version', 'suites_count', \
1715 'install_date', 'binaries_count', 'uploaders_count']
1717 def not_null_constraints(self):
1718 return ['source', 'version', 'install_date', 'maintainer', \
1719 'changedby', 'poolfile']
1721 def read_control_fields(self):
1723 Reads the control information from a dsc
1726 @return: fields is the dsc information in a dictionary form
1728 fullpath = self.poolfile.fullpath
1729 fields = Dak822(open(self.poolfile.fullpath, 'r'))
1732 metadata = association_proxy('key', 'value')
1734 def scan_contents(self):
1736 Returns a set of names for non directories. The path names are
1737 normalized after converting them from either utf-8 or iso8859-1
1740 fullpath = self.poolfile.fullpath
1741 from daklib.contents import UnpackedSource
1742 unpacked = UnpackedSource(fullpath)
1744 for name in unpacked.get_all_filenames():
1745 # enforce proper utf-8 encoding
1747 name.decode('utf-8')
1748 except UnicodeDecodeError:
1749 name = name.decode('iso8859-1').encode('utf-8')
1755 session = object_session(self)
1756 query = session.query(SourceMetadata).filter_by(source=self)
1757 return MetadataProxy(session, query)
1759 __all__.append('DBSource')
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """
    return session.query(Suite).filter(Suite.sources.any(source=source)).all()
1775 __all__.append('get_suites_source_in')
# FIXME: This function fails badly if it finds more than 1 source package and
# its implementation is trivial enough to be inlined.
def get_source_in_suite(source, suite_name, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite_name}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite_name} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite_name: string
    @param suite: the suite name

    @rtype: string
    @return: the version for I{source} in I{suite}
    """
    suite = get_suite(suite_name, session)
    # Guard: unknown suite names simply yield None.
    if suite is None:
        return None
    try:
        return suite.get_sources(source).one()
    except NoResultFound:
        return None
1805 __all__.append('get_source_in_suite')
1808 def import_metadata_into_db(obj, session=None):
1810 This routine works on either DBBinary or DBSource objects and imports
1811 their metadata into the database
1813 fields = obj.read_control_fields()
1814 for k in fields.keys():
1817 val = str(fields[k])
1818 except UnicodeEncodeError:
1819 # Fall back to UTF-8
1821 val = fields[k].encode('utf-8')
1822 except UnicodeEncodeError:
1823 # Finally try iso8859-1
1824 val = fields[k].encode('iso8859-1')
1825 # Otherwise we allow the exception to percolate up and we cause
1826 # a reject as someone is playing silly buggers
1828 obj.metadata[get_or_set_metadatakey(k, session)] = val
1830 session.commit_or_flush()
1832 __all__.append('import_metadata_into_db')
1834 ################################################################################
1836 class SrcFormat(object):
1837 def __init__(self, *args, **kwargs):
1841 return '<SrcFormat %s>' % (self.format_name)
1843 __all__.append('SrcFormat')
1845 ################################################################################
1847 SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
1848 ('SuiteID', 'suite_id'),
1849 ('Version', 'version'),
1850 ('Origin', 'origin'),
1852 ('Description', 'description'),
1853 ('Untouchable', 'untouchable'),
1854 ('Announce', 'announce'),
1855 ('Codename', 'codename'),
1856 ('OverrideCodename', 'overridecodename'),
1857 ('ValidTime', 'validtime'),
1858 ('Priority', 'priority'),
1859 ('NotAutomatic', 'notautomatic'),
1860 ('CopyChanges', 'copychanges'),
1861 ('OverrideSuite', 'overridesuite')]
1863 # Why the heck don't we have any UNIQUE constraints in table suite?
1864 # TODO: Add UNIQUE constraints for appropriate columns.
1865 class Suite(ORMObject):
1866 def __init__(self, suite_name = None, version = None):
1867 self.suite_name = suite_name
1868 self.version = version
1870 def properties(self):
1871 return ['suite_name', 'version', 'sources_count', 'binaries_count', \
1874 def not_null_constraints(self):
1875 return ['suite_name']
1877 def __eq__(self, val):
1878 if isinstance(val, str):
1879 return (self.suite_name == val)
1880 # This signals to use the normal comparison operator
1881 return NotImplemented
1883 def __ne__(self, val):
1884 if isinstance(val, str):
1885 return (self.suite_name != val)
1886 # This signals to use the normal comparison operator
1887 return NotImplemented
1891 for disp, field in SUITE_FIELDS:
1892 val = getattr(self, field, None)
1894 ret.append("%s: %s" % (disp, val))
1896 return "\n".join(ret)
1898 def get_architectures(self, skipsrc=False, skipall=False):
1900 Returns list of Architecture objects
1902 @type skipsrc: boolean
1903 @param skipsrc: Whether to skip returning the 'source' architecture entry
1906 @type skipall: boolean
1907 @param skipall: Whether to skip returning the 'all' architecture entry
1911 @return: list of Architecture objects for the given name (may be empty)
1914 q = object_session(self).query(Architecture).with_parent(self)
1916 q = q.filter(Architecture.arch_string != 'source')
1918 q = q.filter(Architecture.arch_string != 'all')
1919 return q.order_by(Architecture.arch_string).all()
1921 def get_sources(self, source):
1923 Returns a query object representing DBSource that is part of C{suite}.
1925 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
1927 @type source: string
1928 @param source: source package name
1930 @rtype: sqlalchemy.orm.query.Query
1931 @return: a query of DBSource
1935 session = object_session(self)
1936 return session.query(DBSource).filter_by(source = source). \
1939 def get_overridesuite(self):
1940 if self.overridesuite is None:
1943 return object_session(self).query(Suite).filter_by(suite_name=self.overridesuite).one()
1947 return os.path.join(self.archive.path, 'dists', self.suite_name)
1950 def release_suite_output(self):
1951 if self.release_suite is not None:
1952 return self.release_suite
1953 return self.suite_name
1955 __all__.append('Suite')
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Suite
    @return: Suite object for the requested suite name (None if not present)
    """
    # Start by looking for the dak internal name
    q = session.query(Suite).filter_by(suite_name=suite)
    try:
        return q.one()
    except NoResultFound:
        pass

    # Then try the codename
    q = session.query(Suite).filter_by(codename=suite)
    try:
        return q.one()
    except NoResultFound:
        pass

    # Finally give release_suite a try
    q = session.query(Suite).filter_by(release_suite=suite)
    try:
        return q.one()
    except NoResultFound:
        return None
1994 __all__.append('get_suite')
1996 ################################################################################
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name. The list is
    empty if suite does not exist.

    @type suite: str
    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry
    (Default False)

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry
    (Default False)

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Architecture objects for the given name (may be empty)
    """
    try:
        return get_suite(suite, session).get_architectures(skipsrc, skipall)
    except AttributeError:
        # get_suite() returned None (unknown suite) -> no architectures.
        return []
2028 __all__.append('get_suite_architectures')
2030 ################################################################################
class Uid(ORMObject):
    """ORM class mapped onto the uid table."""

    def __init__(self, uid = None, name = None):
        # BUG FIX: neither argument was stored although both appear in
        # properties().
        self.uid = uid
        self.name = name

    def __eq__(self, val):
        # Allow comparing a Uid directly against a plain uid string.
        if isinstance(val, str):
            return (self.uid == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.uid != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        return ['uid', 'name', 'fingerprint']

    def not_null_constraints(self):
        # The uid itself is the mandatory column.
        return ['uid']
2055 __all__.append('Uid')
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: Uid
    @return: the uid object for the given uidname
    """
    q = session.query(Uid).filter_by(uid=uidname)

    # Get-or-create: insert a new row only when the lookup finds nothing.
    try:
        ret = q.one()
    except NoResultFound:
        uid = Uid()
        uid.uid = uidname
        session.add(uid)
        session.commit_or_flush()
        ret = uid

    return ret
2089 __all__.append('get_or_set_uid')
def get_uid_from_fingerprint(fpr, session=None):
    """Return the Uid associated with fingerprint C{fpr}, or None."""
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)

    try:
        return q.one()
    except NoResultFound:
        return None
2101 __all__.append('get_uid_from_fingerprint')
2103 ################################################################################
2105 class MetadataKey(ORMObject):
2106 def __init__(self, key = None):
2109 def properties(self):
2112 def not_null_constraints(self):
2115 __all__.append('MetadataKey')
def get_or_set_metadatakey(keyname, session=None):
    """
    Returns MetadataKey object for given keyname.

    If no matching keyname is found, a row is inserted.

    @type keyname: string
    @param keyname: The keyname to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: MetadataKey
    @return: the metadatakey object for the given keyname
    """
    # DOC FIX: the docstring previously documented a non-existent 'uidname'
    # parameter (copy/paste from get_or_set_uid).
    q = session.query(MetadataKey).filter_by(key=keyname)

    try:
        ret = q.one()
    except NoResultFound:
        ret = MetadataKey(keyname)
        session.add(ret)
        session.commit_or_flush()

    return ret
2147 __all__.append('get_or_set_metadatakey')
2149 ################################################################################
2151 class BinaryMetadata(ORMObject):
2152 def __init__(self, key = None, value = None, binary = None):
2155 self.binary = binary
2157 def properties(self):
2158 return ['binary', 'key', 'value']
2160 def not_null_constraints(self):
2163 __all__.append('BinaryMetadata')
2165 ################################################################################
2167 class SourceMetadata(ORMObject):
2168 def __init__(self, key = None, value = None, source = None):
2171 self.source = source
2173 def properties(self):
2174 return ['source', 'key', 'value']
2176 def not_null_constraints(self):
2179 __all__.append('SourceMetadata')
2181 ################################################################################
class MetadataProxy(object):
    """Read-only, dict-like view over a query of metadata rows.

    Wraps a SQLA query (e.g. over SourceMetadata) and exposes the
    key -> value mapping through the standard mapping protocol.
    """

    def __init__(self, session, query):
        self.session = session
        # BUG FIX: 'query' was accepted but never stored; _get() needs it.
        self.query = query

    def _get(self, key):
        # Resolve the key name to a MetadataKey row first; unknown keys
        # short-circuit to None without touching the wrapped query.
        metadata_key = self.session.query(MetadataKey).filter_by(key=key).first()
        if metadata_key is None:
            return None
        metadata = self.query.filter_by(key=metadata_key).first()
        return metadata

    def __contains__(self, key):
        if self._get(key) is not None:
            return True
        return False

    def __getitem__(self, key):
        metadata = self._get(key)
        if metadata is None:
            raise KeyError
        return metadata.value

    def get(self, key, default=None):
        # dict.get() semantics built on __getitem__.
        try:
            return self[key]
        except KeyError:
            return default
2212 ################################################################################
2214 class VersionCheck(ORMObject):
2215 def __init__(self, *args, **kwargs):
2218 def properties(self):
2219 #return ['suite_id', 'check', 'reference_id']
2222 def not_null_constraints(self):
2223 return ['suite', 'check', 'reference']
2225 __all__.append('VersionCheck')
def get_version_checks(suite_name, check = None, session = None):
    """Return the VersionCheck rows for C{suite_name}, optionally limited
    to one C{check} kind.  Always returns something iterable."""
    suite = get_suite(suite_name, session)
    if suite is None:
        # Make sure that what we return is iterable so that list comprehensions
        # involving this don't cause a traceback
        return []

    q = session.query(VersionCheck).filter_by(suite=suite)
    if check:
        q = q.filter_by(check=check)

    return q.all()
2239 __all__.append('get_version_checks')
2241 ################################################################################
2243 class DBConn(object):
2245 database module init.
2249 def __init__(self, *args, **kwargs):
2250 self.__dict__ = self.__shared_state
2252 if not getattr(self, 'initialised', False):
2253 self.initialised = True
2254 self.debug = kwargs.has_key('debug')
2257 def __setuptables(self):
2260 'acl_architecture_map',
2261 'acl_fingerprint_map',
2268 'binaries_metadata',
2276 'external_overrides',
2277 'extra_src_references',
2279 'files_archive_map',
2285 # TODO: the maintainer column in table override should be removed.
2289 'policy_queue_upload',
2290 'policy_queue_upload_binaries_map',
2291 'policy_queue_byhand_file',
2294 'signature_history',
2303 'suite_architectures',
2304 'suite_build_queue_copy',
2305 'suite_src_formats',
2311 'almost_obsolete_all_associations',
2312 'almost_obsolete_src_associations',
2313 'any_associations_source',
2314 'bin_associations_binaries',
2315 'binaries_suite_arch',
2318 'newest_all_associations',
2319 'newest_any_associations',
2321 'newest_src_association',
2322 'obsolete_all_associations',
2323 'obsolete_any_associations',
2324 'obsolete_any_by_all_associations',
2325 'obsolete_src_associations',
2328 'src_associations_bin',
2329 'src_associations_src',
2330 'suite_arch_by_name',
2333 for table_name in tables:
2334 table = Table(table_name, self.db_meta, \
2335 autoload=True, useexisting=True)
2336 setattr(self, 'tbl_%s' % table_name, table)
2338 for view_name in views:
2339 view = Table(view_name, self.db_meta, autoload=True)
2340 setattr(self, 'view_%s' % view_name, view)
2342 def __setupmappers(self):
2343 mapper(Architecture, self.tbl_architecture,
2344 properties = dict(arch_id = self.tbl_architecture.c.id,
2345 suites = relation(Suite, secondary=self.tbl_suite_architectures,
2346 order_by=self.tbl_suite.c.suite_name,
2347 backref=backref('architectures', order_by=self.tbl_architecture.c.arch_string))),
2348 extension = validator)
2350 mapper(ACL, self.tbl_acl,
2352 architectures = relation(Architecture, secondary=self.tbl_acl_architecture_map, collection_class=set),
2353 fingerprints = relation(Fingerprint, secondary=self.tbl_acl_fingerprint_map, collection_class=set),
2354 match_keyring = relation(Keyring, primaryjoin=(self.tbl_acl.c.match_keyring_id == self.tbl_keyrings.c.id)),
2355 per_source = relation(ACLPerSource, collection_class=set),
2358 mapper(ACLPerSource, self.tbl_acl_per_source,
2360 acl = relation(ACL),
2361 fingerprint = relation(Fingerprint, primaryjoin=(self.tbl_acl_per_source.c.fingerprint_id == self.tbl_fingerprint.c.id)),
2362 created_by = relation(Fingerprint, primaryjoin=(self.tbl_acl_per_source.c.created_by_id == self.tbl_fingerprint.c.id)),
2365 mapper(Archive, self.tbl_archive,
2366 properties = dict(archive_id = self.tbl_archive.c.id,
2367 archive_name = self.tbl_archive.c.name))
2369 mapper(ArchiveFile, self.tbl_files_archive_map,
2370 properties = dict(archive = relation(Archive, backref='files'),
2371 component = relation(Component),
2372 file = relation(PoolFile, backref='archives')))
2374 mapper(BuildQueue, self.tbl_build_queue,
2375 properties = dict(queue_id = self.tbl_build_queue.c.id,
2376 suite = relation(Suite, primaryjoin=(self.tbl_build_queue.c.suite_id==self.tbl_suite.c.id))))
2378 mapper(DBBinary, self.tbl_binaries,
2379 properties = dict(binary_id = self.tbl_binaries.c.id,
2380 package = self.tbl_binaries.c.package,
2381 version = self.tbl_binaries.c.version,
2382 maintainer_id = self.tbl_binaries.c.maintainer,
2383 maintainer = relation(Maintainer),
2384 source_id = self.tbl_binaries.c.source,
2385 source = relation(DBSource, backref='binaries'),
2386 arch_id = self.tbl_binaries.c.architecture,
2387 architecture = relation(Architecture),
2388 poolfile_id = self.tbl_binaries.c.file,
2389 poolfile = relation(PoolFile),
2390 binarytype = self.tbl_binaries.c.type,
2391 fingerprint_id = self.tbl_binaries.c.sig_fpr,
2392 fingerprint = relation(Fingerprint),
2393 install_date = self.tbl_binaries.c.install_date,
2394 suites = relation(Suite, secondary=self.tbl_bin_associations,
2395 backref=backref('binaries', lazy='dynamic')),
2396 extra_sources = relation(DBSource, secondary=self.tbl_extra_src_references,
2397 backref=backref('extra_binary_references', lazy='dynamic')),
2398 key = relation(BinaryMetadata, cascade='all',
2399 collection_class=attribute_mapped_collection('key'))),
2400 extension = validator)
2402 mapper(Component, self.tbl_component,
2403 properties = dict(component_id = self.tbl_component.c.id,
2404 component_name = self.tbl_component.c.name),
2405 extension = validator)
2407 mapper(DBConfig, self.tbl_config,
2408 properties = dict(config_id = self.tbl_config.c.id))
2410 mapper(DSCFile, self.tbl_dsc_files,
2411 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
2412 source_id = self.tbl_dsc_files.c.source,
2413 source = relation(DBSource),
2414 poolfile_id = self.tbl_dsc_files.c.file,
2415 poolfile = relation(PoolFile)))
2417 mapper(ExternalOverride, self.tbl_external_overrides,
2419 suite_id = self.tbl_external_overrides.c.suite,
2420 suite = relation(Suite),
2421 component_id = self.tbl_external_overrides.c.component,
2422 component = relation(Component)))
2424 mapper(PoolFile, self.tbl_files,
2425 properties = dict(file_id = self.tbl_files.c.id,
2426 filesize = self.tbl_files.c.size),
2427 extension = validator)
2429 mapper(Fingerprint, self.tbl_fingerprint,
2430 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
2431 uid_id = self.tbl_fingerprint.c.uid,
2432 uid = relation(Uid),
2433 keyring_id = self.tbl_fingerprint.c.keyring,
2434 keyring = relation(Keyring),
2435 acl = relation(ACL)),
2436 extension = validator)
2438 mapper(Keyring, self.tbl_keyrings,
2439 properties = dict(keyring_name = self.tbl_keyrings.c.name,
2440 keyring_id = self.tbl_keyrings.c.id,
2441 acl = relation(ACL, primaryjoin=(self.tbl_keyrings.c.acl_id == self.tbl_acl.c.id)))),
2443 mapper(DBChange, self.tbl_changes,
2444 properties = dict(change_id = self.tbl_changes.c.id,
2445 seen = self.tbl_changes.c.seen,
2446 source = self.tbl_changes.c.source,
2447 binaries = self.tbl_changes.c.binaries,
2448 architecture = self.tbl_changes.c.architecture,
2449 distribution = self.tbl_changes.c.distribution,
2450 urgency = self.tbl_changes.c.urgency,
2451 maintainer = self.tbl_changes.c.maintainer,
2452 changedby = self.tbl_changes.c.changedby,
2453 date = self.tbl_changes.c.date,
2454 version = self.tbl_changes.c.version))
2456 mapper(Maintainer, self.tbl_maintainer,
2457 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
2458 maintains_sources = relation(DBSource, backref='maintainer',
2459 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
2460 changed_sources = relation(DBSource, backref='changedby',
2461 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
2462 extension = validator)
2464 mapper(NewComment, self.tbl_new_comments,
2465 properties = dict(comment_id = self.tbl_new_comments.c.id,
2466 policy_queue = relation(PolicyQueue)))
2468 mapper(Override, self.tbl_override,
2469 properties = dict(suite_id = self.tbl_override.c.suite,
2470 suite = relation(Suite, \
2471 backref=backref('overrides', lazy='dynamic')),
2472 package = self.tbl_override.c.package,
2473 component_id = self.tbl_override.c.component,
2474 component = relation(Component, \
2475 backref=backref('overrides', lazy='dynamic')),
2476 priority_id = self.tbl_override.c.priority,
2477 priority = relation(Priority, \
2478 backref=backref('overrides', lazy='dynamic')),
2479 section_id = self.tbl_override.c.section,
2480 section = relation(Section, \
2481 backref=backref('overrides', lazy='dynamic')),
2482 overridetype_id = self.tbl_override.c.type,
2483 overridetype = relation(OverrideType, \
2484 backref=backref('overrides', lazy='dynamic'))))
2486 mapper(OverrideType, self.tbl_override_type,
2487 properties = dict(overridetype = self.tbl_override_type.c.type,
2488 overridetype_id = self.tbl_override_type.c.id))
2490 mapper(PolicyQueue, self.tbl_policy_queue,
2491 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id,
2492 suite = relation(Suite, primaryjoin=(self.tbl_policy_queue.c.suite_id == self.tbl_suite.c.id))))
2494 mapper(PolicyQueueUpload, self.tbl_policy_queue_upload,
2496 changes = relation(DBChange),
2497 policy_queue = relation(PolicyQueue, backref='uploads'),
2498 target_suite = relation(Suite),
2499 source = relation(DBSource),
2500 binaries = relation(DBBinary, secondary=self.tbl_policy_queue_upload_binaries_map),
2503 mapper(PolicyQueueByhandFile, self.tbl_policy_queue_byhand_file,
2505 upload = relation(PolicyQueueUpload, backref='byhand'),
2509 mapper(Priority, self.tbl_priority,
2510 properties = dict(priority_id = self.tbl_priority.c.id))
2512 mapper(Section, self.tbl_section,
2513 properties = dict(section_id = self.tbl_section.c.id,
2514 section=self.tbl_section.c.section))
2516 mapper(SignatureHistory, self.tbl_signature_history)
2518 mapper(DBSource, self.tbl_source,
2519 properties = dict(source_id = self.tbl_source.c.id,
2520 version = self.tbl_source.c.version,
2521 maintainer_id = self.tbl_source.c.maintainer,
2522 poolfile_id = self.tbl_source.c.file,
2523 poolfile = relation(PoolFile),
2524 fingerprint_id = self.tbl_source.c.sig_fpr,
2525 fingerprint = relation(Fingerprint),
2526 changedby_id = self.tbl_source.c.changedby,
2527 srcfiles = relation(DSCFile,
2528 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
2529 suites = relation(Suite, secondary=self.tbl_src_associations,
2530 backref=backref('sources', lazy='dynamic')),
2531 uploaders = relation(Maintainer,
2532 secondary=self.tbl_src_uploaders),
2533 key = relation(SourceMetadata, cascade='all',
2534 collection_class=attribute_mapped_collection('key'))),
2535 extension = validator)
2537 mapper(SrcFormat, self.tbl_src_format,
2538 properties = dict(src_format_id = self.tbl_src_format.c.id,
2539 format_name = self.tbl_src_format.c.format_name))
2541 mapper(Suite, self.tbl_suite,
2542 properties = dict(suite_id = self.tbl_suite.c.id,
2543 policy_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.policy_queue_id == self.tbl_policy_queue.c.id)),
2544 new_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.new_queue_id == self.tbl_policy_queue.c.id)),
2545 copy_queues = relation(BuildQueue,
2546 secondary=self.tbl_suite_build_queue_copy),
2547 srcformats = relation(SrcFormat, secondary=self.tbl_suite_src_formats,
2548 backref=backref('suites', lazy='dynamic')),
2549 archive = relation(Archive, backref='suites'),
2550 acls = relation(ACL, secondary=self.tbl_suite_acl_map, collection_class=set),
2551 components = relation(Component, secondary=self.tbl_component_suite,
2552 order_by=self.tbl_component.c.ordering,
2553 backref=backref('suites'))),
2554 extension = validator)
2556 mapper(Uid, self.tbl_uid,
2557 properties = dict(uid_id = self.tbl_uid.c.id,
2558 fingerprint = relation(Fingerprint)),
2559 extension = validator)
2561 mapper(BinContents, self.tbl_bin_contents,
2563 binary = relation(DBBinary,
2564 backref=backref('contents', lazy='dynamic', cascade='all')),
2565 file = self.tbl_bin_contents.c.file))
2567 mapper(SrcContents, self.tbl_src_contents,
2569 source = relation(DBSource,
2570 backref=backref('contents', lazy='dynamic', cascade='all')),
2571 file = self.tbl_src_contents.c.file))
2573 mapper(MetadataKey, self.tbl_metadata_keys,
2575 key_id = self.tbl_metadata_keys.c.key_id,
2576 key = self.tbl_metadata_keys.c.key))
2578 mapper(BinaryMetadata, self.tbl_binaries_metadata,
2580 binary_id = self.tbl_binaries_metadata.c.bin_id,
2581 binary = relation(DBBinary),
2582 key_id = self.tbl_binaries_metadata.c.key_id,
2583 key = relation(MetadataKey),
2584 value = self.tbl_binaries_metadata.c.value))
2586 mapper(SourceMetadata, self.tbl_source_metadata,
2588 source_id = self.tbl_source_metadata.c.src_id,
2589 source = relation(DBSource),
2590 key_id = self.tbl_source_metadata.c.key_id,
2591 key = relation(MetadataKey),
2592 value = self.tbl_source_metadata.c.value))
2594 mapper(VersionCheck, self.tbl_version_check,
2596 suite_id = self.tbl_version_check.c.suite,
2597 suite = relation(Suite, primaryjoin=self.tbl_version_check.c.suite==self.tbl_suite.c.id),
2598 reference_id = self.tbl_version_check.c.reference,
2599 reference = relation(Suite, primaryjoin=self.tbl_version_check.c.reference==self.tbl_suite.c.id, lazy='joined')))
2601 ## Connection functions
2602 def __createconn(self):
2603 from config import Config
2605 if cnf.has_key("DB::Service"):
2606 connstr = "postgresql://service=%s" % cnf["DB::Service"]
2607 elif cnf.has_key("DB::Host"):
2609 connstr = "postgresql://%s" % cnf["DB::Host"]
2610 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
2611 connstr += ":%s" % cnf["DB::Port"]
2612 connstr += "/%s" % cnf["DB::Name"]
2615 connstr = "postgresql:///%s" % cnf["DB::Name"]
2616 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
2617 connstr += "?port=%s" % cnf["DB::Port"]
2619 engine_args = { 'echo': self.debug }
2620 if cnf.has_key('DB::PoolSize'):
2621 engine_args['pool_size'] = int(cnf['DB::PoolSize'])
2622 if cnf.has_key('DB::MaxOverflow'):
2623 engine_args['max_overflow'] = int(cnf['DB::MaxOverflow'])
2624 if sa_major_version != '0.5' and cnf.has_key('DB::Unicode') and \
2625 cnf['DB::Unicode'] == 'false':
2626 engine_args['use_native_unicode'] = False
2628 # Monkey patch a new dialect in in order to support service= syntax
2629 import sqlalchemy.dialects.postgresql
2630 from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
2631 class PGDialect_psycopg2_dak(PGDialect_psycopg2):
2632 def create_connect_args(self, url):
2633 if str(url).startswith('postgresql://service='):
2635 servicename = str(url)[21:]
2636 return (['service=%s' % servicename], {})
2638 return PGDialect_psycopg2.create_connect_args(self, url)
2640 sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak
2643 self.db_pg = create_engine(connstr, **engine_args)
2644 self.db_meta = MetaData()
2645 self.db_meta.bind = self.db_pg
2646 self.db_smaker = sessionmaker(bind=self.db_pg,
2650 self.__setuptables()
2651 self.__setupmappers()
2653 except OperationalError as e:
2655 utils.fubar("Cannot connect to database (%s)" % str(e))
2657 self.pid = os.getpid()
2659 def session(self, work_mem = 0):
# Hand out a new SQLAlchemy session from this connection's factory.
# NOTE(review): the triple-quote delimiters enclosing the following
# docstring text (original lines 2660 and 2665) are missing from
# this excerpt -- confirm against the full source.
2661 Returns a new session object. If a work_mem parameter is provided a new
2662 transaction is started and the work_mem parameter is set for this
2663 transaction. The work_mem parameter is measured in MB. A default value
2664 will be used if the parameter is not set.
# A forked child must not share the parent's database connection:
# compare the stored pid and rebuild the engine/mappers if it differs.
2666 # reinitialize DBConn in new processes
2667 if self.pid != os.getpid():
# NOTE(review): the body of this `if` (original lines 2668-2669,
# presumably clear_mappers() and self.__createconn()) falls in a
# numbering gap -- confirm.
2670 session = self.db_smaker()
# Set the per-transaction work_mem (interpolated as whole MB).
# NOTE(review): the guard around this execute (likely
# `if work_mem > 0:`, original line 2671) and the final
# `return session` (original line 2673) are not visible -- confirm.
2672 session.execute("SET LOCAL work_mem TO '%d MB'" % work_mem)
# Export DBConn as part of this module's public API (the __all__ list
# itself is defined earlier in the file, outside this excerpt).
2675 __all__.append('DBConn')