5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
38 from os.path import normpath
50 import simplejson as json
52 from datetime import datetime, timedelta
53 from errno import ENOENT
54 from tempfile import mkstemp, mkdtemp
55 from subprocess import Popen, PIPE
56 from tarfile import TarFile
58 from inspect import getargspec
61 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
63 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
64 backref, MapperExtension, EXT_CONTINUE, object_mapper, clear_mappers
65 from sqlalchemy import types as sqltypes
66 from sqlalchemy.orm.collections import attribute_mapped_collection
67 from sqlalchemy.ext.associationproxy import association_proxy
69 # Don't remove this, we re-export the exceptions to scripts which import us
70 from sqlalchemy.exc import *
71 from sqlalchemy.orm.exc import NoResultFound
73 # Only import Config until Queue stuff is changed to store its config
75 from config import Config
76 from textutils import fix_maintainer
77 from dak_exceptions import DBUpdateError, NoSourceFieldError, FileExistsError
# suppress some deprecation warnings in squeeze related to sqlalchemy
# NOTE(review): this call is line-continued; its remaining arguments
# (warning category etc.) are not visible in this chunk.
warnings.filterwarnings('ignore', \
    "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
86 ################################################################################
# Patch in support for the debversion field type so that it works during
# that is for sqlalchemy 0.6
# NOTE(review): these two assignments look like alternatives selected by a
# try/except that is not visible in this chunk; as written, the second
# assignment would always win.  TODO: confirm against the full file.
UserDefinedType = sqltypes.UserDefinedType
# this one for sqlalchemy 0.5
UserDefinedType = sqltypes.TypeEngine
class DebVersion(UserDefinedType):
    """Column type mapping PostgreSQL's 'debversion' type into SQLAlchemy."""

    def get_col_spec(self):
        # presumably returns the SQL type name -- body not visible in this chunk

    def bind_processor(self, dialect):
        # body not visible in this chunk

    # ' = None' is needed for sqlalchemy 0.5:
    def result_processor(self, dialect, coltype = None):
        # body not visible in this chunk
# only the first three characters, e.g. '0.5' or '0.6'
sa_major_version = sqlalchemy.__version__[0:3]
if sa_major_version in ["0.5", "0.6"]:
    # teach table reflection about the custom 'debversion' type
    from sqlalchemy.databases import postgres
    postgres.ischema_names['debversion'] = DebVersion
    # NOTE(review): this raise presumably belongs to an 'else:' branch that
    # is not visible in this chunk (abort on unsupported SQLA versions).
    raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")

################################################################################

# Public API of this module; extended via __all__.append() after each definition.
__all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
120 ################################################################################
def session_wrapper(fn):
    """
    Wrapper around common ".., session=None):" handling. If the wrapped
    function is called without passing 'session', we create a local one
    and destroy it when the function ends.

    Also attaches a commit_or_flush method to the session; if we created a
    local session, this is a synonym for session.commit(), otherwise it is a
    synonym for session.flush().
    """

    # NOTE(review): several lines of this function (branch headers,
    # try/finally, the final 'return wrapped') are not visible in this
    # chunk; comments describe only the visible code.
    def wrapped(*args, **kwargs):
        private_transaction = False

        # Find the session object
        session = kwargs.get('session')

        if len(args) <= len(getargspec(fn)[0]) - 1:
            # No session specified as last argument or in kwargs
            private_transaction = True
            session = kwargs['session'] = DBConn().session()
        # Session is last argument in args
            session = args[-1] = DBConn().session()
            private_transaction = True

        if private_transaction:
            # local session: commit_or_flush really commits
            session.commit_or_flush = session.commit
            # caller-supplied session: only flush; the caller commits
            session.commit_or_flush = session.flush

            return fn(*args, **kwargs)

        if private_transaction:
            # We created a session; close it.

    # preserve the wrapped function's metadata for introspection
    wrapped.__doc__ = fn.__doc__
    wrapped.func_name = fn.func_name

__all__.append('session_wrapper')
171 ################################################################################
class ORMObject(object):
    """
    ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
    derived classes must implement the properties() method.
    """
    # NOTE(review): several 'def' headers, decorators and branches of this
    # class are not visible in this chunk; comments below describe only the
    # visible code, and method boundaries are marked where a header is
    # missing.

    def properties(self):
        """
        This method should be implemented by all derived classes and returns a
        list of the important properties. The properties 'created' and
        'modified' will be added automatically. A suffix '_count' should be
        added to properties that are lists or query objects. The most important
        property name should be returned as the first element in the list
        because it is used by repr().
        """

        # -- json() ('def' header not visible here) --
        """
        Returns a JSON representation of the object based on the properties
        returned from the properties() method.
        """
        # add created and modified
        all_properties = self.properties() + ['created', 'modified']
        for property in all_properties:
            # check for list or query
            if property[-6:] == '_count':
                real_property = property[:-6]
                if not hasattr(self, real_property):
                value = getattr(self, real_property)
                if hasattr(value, '__len__'):
                elif hasattr(value, 'count'):
                    # query (but not during validation)
                    if self.in_validation:
                    value = value.count()
                    raise KeyError('Do not understand property %s.' % property)
            if not hasattr(self, property):
            value = getattr(self, property)
            elif isinstance(value, ORMObject):
                # use repr() for ORMObject types
            # we want a string for all other types because json cannot
            data[property] = value
        return json.dumps(data)

        # -- classname() ('def' header not visible here) --
        """
        Returns the name of the class.
        """
        return type(self).__name__

        # -- __repr__() ('def' header not visible here) --
        """
        Returns a short string representation of the object using the first
        element from the properties() method.
        """
        primary_property = self.properties()[0]
        value = getattr(self, primary_property)
        return '<%s %s>' % (self.classname(), str(value))

        # -- __str__() ('def' header not visible here) --
        """
        Returns a human readable form of the object using the properties()
        method.
        """
        return '<%s %s>' % (self.classname(), self.json())

    def not_null_constraints(self):
        """
        Returns a list of properties that must be not NULL. Derived classes
        should override this method if needed.
        """

    # printf-style template used by validate() when a constraint fails
    validation_message = \
        "Validation failed because property '%s' must not be empty in object\n%s"

    # guards json() against issuing queries while a flush is validating
    in_validation = False

        # -- validate() ('def' header not visible here) --
        """
        This function validates the not NULL constraints as returned by
        not_null_constraints(). It raises the DBUpdateError exception if
        a constraint is violated.
        """
        for property in self.not_null_constraints():
            # TODO: It is a bit awkward that the mapper configuration allow
            # directly setting the numeric _id columns. We should get rid of it
            if hasattr(self, property + '_id') and \
                getattr(self, property + '_id') is not None:
            if not hasattr(self, property) or getattr(self, property) is None:
                # str() might lead to races due to a 2nd flush
                self.in_validation = True
                message = self.validation_message % (property, str(self))
                self.in_validation = False
                raise DBUpdateError(message)

    # NOTE(review): presumably decorated with @classmethod/@session_wrapper
    # in the full file.
    def get(cls, primary_key, session = None):
        """
        This is a support function that allows getting an object by its primary
        key.

        Architecture.get(3[, session])

        instead of the more verbose

        session.query(Architecture).get(3)
        """
        return session.query(cls).get(primary_key)

    def session(self, replace = False):
        """
        Returns the current session that is associated with the object. May
        return None if object is in detached state.
        """
        return object_session(self)

    def clone(self, session = None):
        """
        Clones the current object in a new session and returns the new clone. A
        fresh session is created if the optional session parameter is not
        provided. The function will fail if a session is provided and has
        unflushed changes.

        RATIONALE: SQLAlchemy's session is not thread safe. This method clones
        an existing object to allow several threads to work with their own
        instances of an ORMObject.

        WARNING: Only persistent (committed) objects can be cloned. Changes
        made to the original object that are not committed yet will get lost.
        The session of the new object will always be rolled back to avoid
        resource leaks.
        """
        if self.session() is None:
            raise RuntimeError( \
                'Method clone() failed for detached object:\n%s' % self)
        self.session().flush()
        # identify the object by its primary key so it can be re-fetched
        mapper = object_mapper(self)
        primary_key = mapper.primary_key_from_instance(self)
        object_class = self.__class__
        # NOTE(review): the branch header is not visible -- presumably
        # 'if session is None:' creating a fresh session here.
            session = DBConn().session()
        elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
            raise RuntimeError( \
                'Method clone() failed due to unflushed changes in session.')
        new_object = session.query(object_class).get(primary_key)
        if new_object is None:
            raise RuntimeError( \
                'Method clone() failed for non-persistent object:\n%s' % self)

__all__.append('ORMObject')
348 ################################################################################
class Validator(MapperExtension):
    """
    This class calls the validate() method for each instance for the
    'before_update' and 'before_insert' events. A global object validator is
    used for configuring the individual mappers.
    """

    def before_update(self, mapper, connection, instance):
        # body not visible in this chunk; presumably calls
        # instance.validate() and returns EXT_CONTINUE

    def before_insert(self, mapper, connection, instance):
        # body not visible in this chunk

# single shared extension instance used when configuring mappers
validator = Validator()
367 ################################################################################
class Architecture(ORMObject):
    """A build architecture known to the archive, e.g. 'amd64' or 'source'."""

    def __init__(self, arch_string=None, description=None):
        self.description = description
        self.arch_string = arch_string

    def __eq__(self, val):
        # Support comparing an Architecture directly against a plain name.
        if not isinstance(val, str):
            # Anything else defers to the default comparison machinery.
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        # Mirror image of __eq__ for string operands.
        if not isinstance(val, str):
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        """Important properties used by ORMObject's json()/repr()."""
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        """Properties that must not be NULL (see ORMObject.validate())."""
        return ['arch_string']

__all__.append('Architecture')
def get_architecture(architecture, session=None):
    """
    Returns database id for given C{architecture}.

    @type architecture: string
    @param architecture: The name of the architecture

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Architecture
    @return: Architecture object for the given arch (None if not present)
    """
    # NOTE(review): presumably decorated with @session_wrapper in the full
    # file; the try:/return lines are not visible in this chunk.
    q = session.query(Architecture).filter_by(arch_string=architecture)
    except NoResultFound:
        # no such architecture -> presumably returns None

__all__.append('get_architecture')
# TODO: should be removed because the implementation is too trivial

# NOTE(review): presumably decorated with @session_wrapper in the full file.
def get_architecture_suites(architecture, session=None):
    """
    Returns list of Suite objects for given C{architecture} name

    @type architecture: str
    @param architecture: Architecture name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Suite objects for the given name (may be empty)
    """
    # '.suites' comes from the ORM relationship configured on Architecture
    return get_architecture(architecture, session).suites

__all__.append('get_architecture_suites')
440 ################################################################################
class Archive(object):
    """ORM class for a row of the 'archive' table."""

    def __init__(self, *args, **kwargs):
        # body not visible in this chunk (presumably 'pass')

        # -- __repr__() ('def' header not visible here) --
        return '<Archive %s>' % self.archive_name

__all__.append('Archive')
def get_archive(archive, session=None):
    """
    returns database id for given C{archive}.

    @type archive: string
    @param archive: the name of the archive

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Archive
    @return: Archive object for the given name (None if not present)
    """
    # archive names are matched case-insensitively by lowercasing the input
    archive = archive.lower()

    # NOTE(review): the try:/return lines are not visible in this chunk.
    q = session.query(Archive).filter_by(archive_name=archive)
    except NoResultFound:

__all__.append('get_archive')
478 ################################################################################
class BinContents(ORMObject):
    """Associates one contents file path with one binary package."""

    def __init__(self, file = None, binary = None):
        # attribute assignments not visible in this chunk

    def properties(self):
        return ['file', 'binary']

__all__.append('BinContents')
490 ################################################################################
def subprocess_setup():
    """Reset SIGPIPE to its default disposition in a child process.

    Python installs its own SIGPIPE handling by default, which is usually
    not what non-Python subprocesses expect; intended for use as a Popen
    preexec_fn.
    """
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
class DBBinary(ORMObject):
    """ORM class for a binary package (a row of the 'binaries' table)."""
    # NOTE(review): several lines of this class (parameter continuations,
    # 'def' headers, returns) are not visible in this chunk; comments
    # describe only the visible code.

    def __init__(self, package = None, source = None, version = None, \
        maintainer = None, architecture = None, poolfile = None, \
        self.package = package
        self.version = version
        self.maintainer = maintainer
        self.architecture = architecture
        self.poolfile = poolfile
        self.binarytype = binarytype

        # -- pkid accessor ('def' header not visible here) --
        return self.binary_id

    def properties(self):
        return ['package', 'version', 'maintainer', 'source', 'architecture', \
            'poolfile', 'binarytype', 'fingerprint', 'install_date', \
            'suites_count', 'binary_id', 'contents_count', 'extra_sources']

    def not_null_constraints(self):
        return ['package', 'version', 'maintainer', 'source', 'poolfile', \

    # dict-like view onto this binary's metadata key/value rows
    metadata = association_proxy('key', 'value')

    def get_component_name(self):
        # the component is derived from the pool file's location
        return self.poolfile.location.component.component_name

    def scan_contents(self):
        """
        Yields the contents of the package. Only regular files are yielded and
        the path names are normalized after converting them from either utf-8
        or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the
        package does not contain any regular file.
        """
        fullpath = self.poolfile.fullpath
        # stream the deb's filesystem tarball out of dpkg-deb; SIGPIPE is
        # reset so dpkg-deb behaves like a plain unix tool
        dpkg = Popen(['dpkg-deb', '--fsys-tarfile', fullpath], stdout = PIPE,
            preexec_fn = subprocess_setup)
        tar = TarFile.open(fileobj = dpkg.stdout, mode = 'r|')
        for member in tar.getmembers():
            if not member.isdir():
                name = normpath(member.name)
                # enforce proper utf-8 encoding
                except UnicodeDecodeError:
                    name = name.decode('iso8859-1').encode('utf-8')

    def read_control(self):
        """
        Reads the control information from a binary.

        @rtype: text
        @return: stanza text of the control section.
        """
        fullpath = self.poolfile.fullpath
        deb_file = open(fullpath, 'r')
        stanza = apt_inst.debExtractControl(deb_file)
        # NOTE(review): file close / return not visible in this chunk

    def read_control_fields(self):
        """
        Reads the control information from a binary and return
        it as a dictionary.

        @rtype: dict-like (apt_pkg.TagSection)
        @return: fields of the control section as a dictionary.
        """
        stanza = self.read_control()
        return apt_pkg.TagSection(stanza)

__all__.append('DBBinary')
# NOTE(review): presumably decorated with @session_wrapper in the full file.
def get_suites_binary_in(package, session=None):
    """
    Returns list of Suite objects which given C{package} name is in

    @type package: str
    @param package: DBBinary package name to search for

    @rtype: list
    @return: list of Suite objects for the given package
    """
    return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()

__all__.append('get_suites_binary_in')
def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
    """
    Returns the component name of the newest binary package in suite_list or
    None if no package is found. The result can be optionally filtered by a list
    of architecture names.

    @type package: str
    @param package: DBBinary package name to search for

    @type suite_list: list of str
    @param suite_list: list of suite_name items

    @type arch_list: list of str
    @param arch_list: optional list of arch_string items that defaults to []

    @rtype: str or NoneType
    @return: name of component or None
    """
    # NOTE(review): arch_list=[] is a mutable default argument; it is only
    # read here, so harmless, but worth keeping in mind.
    q = session.query(DBBinary).filter_by(package = package). \
        join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
    if len(arch_list) > 0:
        q = q.join(DBBinary.architecture). \
            filter(Architecture.arch_string.in_(arch_list))
    # newest version wins
    binary = q.order_by(desc(DBBinary.version)).first()
    # NOTE(review): a 'binary is None' guard is presumably elided here.
    return binary.get_component_name()

__all__.append('get_component_by_package_suite')
629 ################################################################################
class BinaryACL(object):
    """ORM class for a row of the 'binary_acl' table."""

    def __init__(self, *args, **kwargs):
        # body not visible in this chunk (presumably 'pass')

        # -- __repr__() ('def' header not visible here) --
        return '<BinaryACL %s>' % self.binary_acl_id

__all__.append('BinaryACL')
640 ################################################################################
class BinaryACLMap(object):
    """ORM class for a row of the 'binary_acl_map' table."""

    def __init__(self, *args, **kwargs):
        # body not visible in this chunk (presumably 'pass')

        # -- __repr__() ('def' header not visible here) --
        return '<BinaryACLMap %s>' % self.binary_acl_map_id

__all__.append('BinaryACLMap')
651 ################################################################################
656 ArchiveDir "%(archivepath)s";
657 OverrideDir "%(overridedir)s";
658 CacheDir "%(cachedir)s";
663 Packages::Compress ". bzip2 gzip";
664 Sources::Compress ". bzip2 gzip";
669 bindirectory "incoming"
674 BinOverride "override.sid.all3";
675 BinCacheDB "packages-accepted.db";
677 FileList "%(filelist)s";
680 Packages::Extensions ".deb .udeb";
683 bindirectory "incoming/"
686 BinOverride "override.sid.all3";
687 SrcOverride "override.sid.all3.src";
688 FileList "%(filelist)s";
class BuildQueue(object):
    """A build queue (e.g. buildd incoming) and its on-disk apt metadata."""
    # NOTE(review): large parts of this class (loop/branch headers,
    # try/except scaffolding, returns) are not visible in this chunk;
    # comments describe only the visible code, and the nesting around
    # elided lines is approximate.

    def __init__(self, *args, **kwargs):
        # body not visible in this chunk (presumably 'pass')

        # -- __repr__() ('def' header not visible here) --
        return '<BuildQueue %s>' % self.queue_name

    def write_metadata(self, starttime, force=False):
        # Do we write out metafiles?
        if not (force or self.generate_metadata):

        session = DBConn().session().object_session(self)

        fl_fd = fl_name = ac_fd = ac_name = None

        # every architecture except the pseudo-architecture 'source'
        arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
        startdir = os.getcwd()

        # Grab files we want to include
        newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
        newer += session.query(BuildQueuePolicyFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueuePolicyFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
        # Write file list with newer files
        (fl_fd, fl_name) = mkstemp()
        # (loop header not visible) one path per line for apt-ftparchive
            os.write(fl_fd, '%s\n' % n.fullpath)

        # Write minimal apt.conf
        # TODO: Remove hardcoding from template
        (ac_fd, ac_name) = mkstemp()
        os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
            'cachedir': cnf["Dir::Cache"],
            'overridedir': cnf["Dir::Override"],

        # Run apt-ftparchive generate
        os.chdir(os.path.dirname(ac_name))
        os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))

        # Run apt-ftparchive release
        # TODO: Eww - fix this
        bname = os.path.basename(self.path)

        # We have to remove the Release file otherwise it'll be included in the
        # new one (inside a try/except not visible here)
            os.unlink(os.path.join(bname, 'Release'))

        os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))

        # Crude hack with open and append, but this whole section is and should be redone.
        if self.notautomatic:
            release=open("Release", "a")
            # NOTE(review): no trailing newline is written here -- verify
            # that this is intended
            release.write("NotAutomatic: yes")

        # (signing branch; the surrounding 'if' header is not visible)
            keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
            if cnf.has_key("Dinstall::SigningPubKeyring"):
                keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]

            os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))

        # Move the files if we got this far
        os.rename('Release', os.path.join(bname, 'Release'))
            os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))

        # Clean up any left behind files

    def clean_and_update(self, starttime, Logger, dryrun=False):
        """WARNING: This routine commits for you"""
        session = DBConn().session().object_session(self)

        if self.generate_metadata and not dryrun:
            self.write_metadata(starttime)

        # Grab files older than our execution time
        older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
        older += session.query(BuildQueuePolicyFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueuePolicyFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()

        # (loop over 'older' with dryrun/try-except branches; headers not
        # visible in this chunk)
            Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
            Logger.log(["I: Removing %s from the queue" % o.fullpath])
            os.unlink(o.fullpath)
            # If it wasn't there, don't worry
            if e.errno == ENOENT:
            # TODO: Replace with proper logging call
            Logger.log(["E: Could not remove %s" % o.fullpath])

        # remove stale metadata/symlinks no longer referenced by the queue
        for f in os.listdir(self.path):
            if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):

            if not self.contains_filename(f):
                fp = os.path.join(self.path, f)
                Logger.log(["I: Would remove unused link %s" % fp])
                Logger.log(["I: Removing unused link %s" % fp])
                Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])

    def contains_filename(self, filename):
        """
        @rtype: bool
        @returns True if filename is supposed to be in the queue; False otherwise
        """
        session = DBConn().session().object_session(self)
        if session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id, filename = filename).count() > 0:
        elif session.query(BuildQueuePolicyFile).filter_by(build_queue = self, filename = filename).count() > 0:

    def add_file_from_pool(self, poolfile):
        """Copies a file into the pool. Assumes that the PoolFile object is
        attached to the same SQLAlchemy session as the Queue object is.

        The caller is responsible for committing after calling this function."""
        poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]

        # Check if we have a file of this name or this ID already
        for f in self.queuefiles:
            if (f.fileid is not None and f.fileid == poolfile.file_id) or \
                (f.poolfile is not None and f.poolfile.filename == poolfile_basename):
                # In this case, update the BuildQueueFile entry so we
                # don't remove it too early
                f.lastused = datetime.now()
                DBConn().session().object_session(poolfile).add(f)

        # Prepare BuildQueueFile object
        qf = BuildQueueFile()
        qf.build_queue_id = self.queue_id
        qf.lastused = datetime.now()
        qf.filename = poolfile_basename

        targetpath = poolfile.fullpath
        queuepath = os.path.join(self.path, poolfile_basename)

        # (copy-vs-symlink decision; surrounding try/if headers not visible)
            # We need to copy instead of symlink
                utils.copy(targetpath, queuepath)
                # NULL in the fileid field implies a copy
                os.symlink(targetpath, queuepath)
                qf.fileid = poolfile.file_id
        except FileExistsError:
            if not poolfile.identical_to(queuepath):

        # Get the same session as the PoolFile is using and add the qf to it
        DBConn().session().object_session(poolfile).add(qf)

    def add_changes_from_policy_queue(self, policyqueue, changes):
        """
        Copies a changes from a policy queue together with its poolfiles.

        @type policyqueue: PolicyQueue
        @param policyqueue: policy queue to copy the changes from

        @type changes: DBChange
        @param changes: changes to copy to this build queue
        """
        for policyqueuefile in changes.files:
            self.add_file_from_policy_queue(policyqueue, policyqueuefile)
        for poolfile in changes.poolfiles:
            self.add_file_from_pool(poolfile)

    def add_file_from_policy_queue(self, policyqueue, policyqueuefile):
        """
        Copies a file from a policy queue.
        Assumes that the policyqueuefile is attached to the same SQLAlchemy
        session as the Queue object is. The caller is responsible for
        committing after calling this function.

        @type policyqueue: PolicyQueue
        @param policyqueue: policy queue to copy the file from

        @type policyqueuefile: ChangePendingFile
        @param policyqueuefile: file to be added to the build queue
        """
        session = DBConn().session().object_session(policyqueuefile)

        # Is the file already there? (the try: header and early return are
        # not visible in this chunk)
            f = session.query(BuildQueuePolicyFile).filter_by(build_queue=self, file=policyqueuefile).one()
            f.lastused = datetime.now()
        except NoResultFound:
            pass # continue below

        # We have to add the file.
        f = BuildQueuePolicyFile()
        f.file = policyqueuefile
        f.filename = policyqueuefile.filename

        source = os.path.join(policyqueue.path, policyqueuefile.filename)

        # Always copy files from policy queues as they might move around.
            utils.copy(source, target)
        except FileExistsError:
            if not policyqueuefile.identical_to(target):

__all__.append('BuildQueue')
def get_build_queue(queuename, session=None):
    """
    Returns BuildQueue object for given C{queue name}, creating it if it does not
    exist.

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: BuildQueue
    @return: BuildQueue object for the given queue
    """
    # NOTE(review): the try:/return lines are not visible in this chunk.
    q = session.query(BuildQueue).filter_by(queue_name=queuename)
    except NoResultFound:

__all__.append('get_build_queue')
992 ################################################################################
class BuildQueueFile(object):
    """
    BuildQueueFile represents a file in a build queue coming from a pool.
    """

    def __init__(self, *args, **kwargs):
        # body not visible in this chunk (presumably 'pass')

        # -- __repr__() ('def' header not visible here) --
        return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)

        # -- fullpath accessor ('def' header not visible here) --
        return os.path.join(self.buildqueue.path, self.filename)

__all__.append('BuildQueueFile')
1012 ################################################################################
class BuildQueuePolicyFile(object):
    """
    BuildQueuePolicyFile represents a file in a build queue that comes from a
    policy queue (and not a pool).
    """

    def __init__(self, *args, **kwargs):
        # body not visible in this chunk (presumably 'pass')

    # commented-out accessor kept from the original:
    #def filename(self):
    #    return self.file.filename

        # -- fullpath accessor ('def' header not visible here) --
        return os.path.join(self.build_queue.path, self.filename)

__all__.append('BuildQueuePolicyFile')
1033 ################################################################################
class ChangePendingBinary(object):
    """ORM class for a row of the 'changes_pending_binaries' table."""

    def __init__(self, *args, **kwargs):
        # body not visible in this chunk (presumably 'pass')

        # -- __repr__() ('def' header not visible here) --
        return '<ChangePendingBinary %s>' % self.change_pending_binary_id

__all__.append('ChangePendingBinary')
1044 ################################################################################
class ChangePendingFile(object):
    """A file belonging to an upload sitting in a policy queue."""

    def __init__(self, *args, **kwargs):
        # body not visible in this chunk (presumably 'pass')

        # -- __repr__() ('def' header not visible here) --
        return '<ChangePendingFile %s>' % self.change_pending_file_id

    def identical_to(self, filename):
        """
        compare size and hash with the given file

        @rtype: bool
        @return: true if the given file has the same size and hash as this object; false otherwise
        """
        # NOTE(review): the return statements of this method are not
        # visible in this chunk.
        st = os.stat(filename)
        if self.size != st.st_size:

        f = open(filename, "r")
        sha256sum = apt_pkg.sha256sum(f)
        if sha256sum != self.sha256sum:

__all__.append('ChangePendingFile')
1073 ################################################################################
class ChangePendingSource(object):
    """ORM class for a row of the 'changes_pending_source' table."""

    def __init__(self, *args, **kwargs):
        # body not visible in this chunk (presumably 'pass')

        # -- __repr__() ('def' header not visible here) --
        return '<ChangePendingSource %s>' % self.change_pending_source_id

__all__.append('ChangePendingSource')
1084 ################################################################################
class Component(ORMObject):
    """An archive component such as 'main' or 'contrib'."""

    def __init__(self, component_name=None):
        self.component_name = component_name

    def __eq__(self, val):
        # Permit comparing a Component directly with a component name.
        if not isinstance(val, str):
            # Anything else hands off to the default comparison machinery.
            return NotImplemented
        return self.component_name == val

    def __ne__(self, val):
        # Inverse of __eq__ for string operands.
        if not isinstance(val, str):
            return NotImplemented
        return self.component_name != val

    def properties(self):
        """Important properties used by ORMObject's json()/repr()."""
        return ['component_name', 'component_id', 'description',
                'location_count', 'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        """Properties that must not be NULL (see ORMObject.validate())."""
        return ['component_name']

__all__.append('Component')
def get_component(component, session=None):
    """
    Returns database id for given C{component}.

    @type component: string
    @param component: The name of the override type

    @rtype: int
    @return: the database id for the given component
    """
    # component names are matched case-insensitively by lowercasing the input
    component = component.lower()

    # NOTE(review): the try:/return lines are not visible in this chunk.
    q = session.query(Component).filter_by(component_name=component)
    except NoResultFound:

__all__.append('get_component')
1135 ################################################################################
class DBConfig(object):
    """ORM class for a row of the 'config' table."""

    def __init__(self, *args, **kwargs):
        # body not visible in this chunk (presumably 'pass')

        # -- __repr__() ('def' header not visible here) --
        return '<DBConfig %s>' % self.name

__all__.append('DBConfig')
1146 ################################################################################
def get_or_set_contents_file_id(filename, session=None):
    """
    Returns database id for given filename.

    If no matching file is found, a row is inserted.

    @type filename: string
    @param filename: The filename
    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: int
    @return: the database id for the given component
    """
    # NOTE(review): the try: header, session.add() and final return are not
    # visible in this chunk.
    q = session.query(ContentFilename).filter_by(filename=filename)
        ret = q.one().cafilename_id
    except NoResultFound:
        # insert a new row; commit_or_flush makes the id available
        cf = ContentFilename()
        cf.filename = filename
        session.commit_or_flush()
        ret = cf.cafilename_id

__all__.append('get_or_set_contents_file_id')
def get_contents(suite, overridetype, section=None, session=None):
    """
    Returns contents for a suite / overridetype combination, limiting
    to a section if not None.

    @type suite: Suite
    @param suite: Suite object

    @type overridetype: OverrideType
    @param overridetype: OverrideType object

    @type section: Section
    @param section: Optional section object to limit results to

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: ResultsProxy
    @return: ResultsProxy object set up to return tuples of (filename, section,
    """

    # find me all of the contents for a given suite
    # NOTE(review): some selected columns of this query are not visible in
    # this chunk.
    contents_q = """SELECT (p.path||'/'||n.file) AS fn,
                    FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
                    JOIN content_file_names n ON (c.filename=n.id)
                    JOIN binaries b ON (b.id=c.binary_pkg)
                    JOIN override o ON (o.package=b.package)
                    JOIN section s ON (s.id=o.section)
                    WHERE o.suite = :suiteid AND o.type = :overridetypeid
                    AND b.type=:overridetypename"""

    vals = {'suiteid': suite.suite_id,
            'overridetypeid': overridetype.overridetype_id,
            'overridetypename': overridetype.overridetype}

    if section is not None:
        contents_q += " AND s.id = :sectionid"
        vals['sectionid'] = section.section_id

    contents_q += " ORDER BY fn"

    return session.execute(contents_q, vals)

__all__.append('get_contents')
1232 ################################################################################
class ContentFilepath(object):
    """ORM class for a row of the 'content_file_paths' table."""

    def __init__(self, *args, **kwargs):
        # body not visible in this chunk (presumably 'pass')

        # -- __repr__() ('def' header not visible here) --
        return '<ContentFilepath %s>' % self.filepath

__all__.append('ContentFilepath')
@session_wrapper
def get_or_set_contents_path_id(filepath, session=None):
    """
    Returns database id for given path.

    If no matching file is found, a row is inserted.

    @type filepath: string
    @param filepath: The filepath

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: int
    @return: the database id for the given path
    """

    q = session.query(ContentFilepath).filter_by(filepath=filepath)

    try:
        ret = q.one().cafilepath_id
    except NoResultFound:
        # Not there yet: insert a new row and flush/commit so the id exists.
        cf = ContentFilepath()
        cf.filepath = filepath
        session.add(cf)
        session.commit_or_flush()
        ret = cf.cafilepath_id

    return ret

__all__.append('get_or_set_contents_path_id')
1277 ################################################################################
class ContentAssociation(object):
    """ORM row object linking a binary package to a content entry."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ContentAssociation %s>' % self.ca_id

__all__.append('ContentAssociation')
def insert_content_paths(binary_id, fullpaths, session=None):
    """
    Make sure given path is associated with given binary id

    @type binary_id: int
    @param binary_id: the id of the binary
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session.  If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code.  If not passed, a commit
    will be performed at the end of the function, otherwise the caller is
    responsible for commiting.

    @return: True upon success
    """

    # Track whether we created the session ourselves; only then do we
    # own commit/rollback/close.
    privatetrans = False
    if session is None:
        session = DBConn().session()
        privatetrans = True

    try:
        # Insert paths, stripping any leading './' from the recorded name.
        def generate_path_dicts():
            for fullpath in fullpaths:
                if fullpath.startswith( './' ):
                    fullpath = fullpath[2:]

                yield {'filename':fullpath, 'id': binary_id }

        for d in generate_path_dicts():
            session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
                             d )

        session.commit()
        if privatetrans:
            session.close()
        return True

    except:
        traceback.print_exc()

        # Only rollback if we set up the session ourself
        if privatetrans:
            session.rollback()
            session.close()

        return False

__all__.append('insert_content_paths')
1341 ################################################################################
class DSCFile(object):
    """ORM row object for a file listed in a .dsc."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DSCFile %s>' % self.dscfile_id

__all__.append('DSCFile')
@session_wrapper
def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
    """
    Returns a list of DSCFiles which may be empty

    @type dscfile_id: int (optional)
    @param dscfile_id: the dscfile_id of the DSCFiles to find

    @type source_id: int (optional)
    @param source_id: the source id related to the DSCFiles to find

    @type poolfile_id: int (optional)
    @param poolfile_id: the poolfile id related to the DSCFiles to find

    @rtype: list
    @return: Possibly empty list of DSCFiles
    """

    q = session.query(DSCFile)

    # Each filter is optional; combine whichever the caller supplied.
    if dscfile_id is not None:
        q = q.filter_by(dscfile_id=dscfile_id)

    if source_id is not None:
        q = q.filter_by(source_id=source_id)

    if poolfile_id is not None:
        q = q.filter_by(poolfile_id=poolfile_id)

    return q.all()

__all__.append('get_dscfiles')
1385 ################################################################################
class ExternalOverride(ORMObject):
    """ORM row object for an external override (package/key/value)."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ExternalOverride %s = %s: %s>' % (self.package, self.key, self.value)

__all__.append('ExternalOverride')
1396 ################################################################################
class PoolFile(ORMObject):
    """ORM object for a file stored in the archive pool."""

    def __init__(self, filename = None, location = None, filesize = -1, \
        md5sum = None):
        self.filename = filename
        self.location = location
        self.filesize = filesize
        self.md5sum = md5sum

    @property
    def fullpath(self):
        # Absolute path of the file inside its Location.
        return os.path.join(self.location.path, self.filename)

    def is_valid(self, filesize = -1, md5sum = None):
        """Return True iff the given size and md5sum match this row."""
        return self.filesize == long(filesize) and self.md5sum == md5sum

    def properties(self):
        return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
            'sha256sum', 'location', 'source', 'binary', 'last_used']

    def not_null_constraints(self):
        return ['filename', 'md5sum', 'location']

    def identical_to(self, filename):
        """
        compare size and hash with the given file

        @rtype: bool
        @return: true if the given file has the same size and hash as this object; false otherwise
        """
        st = os.stat(filename)
        if self.filesize != st.st_size:
            return False

        # Fix: use a context manager so the file handle is always closed
        # (the previous code leaked the open file object).
        with open(filename, "r") as f:
            sha256sum = apt_pkg.sha256sum(f)
        if sha256sum != self.sha256sum:
            return False

        return True

__all__.append('PoolFile')
@session_wrapper
def check_poolfile(filename, filesize, md5sum, location_id, session=None):
    """
    Returns a tuple:
    (ValidFileFound [boolean], PoolFile object or None)

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @type filesize: int
    @param filesize: the size of the file to check against the DB

    @type md5sum: string
    @param md5sum: the md5sum of the file to check against the DB

    @type location_id: int
    @param location_id: the id of the location to look in

    @rtype: tuple
    @return: Tuple of length 2.
             - If valid pool file found: (C{True}, C{PoolFile object})
             - If valid pool file not found:
                 - (C{False}, C{None}) if no file found
                 - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
    """

    poolfile = session.query(Location).get(location_id). \
        files.filter_by(filename=filename).first()

    # A file only counts as valid when both size and md5sum agree.
    valid = bool(poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum))

    return (valid, poolfile)

__all__.append('check_poolfile')
# TODO: the implementation can trivially be inlined at the place where the
# function is called
@session_wrapper
def get_poolfile_by_id(file_id, session=None):
    """
    Returns a PoolFile objects or None for the given id

    @type file_id: int
    @param file_id: the id of the file to look for

    @rtype: PoolFile or None
    @return: either the PoolFile object or None
    """

    return session.query(PoolFile).get(file_id)

__all__.append('get_poolfile_by_id')
@session_wrapper
def get_poolfile_like_name(filename, session=None):
    """
    Returns an array of PoolFile objects which are like the given name

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @rtype: array
    @return: array of PoolFile objects
    """

    # TODO: There must be a way of properly using bind parameters with %FOO%
    q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))

    return q.all()

__all__.append('get_poolfile_like_name')
def add_poolfile(filename, datadict, location_id, session=None):
    """
    Add a new file to the pool

    @type filename: string
    @param filename: filename

    @type datadict: dict
    @param datadict: dict with needed data

    @type location_id: int
    @param location_id: database id of the location

    @rtype: PoolFile
    @return: the PoolFile object created
    """
    poolfile = PoolFile()
    poolfile.filename = filename
    poolfile.filesize = datadict["size"]
    poolfile.md5sum = datadict["md5sum"]
    poolfile.sha1sum = datadict["sha1sum"]
    poolfile.sha256sum = datadict["sha256sum"]
    poolfile.location_id = location_id

    session.add(poolfile)
    # Flush to get a file id (NB: This is not a commit)
    session.flush()

    return poolfile

__all__.append('add_poolfile')
1546 ################################################################################
class Fingerprint(ORMObject):
    """ORM object for a GPG key fingerprint known to the archive."""

    def __init__(self, fingerprint = None):
        self.fingerprint = fingerprint

    def properties(self):
        # NOTE(review): trailing entries reconstructed — confirm against mapper.
        return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
            'binary_reject']

    def not_null_constraints(self):
        return ['fingerprint']

__all__.append('Fingerprint')
@session_wrapper
def get_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr or None
    """

    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    try:
        ret = q.one()
    except NoResultFound:
        ret = None

    return ret

__all__.append('get_fingerprint')
@session_wrapper
def get_or_set_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    If no matching fpr is found, a row is inserted.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr
    """

    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    try:
        ret = q.one()
    except NoResultFound:
        # Unknown fingerprint: create it now.
        fingerprint = Fingerprint()
        fingerprint.fingerprint = fpr
        session.add(fingerprint)
        session.commit_or_flush()
        ret = fingerprint

    return ret

__all__.append('get_or_set_fingerprint')
1623 ################################################################################
# Helper routine for Keyring class
def get_ldap_name(entry):
    """Return the display name (cn, mn, sn joined) for an LDAP DN entry."""
    name = []
    for k in ["cn", "mn", "sn"]:
        ret = entry.get(k)
        # Skip missing, empty, or placeholder ("-") components.
        if ret and ret[0] != "" and ret[0] != "-":
            name.append(ret[0])
    return " ".join(name)
1634 ################################################################################
class Keyring(object):
    """Represents a GPG keyring known to the archive and the keys on it."""

    # Command line used to list keys; %s is replaced by the keyring path.
    gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
                     " --with-colons --fingerprint --fingerprint"

    def __init__(self, *args, **kwargs):
        # Fix: make the key caches per-instance attributes.  As shared
        # mutable class-level dicts, two Keyring instances would silently
        # pollute each other's key data.
        self.keys = {}
        self.fpr_lookup = {}

    def __repr__(self):
        return '<Keyring %s>' % self.keyring_name

    def de_escape_gpg_str(self, txt):
        """Decode gpg's \\xNN escapes in colon-format output fields."""
        esclist = re.split(r'(\\x..)', txt)
        for x in range(1,len(esclist),2):
            esclist[x] = "%c" % (int(esclist[x][2:],16))
        return "".join(esclist)

    def parse_address(self, uid):
        """parses uid and returns a tuple of real name and email address"""
        import email.Utils
        (name, address) = email.Utils.parseaddr(uid)
        name = re.sub(r"\s*[(].*[)]", "", name)
        name = self.de_escape_gpg_str(name)
        if name == "":
            name = uid
        return (name, address)

    def load_keys(self, keyring):
        """Populate self.keys / self.fpr_lookup from the given keyring file."""
        if not self.keyring_id:
            raise Exception('Must be initialized with database information')

        k = os.popen(self.gpg_invocation % keyring, "r")
        key = None
        signingkey = False

        try:
            for line in k.xreadlines():
                field = line.split(":")
                if field[0] == "pub":
                    key = field[4]
                    self.keys[key] = {}
                    (name, addr) = self.parse_address(field[9])
                    if "@" in addr:
                        self.keys[key]["email"] = addr
                        self.keys[key]["name"] = name
                    self.keys[key]["fingerprints"] = []
                    signingkey = True
                elif key and field[0] == "sub" and len(field) >= 12:
                    signingkey = ("s" in field[11])
                elif key and field[0] == "uid":
                    (name, addr) = self.parse_address(field[9])
                    if "email" not in self.keys[key] and "@" in addr:
                        self.keys[key]["email"] = addr
                        self.keys[key]["name"] = name
                elif signingkey and field[0] == "fpr":
                    self.keys[key]["fingerprints"].append(field[9])
                    self.fpr_lookup[field[9]] = key
        finally:
            # Fix: the pipe from os.popen was never closed (fd leak).
            k.close()

    def import_users_from_ldap(self, session):
        """Look up uid/name for our keys in LDAP; returns (byname, byuid)."""
        import ldap
        cnf = Config()

        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]

        l = ldap.open(LDAPServer)
        l.simple_bind_s("","")
        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
               ["uid", "keyfingerprint", "cn", "mn", "sn"])

        ldap_fin_uid_id = {}

        byuid = {}
        byname = {}

        for i in Attrs:
            entry = i[1]
            uid = entry["uid"][0]
            name = get_ldap_name(entry)
            fingerprints = entry["keyFingerPrint"]
            keyid = None
            for f in fingerprints:
                key = self.fpr_lookup.get(f, None)
                if key not in self.keys:
                    continue
                self.keys[key]["uid"] = uid

                # Only resolve/record the database uid once per entry.
                if keyid is not None:
                    continue
                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, name)
                byname[uid] = (keyid, name)

        return (byname, byuid)

    def generate_users_from_keyring(self, format, session):
        """Derive uids from key email addresses; returns (byname, byuid)."""
        byuid = {}
        byname = {}
        any_invalid = False
        for x in self.keys.keys():
            if "email" not in self.keys[x]:
                # Key had no usable email address; mark with placeholder uid.
                any_invalid = True
                self.keys[x]["uid"] = format % "invalid-uid"
            else:
                uid = format % self.keys[x]["email"]
                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, self.keys[x]["name"])
                byname[uid] = (keyid, self.keys[x]["name"])
                self.keys[x]["uid"] = uid

        if any_invalid:
            uid = format % "invalid-uid"
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, "ungeneratable user id")
            byname[uid] = (keyid, "ungeneratable user id")

        return (byname, byuid)

__all__.append('Keyring')
@session_wrapper
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @rtype: Keyring
    @return: the Keyring object for this keyring
    """

    q = session.query(Keyring).filter_by(keyring_name=keyring)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_keyring')
1780 ################################################################################
class KeyringACLMap(object):
    """ORM row object mapping a keyring to an ACL entry."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<KeyringACLMap %s>' % self.keyring_acl_map_id

__all__.append('KeyringACLMap')
1791 ################################################################################
class DBChange(object):
    """ORM object for an uploaded .changes file."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DBChange %s>' % self.changesname

    def clean_from_queue(self):
        # Fix: use the already-imported sqlalchemy.orm.object_session()
        # directly instead of constructing a throwaway session just to call
        # its object_session classmethod.
        session = object_session(self)

        # Remove changes_pool_files entries
        self.poolfiles = []

        # Remove changes_pending_files references
        self.files = []

        # Clear out of queue
        self.in_queue = None
        self.approved_for_id = None

__all__.append('DBChange')
@session_wrapper
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return: DBChange object for the given filename (C{None} if not present)
    """
    # Fix: docstring previously referred to "C(unknown)" — wrong name and
    # broken epydoc markup; it documents lookup by C{filename}.
    q = session.query(DBChange).filter_by(changesname=filename)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_dbchange')
1840 ################################################################################
class Location(ORMObject):
    """ORM object for an archive pool location on disk."""

    def __init__(self, path = None, component = None):
        self.path = path
        self.component = component
        # the column 'type' should go away, see comment at mapper
        self.archive_type = 'pool'

    def properties(self):
        # NOTE(review): trailing entries reconstructed — confirm against mapper.
        return ['path', 'location_id', 'archive_type', 'component', \
            'files_count']

    def not_null_constraints(self):
        return ['path', 'archive_type']

__all__.append('Location')
@session_wrapper
def get_location(location, component=None, archive=None, session=None):
    """
    Returns Location object for the given combination of location, component
    and archive

    @type location: string
    @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}

    @type component: string
    @param component: the component name (if None, no restriction applied)

    @type archive: string
    @param archive: the archive name (if None, no restriction applied)

    @rtype: Location / None
    @return: Either a Location object or None if one can't be found
    """

    q = session.query(Location).filter_by(path=location)

    if archive is not None:
        q = q.join(Archive).filter_by(archive_name=archive)

    if component is not None:
        q = q.join(Component).filter_by(component_name=component)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_location')
1892 ################################################################################
class Maintainer(ORMObject):
    """ORM object for a package maintainer entry."""

    def __init__(self, name = None):
        self.name = name

    def properties(self):
        return ['name', 'maintainer_id']

    def not_null_constraints(self):
        return ['name']

    def get_split_maintainer(self):
        """Return (name, rfc822, rfc2047, email) via fix_maintainer()."""
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())

__all__.append('Maintainer')
@session_wrapper
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """

    q = session.query(Maintainer).filter_by(name=name)
    try:
        ret = q.one()
    except NoResultFound:
        # Unknown maintainer: create it now.
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()
        ret = maintainer

    return ret

__all__.append('get_or_set_maintainer')
@session_wrapper
def get_maintainer(maintainer_id, session=None):
    """
    Return the name of the maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer
    @return: the Maintainer with this C{maintainer_id}
    """

    return session.query(Maintainer).get(maintainer_id)

__all__.append('get_maintainer')
1963 ################################################################################
class NewComment(object):
    """ORM row object for a comment on a package in the NEW queue."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)

__all__.append('NewComment')
@session_wrapper
def has_new_comment(package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """

    q = session.query(NewComment)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    # Fix: the comparison already yields a bool — the bool() wrapper was
    # redundant.
    return q.count() > 0

__all__.append('has_new_comment')
@session_wrapper
def get_new_comments(package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """

    q = session.query(NewComment)

    # Apply only the filters the caller actually supplied.
    if package is not None:
        q = q.filter_by(package=package)
    if version is not None:
        q = q.filter_by(version=version)
    if comment_id is not None:
        q = q.filter_by(comment_id=comment_id)

    return q.all()

__all__.append('get_new_comments')
2033 ################################################################################
class Override(ORMObject):
    """ORM object for a package override entry (section/priority per suite)."""

    def __init__(self, package = None, suite = None, component = None, overridetype = None, \
        section = None, priority = None):
        self.package = package
        self.suite = suite
        self.component = component
        self.overridetype = overridetype
        self.section = section
        self.priority = priority

    def properties(self):
        return ['package', 'suite', 'component', 'overridetype', 'section', \
            'priority']

    def not_null_constraints(self):
        return ['package', 'suite', 'component', 'overridetype', 'section']

__all__.append('Override')
@session_wrapper
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to.  If
                  None, don't limit.  Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
                      limit to.  If None, don't limit.  Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
                         limit to.  If None, don't limit.  Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """

    q = session.query(Override)
    q = q.filter_by(package=package)

    # Each optional restriction accepts a single name or a list of names.
    if suite is not None:
        if not isinstance(suite, list):
            suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list):
            component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list):
            overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

    return q.all()

__all__.append('get_override')
2102 ################################################################################
class OverrideType(ORMObject):
    """ORM object for an override type (e.g. deb, udeb, dsc)."""

    def __init__(self, overridetype = None):
        self.overridetype = overridetype

    def properties(self):
        return ['overridetype', 'overridetype_id', 'overrides_count']

    def not_null_constraints(self):
        return ['overridetype']

__all__.append('OverrideType')
@session_wrapper
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: the database id for the given override type
    """

    q = session.query(OverrideType).filter_by(overridetype=override_type)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_override_type')
2141 ################################################################################
class PolicyQueue(object):
    """ORM row object for a policy queue (e.g. NEW, byhand)."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<PolicyQueue %s>' % self.queue_name

__all__.append('PolicyQueue')
@session_wrapper
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """

    q = session.query(PolicyQueue).filter_by(queue_name=queuename)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_policy_queue')
@session_wrapper
def get_policy_queue_from_path(pathname, session=None):
    """
    Returns PolicyQueue object for given C{path name}

    @type pathname: string
    @param pathname: The path

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    # Fix: docstring documented a non-existent C{queuename} parameter; the
    # actual parameter is C{pathname}.
    q = session.query(PolicyQueue).filter_by(path=pathname)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_policy_queue_from_path')
2202 ################################################################################
class Priority(ORMObject):
    """ORM object for a package priority (required, important, ...)."""

    def __init__(self, priority = None, level = None):
        self.priority = priority
        self.level = level

    def properties(self):
        return ['priority', 'priority_id', 'level', 'overrides_count']

    def not_null_constraints(self):
        return ['priority', 'level']

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented

__all__.append('Priority')
@session_wrapper
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority
    """

    q = session.query(Priority).filter_by(priority=priority)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_priority')
@session_wrapper
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """

    ret = {}
    q = session.query(Priority)
    for x in q.all():
        ret[x.priority] = x.priority_id

    return ret

__all__.append('get_priorities')
2276 ################################################################################
class Section(ORMObject):
    """ORM object for a package section (admin, devel, ...)."""

    def __init__(self, section = None):
        self.section = section

    def properties(self):
        return ['section', 'section_id', 'overrides_count']

    def not_null_constraints(self):
        return ['section']

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented

__all__.append('Section')
@session_wrapper
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name
    """

    q = session.query(Section).filter_by(section=section)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_section')
@session_wrapper
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """

    ret = {}
    q = session.query(Section)
    for x in q.all():
        ret[x.section] = x.section_id

    return ret

__all__.append('get_sections')
2349 ################################################################################
class SrcContents(ORMObject):
    """ORM object for a file name shipped in a source package."""

    def __init__(self, file = None, source = None):
        self.file = file
        self.source = source

    def properties(self):
        return ['file', 'source']

__all__.append('SrcContents')
2361 ################################################################################
from debian.debfile import Deb822

# Temporary Deb822 subclass to fix bugs with : handling; see #597249
class Dak822(Deb822):
    def _internal_parser(self, sequence, fields=None):
        """Parse RFC822-style paragraphs, tolerant of odd ':' usage."""
        # The key is non-whitespace, non-colon characters before any colon.
        key_part = r"^(?P<key>[^: \t\n\r\f\v]+)\s*:\s*"
        single = re.compile(key_part + r"(?P<data>\S.*?)\s*$")
        multi = re.compile(key_part + r"$")
        multidata = re.compile(r"^\s(?P<data>.+?)\s*$")

        wanted_field = lambda f: fields is None or f in fields

        if isinstance(sequence, basestring):
            sequence = sequence.splitlines()

        curkey = None
        content = ""
        for line in self.gpg_stripped_paragraph(sequence):
            m = single.match(line)
            if m:
                # Single-line field: flush any pending field first.
                if curkey:
                    self[curkey] = content

                if not wanted_field(m.group('key')):
                    curkey = None
                    continue

                curkey = m.group('key')
                content = m.group('data')
                continue

            m = multi.match(line)
            if m:
                # Start of a multi-line field with an empty first line.
                if curkey:
                    self[curkey] = content

                if not wanted_field(m.group('key')):
                    curkey = None
                    continue

                curkey = m.group('key')
                content = ""
                continue

            m = multidata.match(line)
            if m:
                # Continuation line of the current multi-line field.
                content += '\n' + line  # XXX not m.group('data')?
                continue

        # Flush the final field of the paragraph.
        if curkey:
            self[curkey] = content
class DBSource(ORMObject):
    """ORM object for a source package in the archive."""

    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date

    @property
    def pkid(self):
        return self.source_id

    def properties(self):
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \
            'install_date', 'binaries_count', 'uploaders_count']

    def not_null_constraints(self):
        return ['source', 'version', 'install_date', 'maintainer', \
            'changedby', 'poolfile', 'install_date']

    def read_control_fields(self):
        """
        Reads the control information from a dsc

        @rtype: tuple
        @return: fields is the dsc information in a dictionary form
        """
        # Fix: the file handle was opened and never closed (leak), and an
        # unused 'fullpath' local was computed.  Parse within a context
        # manager instead.
        with open(self.poolfile.fullpath, 'r') as fd:
            fields = Dak822(fd)
        return fields

    metadata = association_proxy('key', 'value')

    def scan_contents(self):
        """
        Returns a set of names for non directories. The path names are
        normalized after converting them from either utf-8 or iso8859-1
        encoding. It raises SourceContentsScanError if any of the file
        names is not valid.
        """
        fullpath = self.poolfile.fullpath
        from daklib.contents import UnpackedSource
        unpacked = UnpackedSource(fullpath)
        fileset = set()
        for name in unpacked.get_all_filenames():
            # enforce proper utf-8 encoding
            try:
                name.decode('utf-8')
            except UnicodeDecodeError:
                name = name.decode('iso8859-1').encode('utf-8')
            fileset.add(name)
        return fileset

__all__.append('DBSource')
@session_wrapper
def source_exists(source, source_version, suites = None, session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
      1. exact match     => 1.0-3
      2. bin-only NMU    => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: list
    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: returns 1 if a source with expected version is found, otherwise 0
    """
    # Fix: the default used to be the mutable literal ["any"]; use None as
    # sentinel so the list cannot be shared/mutated across calls.
    if suites is None:
        suites = ["any"]

    ret = True

    from daklib.regexes import re_bin_only_nmu
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))
        if suite != "any":
            # source must exist in 'suite' or a suite that is enhanced by 'suite'
            s = get_suite(suite, session)
            enhances_vcs = session.query(VersionCheck).filter(VersionCheck.suite==s).filter_by(check='Enhances')
            considered_suites = [ vc.reference for vc in enhances_vcs ]
            considered_suites.append(s)

            q = q.filter(DBSource.suites.any(Suite.suite_id.in_([s.suite_id for s in considered_suites])))

        if q.count() > 0:
            continue

        # No source found so return not ok
        ret = False

    return ret

__all__.append('source_exists')
@session_wrapper
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """

    return session.query(Suite).filter(Suite.sources.any(source=source)).all()

__all__.append('get_suites_source_in')
@session_wrapper
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @type source: str
    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect.  If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBSource objects for the given name (may be empty)
    """

    q = session.query(DBSource).filter_by(source=source)

    # Optional narrowing criteria.
    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)

    return q.all()

__all__.append('get_sources_from_name')
# FIXME: This function fails badly if it finds more than 1 source package and
# its implementation is trivial enough to be inlined.
def get_source_in_suite(source, suite, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite: string
    @param suite: the suite name

    @return: the version for I{source} in I{suite}
    """
    # Delegate the suite-scoped lookup to Suite.get_sources().
    q = get_suite(suite, session).get_sources(source)
    # A source missing from the suite is a normal outcome, not an error.
    except NoResultFound:

__all__.append('get_source_in_suite')
def import_metadata_into_db(obj, session=None):
    """
    This routine works on either DBBinary or DBSource objects and imports
    their metadata into the database

    @param obj: a DBBinary or DBSource object; its control fields are read
    via read_control_fields() and stored into obj.metadata keyed by
    MetadataKey rows.
    @type session: Session
    @param session: SQLA session; committed or flushed at the end.
    """
    fields = obj.read_control_fields()
    for k in fields.keys():
            # Common case: the value is plain ASCII.
            val = str(fields[k])
        except UnicodeEncodeError:
            # Fall back to UTF-8
                val = fields[k].encode('utf-8')
            except UnicodeEncodeError:
                # Finally try iso8859-1
                val = fields[k].encode('iso8859-1')
                # Otherwise we allow the exception to percolate up and we cause
                # a reject as someone is playing silly buggers

        # get_or_set_metadatakey creates the key row on first use.
        obj.metadata[get_or_set_metadatakey(k, session)] = val

    session.commit_or_flush()

__all__.append('import_metadata_into_db')
2636 ################################################################################
def split_uploaders(uploaders_list):
    """
    Split the Uploaders field into the individual uploaders and yield each of
    them. Beware: email addresses might contain commas.
    """
    # Only a comma directly after a closing '>' separates uploaders, so
    # rewrite those separators to tabs first; commas inside names survive.
    separated = uploaders_list.replace(">, ", ">\t")
    for chunk in separated.split("\t"):
        yield chunk.strip()
def add_dsc_to_db(u, filename, session=None):
    # Create the DBSource row for an uploaded .dsc and register all of its
    # files in the pool (tables files and dsc_files).  Returns the new
    # source, its component, its location id and the list of new poolfiles.
    entry = u.pkg.files[filename]

    source.source = u.pkg.dsc["source"]
    source.version = u.pkg.dsc["version"]      # NB: not files[file]["version"], that has no epoch
    source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    # If Changed-By isn't available, fall back to maintainer
    if u.pkg.changes.has_key("changed-by"):
        source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
        source.changedby_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    source.install_date = datetime.now().date()

    dsc_component = entry["component"]
    dsc_location_id = entry["location id"]

    source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")

    # Set up a new poolfile if necessary
    if not entry.has_key("files id") or not entry["files id"]:
        # Path of the .dsc relative to the pool root.
        filename = entry["pool name"] + filename
        poolfile = add_poolfile(filename, entry, dsc_location_id, session)
        pfs.append(poolfile)
        entry["files id"] = poolfile.file_id

    source.poolfile_id = entry["files id"]

    # Associate the source with every suite listed in the .changes.
    suite_names = u.pkg.changes["distribution"].keys()
    source.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Add the source files to the DB (files and dsc_files)
    dscfile.source_id = source.source_id
    dscfile.poolfile_id = entry["files id"]
    session.add(dscfile)

    for dsc_file, dentry in u.pkg.dsc_files.items():
        df.source_id = source.source_id

        # If the .orig tarball is already in the pool, it's
        # files id is stored in dsc_files by check_dsc().
        files_id = dentry.get("files id", None)

        # Find the entry in the files hash
        # TODO: Bail out here properly
        for f, e in u.pkg.files.items():

        if files_id is None:
            filename = dfentry["pool name"] + dsc_file

            (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
            # FIXME: needs to check for -1/-2 and or handle exception
            if found and obj is not None:
                files_id = obj.file_id

        # If still not found, add it
        if files_id is None:
            # HACK: Force sha1sum etc into dentry
            dentry["sha1sum"] = dfentry["sha1sum"]
            dentry["sha256sum"] = dfentry["sha256sum"]
            poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
            pfs.append(poolfile)
            files_id = poolfile.file_id
            poolfile = get_poolfile_by_id(files_id, session)
            if poolfile is None:
                utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
            pfs.append(poolfile)

        df.poolfile_id = files_id

    # Add the src_uploaders to the DB
    # The maintainer is always an uploader; 'Uploaders' adds the rest.
    session.refresh(source)
    source.uploaders = [source.maintainer]
    if u.pkg.dsc.has_key("uploaders"):
        for up in split_uploaders(u.pkg.dsc["uploaders"]):
            source.uploaders.append(get_or_set_maintainer(up, session))

    return source, dsc_component, dsc_location_id, pfs

__all__.append('add_dsc_to_db')
def add_deb_to_db(u, filename, session=None):
    """
    Contrary to what you might expect, this routine deals with both
    debs and udebs. That info is in 'dbtype', whilst 'type' is
    'deb' for both of them
    """
    entry = u.pkg.files[filename]

    bin.package = entry["package"]
    bin.version = entry["version"]
    bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
    bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    bin.arch_id = get_architecture(entry["architecture"], session).arch_id
    bin.binarytype = entry["dbtype"]

    # Pool-relative path, then the absolute on-disk path.
    filename = entry["pool name"] + filename
    fullpath = os.path.join(cnf["Dir::Pool"], filename)
    if not entry.get("location id", None):
        entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id

    if entry.get("files id", None):
        # NOTE(review): bin.poolfile_id is read here *before* it is assigned
        # on the next line, so this lookup cannot use the intended id -- it
        # looks like it should be get_poolfile_by_id(entry["files id"]).
        # Confirm against callers before changing.
        poolfile = get_poolfile_by_id(bin.poolfile_id)
        bin.poolfile_id = entry["files id"]
        poolfile = add_poolfile(filename, entry, entry["location id"], session)
        bin.poolfile_id = entry["files id"] = poolfile.file_id

    # Every binary must map to exactly one known source package.
    bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
    if len(bin_sources) != 1:
        raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
                                  (bin.package, bin.version, entry["architecture"],
                                   filename, bin.binarytype, u.pkg.changes["fingerprint"])

    bin.source_id = bin_sources[0].source_id

    if entry.has_key("built-using"):
        # Each Built-Using reference must also resolve to exactly one source.
        for srcname, version in entry["built-using"]:
            exsources = get_sources_from_name(srcname, version, session=session)
            if len(exsources) != 1:
                raise NoSourceFieldError, "Unable to find source package (%s = %s) in Built-Using for %s (%s), %s, file %s, type %s, signed by %s" % \
                                          (srcname, version, bin.package, bin.version, entry["architecture"],
                                           filename, bin.binarytype, u.pkg.changes["fingerprint"])

            bin.extra_sources.append(exsources[0])

    # Add and flush object so it has an ID

    # Associate the binary with every suite listed in the .changes.
    suite_names = u.pkg.changes["distribution"].keys()
    bin.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Deal with contents - disabled for now
    #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
    #     print "REJECT\nCould not determine contents of package %s" % bin.package
    #     session.rollback()
    #     raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)

    return bin, poolfile

__all__.append('add_deb_to_db')
2815 ################################################################################
class SourceACL(object):
    """One row of table source_acl."""
    def __init__(self, *args, **kwargs):

        return '<SourceACL %s>' % self.source_acl_id

__all__.append('SourceACL')
2826 ################################################################################
class SrcFormat(object):
    """One row of table src_format (a source package format name)."""
    def __init__(self, *args, **kwargs):

        return '<SrcFormat %s>' % (self.format_name)

__all__.append('SrcFormat')
2837 ################################################################################
# (display name, Suite attribute name) pairs, consumed by the per-field
# "Name: value" dump loop in class Suite below.
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
# Why the heck don't we have any UNIQUE constraints in table suite?
# TODO: Add UNIQUE constraints for appropriate columns.
class Suite(ORMObject):
    """One row of table suite (a distribution suite, e.g. I{unstable})."""

    def __init__(self, suite_name = None, version = None):
        self.suite_name = suite_name
        self.version = version

    def properties(self):
        # Attribute names exposed via the ORMObject helpers.
        return ['suite_name', 'version', 'sources_count', 'binaries_count', \

    def not_null_constraints(self):
        # Columns that must never be NULL for a valid row.
        return ['suite_name']
2869 def __eq__(self, val):
2870 if isinstance(val, str):
2871 return (self.suite_name == val)
2872 # This signals to use the normal comparison operator
2873 return NotImplemented
2875 def __ne__(self, val):
2876 if isinstance(val, str):
2877 return (self.suite_name != val)
2878 # This signals to use the normal comparison operator
2879 return NotImplemented
        # Render one "DisplayName: value" line per SUITE_FIELDS entry.
        for disp, field in SUITE_FIELDS:
            val = getattr(self, field, None)
                ret.append("%s: %s" % (disp, val))
        return "\n".join(ret)
    def get_architectures(self, skipsrc=False, skipall=False):
        """
        Returns list of Architecture objects

        @type skipsrc: boolean
        @param skipsrc: Whether to skip returning the 'source' architecture entry

        @type skipall: boolean
        @param skipall: Whether to skip returning the 'all' architecture entry

        @rtype: list
        @return: list of Architecture objects for the given name (may be empty)
        """
        # Architectures linked to this suite via suite_architectures.
        q = object_session(self).query(Architecture).with_parent(self)
            q = q.filter(Architecture.arch_string != 'source')
            q = q.filter(Architecture.arch_string != 'all')
        return q.order_by(Architecture.arch_string).all()
    def get_sources(self, source):
        """
        Returns a query object representing DBSource that is part of C{suite}.

          - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}

        @type source: string
        @param source: source package name

        @rtype: sqlalchemy.orm.query.Query
        @return: a query of DBSource
        """
        session = object_session(self)
        # Only sources with the given name that belong to this suite.
        return session.query(DBSource).filter_by(source = source). \
    def get_overridesuite(self):
        # Use the configured override suite when one is set; look it up by name.
        if self.overridesuite is None:
        return object_session(self).query(Suite).filter_by(suite_name=self.overridesuite).one()

__all__.append('Suite')
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Suite
    @return: Suite object for the requested suite name (None if not present)
    """
    q = session.query(Suite).filter_by(suite_name=suite)
    # A missing suite is reported via the return value, not an exception.
    except NoResultFound:

__all__.append('get_suite')
2964 ################################################################################
2966 # TODO: should be removed because the implementation is too trivial
2968 def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
2970 Returns list of Architecture objects for given C{suite} name
2973 @param suite: Suite name to search for
2975 @type skipsrc: boolean
2976 @param skipsrc: Whether to skip returning the 'source' architecture entry
2979 @type skipall: boolean
2980 @param skipall: Whether to skip returning the 'all' architecture entry
2983 @type session: Session
2984 @param session: Optional SQL session object (a temporary one will be
2985 generated if not supplied)
2988 @return: list of Architecture objects for the given name (may be empty)
2991 return get_suite(suite, session).get_architectures(skipsrc, skipall)
2993 __all__.append('get_suite_architectures')
2995 ################################################################################
class SuiteSrcFormat(object):
    """Association row: one source format accepted by one suite."""
    def __init__(self, *args, **kwargs):

        return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)

__all__.append('SuiteSrcFormat')
def get_suite_src_formats(suite, session=None):
    """
    Returns list of allowed SrcFormat for C{suite}.

    @type suite: str
    @param suite: Suite name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: the list of allowed source formats for I{suite}
    """
    # Join src_format -> suite_src_formats -> suite to find the allowed set.
    q = session.query(SrcFormat)
    q = q.join(SuiteSrcFormat)
    q = q.join(Suite).filter_by(suite_name=suite)
    q = q.order_by('format_name')

__all__.append('get_suite_src_formats')
3031 ################################################################################
class Uid(ORMObject):
    """One row of table uid (a key holder identity, linked to Fingerprint)."""
    def __init__(self, uid = None, name = None):
3038 def __eq__(self, val):
3039 if isinstance(val, str):
3040 return (self.uid == val)
3041 # This signals to use the normal comparison operator
3042 return NotImplemented
3044 def __ne__(self, val):
3045 if isinstance(val, str):
3046 return (self.uid != val)
3047 # This signals to use the normal comparison operator
3048 return NotImplemented
    def properties(self):
        # Attribute names exposed via the ORMObject helpers.
        return ['uid', 'name', 'fingerprint']

    def not_null_constraints(self):

__all__.append('Uid')
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: Uid
    @return: the uid object for the given uidname
    """
    q = session.query(Uid).filter_by(uid=uidname)
    # Insert a new row only when no matching uid exists yet.
    except NoResultFound:
        session.commit_or_flush()

__all__.append('get_or_set_uid')
def get_uid_from_fingerprint(fpr, session=None):
    # Resolve the Uid attached to the key with fingerprint C{fpr}.
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)
    # An unknown fingerprint is a normal outcome, not an error.
    except NoResultFound:

__all__.append('get_uid_from_fingerprint')
3104 ################################################################################
class UploadBlock(object):
    """One row of table upload_blocks (linked to Fingerprint and Uid)."""
    def __init__(self, *args, **kwargs):

        return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)

__all__.append('UploadBlock')
3115 ################################################################################
class MetadataKey(ORMObject):
    """A metadata field name (one row of table metadata_keys)."""
    def __init__(self, key = None):

    def properties(self):

    def not_null_constraints(self):

__all__.append('MetadataKey')
def get_or_set_metadatakey(keyname, session=None):
    """
    Returns MetadataKey object for given keyname.

    If no matching keyname is found, a row is inserted.

    @type keyname: string
    @param keyname: The keyname to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: MetadataKey
    @return: the metadatakey object for the given keyname
    """
    q = session.query(MetadataKey).filter_by(key=keyname)
    # Insert a new key row only when no matching keyname exists yet.
    except NoResultFound:
        ret = MetadataKey(keyname)
        session.commit_or_flush()

__all__.append('get_or_set_metadatakey')
3161 ################################################################################
class BinaryMetadata(ORMObject):
    """One key/value metadata entry attached to a binary package."""
    def __init__(self, key = None, value = None, binary = None):
        self.binary = binary

    def properties(self):
        # Attribute names exposed via the ORMObject helpers.
        return ['binary', 'key', 'value']

    def not_null_constraints(self):

__all__.append('BinaryMetadata')
3177 ################################################################################
class SourceMetadata(ORMObject):
    """One key/value metadata entry attached to a source package."""
    def __init__(self, key = None, value = None, source = None):
        self.source = source

    def properties(self):
        # Attribute names exposed via the ORMObject helpers.
        return ['source', 'key', 'value']

    def not_null_constraints(self):

__all__.append('SourceMetadata')
3193 ################################################################################
class VersionCheck(ORMObject):
    """One row of table version_check: a named check linking two suites."""
    def __init__(self, *args, **kwargs):

    def properties(self):
        #return ['suite_id', 'check', 'reference_id']

    def not_null_constraints(self):
        return ['suite', 'check', 'reference']

__all__.append('VersionCheck')
def get_version_checks(suite_name, check = None, session = None):
    """
    Returns the VersionCheck rows for C{suite_name}, optionally limited to
    one check type (e.g. 'Enhances').
    """
    suite = get_suite(suite_name, session)

    # Make sure that what we return is iterable so that list comprehensions
    # involving this don't cause a traceback
    q = session.query(VersionCheck).filter_by(suite=suite)
        q = q.filter_by(check=check)

__all__.append('get_version_checks')
3222 ################################################################################
3224 class DBConn(object):
3226 database module init.
3230 def __init__(self, *args, **kwargs):
3231 self.__dict__ = self.__shared_state
3233 if not getattr(self, 'initialised', False):
3234 self.initialised = True
3235 self.debug = kwargs.has_key('debug')
3238 def __setuptables(self):
3245 'binaries_metadata',
3249 'build_queue_files',
3250 'build_queue_policy_files',
3255 'changes_pending_binaries',
3256 'changes_pending_files',
3257 'changes_pending_source',
3258 'changes_pending_files_map',
3259 'changes_pending_source_files',
3260 'changes_pool_files',
3262 'external_overrides',
3263 'extra_src_references',
3272 # TODO: the maintainer column in table override should be removed.
3286 'suite_architectures',
3287 'suite_build_queue_copy',
3288 'suite_src_formats',
3295 'almost_obsolete_all_associations',
3296 'almost_obsolete_src_associations',
3297 'any_associations_source',
3298 'bin_associations_binaries',
3299 'binaries_suite_arch',
3300 'binfiles_suite_component_arch',
3303 'newest_all_associations',
3304 'newest_any_associations',
3306 'newest_src_association',
3307 'obsolete_all_associations',
3308 'obsolete_any_associations',
3309 'obsolete_any_by_all_associations',
3310 'obsolete_src_associations',
3312 'src_associations_bin',
3313 'src_associations_src',
3314 'suite_arch_by_name',
3317 for table_name in tables:
3318 table = Table(table_name, self.db_meta, \
3319 autoload=True, useexisting=True)
3320 setattr(self, 'tbl_%s' % table_name, table)
3322 for view_name in views:
3323 view = Table(view_name, self.db_meta, autoload=True)
3324 setattr(self, 'view_%s' % view_name, view)
3326 def __setupmappers(self):
3327 mapper(Architecture, self.tbl_architecture,
3328 properties = dict(arch_id = self.tbl_architecture.c.id,
3329 suites = relation(Suite, secondary=self.tbl_suite_architectures,
3330 order_by='suite_name',
3331 backref=backref('architectures', order_by='arch_string'))),
3332 extension = validator)
3334 mapper(Archive, self.tbl_archive,
3335 properties = dict(archive_id = self.tbl_archive.c.id,
3336 archive_name = self.tbl_archive.c.name))
3338 mapper(BuildQueue, self.tbl_build_queue,
3339 properties = dict(queue_id = self.tbl_build_queue.c.id))
3341 mapper(BuildQueueFile, self.tbl_build_queue_files,
3342 properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
3343 poolfile = relation(PoolFile, backref='buildqueueinstances')))
3345 mapper(BuildQueuePolicyFile, self.tbl_build_queue_policy_files,
3347 build_queue = relation(BuildQueue, backref='policy_queue_files'),
3348 file = relation(ChangePendingFile, lazy='joined')))
3350 mapper(DBBinary, self.tbl_binaries,
3351 properties = dict(binary_id = self.tbl_binaries.c.id,
3352 package = self.tbl_binaries.c.package,
3353 version = self.tbl_binaries.c.version,
3354 maintainer_id = self.tbl_binaries.c.maintainer,
3355 maintainer = relation(Maintainer),
3356 source_id = self.tbl_binaries.c.source,
3357 source = relation(DBSource, backref='binaries'),
3358 arch_id = self.tbl_binaries.c.architecture,
3359 architecture = relation(Architecture),
3360 poolfile_id = self.tbl_binaries.c.file,
3361 poolfile = relation(PoolFile, backref=backref('binary', uselist = False)),
3362 binarytype = self.tbl_binaries.c.type,
3363 fingerprint_id = self.tbl_binaries.c.sig_fpr,
3364 fingerprint = relation(Fingerprint),
3365 install_date = self.tbl_binaries.c.install_date,
3366 suites = relation(Suite, secondary=self.tbl_bin_associations,
3367 backref=backref('binaries', lazy='dynamic')),
3368 extra_sources = relation(DBSource, secondary=self.tbl_extra_src_references,
3369 backref=backref('extra_binary_references', lazy='dynamic')),
3370 key = relation(BinaryMetadata, cascade='all',
3371 collection_class=attribute_mapped_collection('key'))),
3372 extension = validator)
3374 mapper(BinaryACL, self.tbl_binary_acl,
3375 properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))
3377 mapper(BinaryACLMap, self.tbl_binary_acl_map,
3378 properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
3379 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
3380 architecture = relation(Architecture)))
3382 mapper(Component, self.tbl_component,
3383 properties = dict(component_id = self.tbl_component.c.id,
3384 component_name = self.tbl_component.c.name),
3385 extension = validator)
3387 mapper(DBConfig, self.tbl_config,
3388 properties = dict(config_id = self.tbl_config.c.id))
3390 mapper(DSCFile, self.tbl_dsc_files,
3391 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
3392 source_id = self.tbl_dsc_files.c.source,
3393 source = relation(DBSource),
3394 poolfile_id = self.tbl_dsc_files.c.file,
3395 poolfile = relation(PoolFile)))
3397 mapper(ExternalOverride, self.tbl_external_overrides,
3399 suite_id = self.tbl_external_overrides.c.suite,
3400 suite = relation(Suite),
3401 component_id = self.tbl_external_overrides.c.component,
3402 component = relation(Component)))
3404 mapper(PoolFile, self.tbl_files,
3405 properties = dict(file_id = self.tbl_files.c.id,
3406 filesize = self.tbl_files.c.size,
3407 location_id = self.tbl_files.c.location,
3408 location = relation(Location,
3409 # using lazy='dynamic' in the back
3410 # reference because we have A LOT of
3411 # files in one location
3412 backref=backref('files', lazy='dynamic'))),
3413 extension = validator)
3415 mapper(Fingerprint, self.tbl_fingerprint,
3416 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
3417 uid_id = self.tbl_fingerprint.c.uid,
3418 uid = relation(Uid),
3419 keyring_id = self.tbl_fingerprint.c.keyring,
3420 keyring = relation(Keyring),
3421 source_acl = relation(SourceACL),
3422 binary_acl = relation(BinaryACL)),
3423 extension = validator)
3425 mapper(Keyring, self.tbl_keyrings,
3426 properties = dict(keyring_name = self.tbl_keyrings.c.name,
3427 keyring_id = self.tbl_keyrings.c.id))
3429 mapper(DBChange, self.tbl_changes,
3430 properties = dict(change_id = self.tbl_changes.c.id,
3431 poolfiles = relation(PoolFile,
3432 secondary=self.tbl_changes_pool_files,
3433 backref="changeslinks"),
3434 seen = self.tbl_changes.c.seen,
3435 source = self.tbl_changes.c.source,
3436 binaries = self.tbl_changes.c.binaries,
3437 architecture = self.tbl_changes.c.architecture,
3438 distribution = self.tbl_changes.c.distribution,
3439 urgency = self.tbl_changes.c.urgency,
3440 maintainer = self.tbl_changes.c.maintainer,
3441 changedby = self.tbl_changes.c.changedby,
3442 date = self.tbl_changes.c.date,
3443 version = self.tbl_changes.c.version,
3444 files = relation(ChangePendingFile,
3445 secondary=self.tbl_changes_pending_files_map,
3446 backref="changesfile"),
3447 in_queue_id = self.tbl_changes.c.in_queue,
3448 in_queue = relation(PolicyQueue,
3449 primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
3450 approved_for_id = self.tbl_changes.c.approved_for))
3452 mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
3453 properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))
3455 mapper(ChangePendingFile, self.tbl_changes_pending_files,
3456 properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
3457 filename = self.tbl_changes_pending_files.c.filename,
3458 size = self.tbl_changes_pending_files.c.size,
3459 md5sum = self.tbl_changes_pending_files.c.md5sum,
3460 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
3461 sha256sum = self.tbl_changes_pending_files.c.sha256sum))
3463 mapper(ChangePendingSource, self.tbl_changes_pending_source,
3464 properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
3465 change = relation(DBChange),
3466 maintainer = relation(Maintainer,
3467 primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
3468 changedby = relation(Maintainer,
3469 primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
3470 fingerprint = relation(Fingerprint),
3471 source_files = relation(ChangePendingFile,
3472 secondary=self.tbl_changes_pending_source_files,
3473 backref="pending_sources")))
3476 mapper(KeyringACLMap, self.tbl_keyring_acl_map,
3477 properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
3478 keyring = relation(Keyring, backref="keyring_acl_map"),
3479 architecture = relation(Architecture)))
3481 mapper(Location, self.tbl_location,
3482 properties = dict(location_id = self.tbl_location.c.id,
3483 component_id = self.tbl_location.c.component,
3484 component = relation(Component, backref='location'),
3485 archive_id = self.tbl_location.c.archive,
3486 archive = relation(Archive),
3487 # FIXME: the 'type' column is old cruft and
3488 # should be removed in the future.
3489 archive_type = self.tbl_location.c.type),
3490 extension = validator)
3492 mapper(Maintainer, self.tbl_maintainer,
3493 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
3494 maintains_sources = relation(DBSource, backref='maintainer',
3495 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
3496 changed_sources = relation(DBSource, backref='changedby',
3497 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
3498 extension = validator)
3500 mapper(NewComment, self.tbl_new_comments,
3501 properties = dict(comment_id = self.tbl_new_comments.c.id))
3503 mapper(Override, self.tbl_override,
3504 properties = dict(suite_id = self.tbl_override.c.suite,
3505 suite = relation(Suite, \
3506 backref=backref('overrides', lazy='dynamic')),
3507 package = self.tbl_override.c.package,
3508 component_id = self.tbl_override.c.component,
3509 component = relation(Component, \
3510 backref=backref('overrides', lazy='dynamic')),
3511 priority_id = self.tbl_override.c.priority,
3512 priority = relation(Priority, \
3513 backref=backref('overrides', lazy='dynamic')),
3514 section_id = self.tbl_override.c.section,
3515 section = relation(Section, \
3516 backref=backref('overrides', lazy='dynamic')),
3517 overridetype_id = self.tbl_override.c.type,
3518 overridetype = relation(OverrideType, \
3519 backref=backref('overrides', lazy='dynamic'))))
3521 mapper(OverrideType, self.tbl_override_type,
3522 properties = dict(overridetype = self.tbl_override_type.c.type,
3523 overridetype_id = self.tbl_override_type.c.id))
3525 mapper(PolicyQueue, self.tbl_policy_queue,
3526 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))
3528 mapper(Priority, self.tbl_priority,
3529 properties = dict(priority_id = self.tbl_priority.c.id))
3531 mapper(Section, self.tbl_section,
3532 properties = dict(section_id = self.tbl_section.c.id,
3533 section=self.tbl_section.c.section))
3535 mapper(DBSource, self.tbl_source,
3536 properties = dict(source_id = self.tbl_source.c.id,
3537 version = self.tbl_source.c.version,
3538 maintainer_id = self.tbl_source.c.maintainer,
3539 poolfile_id = self.tbl_source.c.file,
3540 poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
3541 fingerprint_id = self.tbl_source.c.sig_fpr,
3542 fingerprint = relation(Fingerprint),
3543 changedby_id = self.tbl_source.c.changedby,
3544 srcfiles = relation(DSCFile,
3545 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
3546 suites = relation(Suite, secondary=self.tbl_src_associations,
3547 backref=backref('sources', lazy='dynamic')),
3548 uploaders = relation(Maintainer,
3549 secondary=self.tbl_src_uploaders),
3550 key = relation(SourceMetadata, cascade='all',
3551 collection_class=attribute_mapped_collection('key'))),
3552 extension = validator)
3554 mapper(SourceACL, self.tbl_source_acl,
3555 properties = dict(source_acl_id = self.tbl_source_acl.c.id))
3557 mapper(SrcFormat, self.tbl_src_format,
3558 properties = dict(src_format_id = self.tbl_src_format.c.id,
3559 format_name = self.tbl_src_format.c.format_name))
3561 mapper(Suite, self.tbl_suite,
3562 properties = dict(suite_id = self.tbl_suite.c.id,
3563 policy_queue = relation(PolicyQueue),
3564 copy_queues = relation(BuildQueue,
3565 secondary=self.tbl_suite_build_queue_copy)),
3566 extension = validator)
3568 mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
3569 properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
3570 suite = relation(Suite, backref='suitesrcformats'),
3571 src_format_id = self.tbl_suite_src_formats.c.src_format,
3572 src_format = relation(SrcFormat)))
3574 mapper(Uid, self.tbl_uid,
3575 properties = dict(uid_id = self.tbl_uid.c.id,
3576 fingerprint = relation(Fingerprint)),
3577 extension = validator)
3579 mapper(UploadBlock, self.tbl_upload_blocks,
3580 properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
3581 fingerprint = relation(Fingerprint, backref="uploadblocks"),
3582 uid = relation(Uid, backref="uploadblocks")))
3584 mapper(BinContents, self.tbl_bin_contents,
3586 binary = relation(DBBinary,
3587 backref=backref('contents', lazy='dynamic', cascade='all')),
3588 file = self.tbl_bin_contents.c.file))
3590 mapper(SrcContents, self.tbl_src_contents,
3592 source = relation(DBSource,
3593 backref=backref('contents', lazy='dynamic', cascade='all')),
3594 file = self.tbl_src_contents.c.file))
3596 mapper(MetadataKey, self.tbl_metadata_keys,
3598 key_id = self.tbl_metadata_keys.c.key_id,
3599 key = self.tbl_metadata_keys.c.key))
3601 mapper(BinaryMetadata, self.tbl_binaries_metadata,
3603 binary_id = self.tbl_binaries_metadata.c.bin_id,
3604 binary = relation(DBBinary),
3605 key_id = self.tbl_binaries_metadata.c.key_id,
3606 key = relation(MetadataKey),
3607 value = self.tbl_binaries_metadata.c.value))
3609 mapper(SourceMetadata, self.tbl_source_metadata,
3611 source_id = self.tbl_source_metadata.c.src_id,
3612 source = relation(DBSource),
3613 key_id = self.tbl_source_metadata.c.key_id,
3614 key = relation(MetadataKey),
3615 value = self.tbl_source_metadata.c.value))
3617 mapper(VersionCheck, self.tbl_version_check,
3619 suite_id = self.tbl_version_check.c.suite,
3620 suite = relation(Suite, primaryjoin=self.tbl_version_check.c.suite==self.tbl_suite.c.id),
3621 reference_id = self.tbl_version_check.c.reference,
3622 reference = relation(Suite, primaryjoin=self.tbl_version_check.c.reference==self.tbl_suite.c.id, lazy='joined')))
3624 ## Connection functions
3625 def __createconn(self):
3626 from config import Config
3628 if cnf.has_key("DB::Service"):
3629 connstr = "postgresql://service=%s" % cnf["DB::Service"]
3630 elif cnf.has_key("DB::Host"):
3632 connstr = "postgresql://%s" % cnf["DB::Host"]
3633 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
3634 connstr += ":%s" % cnf["DB::Port"]
3635 connstr += "/%s" % cnf["DB::Name"]
3638 connstr = "postgresql:///%s" % cnf["DB::Name"]
3639 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
3640 connstr += "?port=%s" % cnf["DB::Port"]
3642 engine_args = { 'echo': self.debug }
3643 if cnf.has_key('DB::PoolSize'):
3644 engine_args['pool_size'] = int(cnf['DB::PoolSize'])
3645 if cnf.has_key('DB::MaxOverflow'):
3646 engine_args['max_overflow'] = int(cnf['DB::MaxOverflow'])
3647 if sa_major_version == '0.6' and cnf.has_key('DB::Unicode') and \
3648 cnf['DB::Unicode'] == 'false':
3649 engine_args['use_native_unicode'] = False
3651 # Monkey patch a new dialect in in order to support service= syntax
3652 import sqlalchemy.dialects.postgresql
3653 from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
3654 class PGDialect_psycopg2_dak(PGDialect_psycopg2):
3655 def create_connect_args(self, url):
3656 if str(url).startswith('postgresql://service='):
3658 servicename = str(url)[21:]
3659 return (['service=%s' % servicename], {})
3661 return PGDialect_psycopg2.create_connect_args(self, url)
3663 sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak
3665 self.db_pg = create_engine(connstr, **engine_args)
3666 self.db_meta = MetaData()
3667 self.db_meta.bind = self.db_pg
3668 self.db_smaker = sessionmaker(bind=self.db_pg,
3672 self.__setuptables()
3673 self.__setupmappers()
3674 self.pid = os.getpid()
3676 def session(self, work_mem = 0):
3678 Returns a new session object. If a work_mem parameter is provided a new
3679 transaction is started and the work_mem parameter is set for this
3680 transaction. The work_mem parameter is measured in MB. A default value
3681 will be used if the parameter is not set.
3683 # reinitialize DBConn in new processes
3684 if self.pid != os.getpid():
3687 session = self.db_smaker()
3689 session.execute("SET LOCAL work_mem TO '%d MB'" % work_mem)
# Export the connection singleton class as part of this module's public API.
__all__.append('DBConn')