5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
38 from os.path import normpath
50 import simplejson as json
52 from datetime import datetime, timedelta
53 from errno import ENOENT
54 from tempfile import mkstemp, mkdtemp
55 from subprocess import Popen, PIPE
56 from tarfile import TarFile
58 from inspect import getargspec
61 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
63 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
64 backref, MapperExtension, EXT_CONTINUE, object_mapper, clear_mappers
65 from sqlalchemy import types as sqltypes
66 from sqlalchemy.orm.collections import attribute_mapped_collection
67 from sqlalchemy.ext.associationproxy import association_proxy
69 # Don't remove this, we re-export the exceptions to scripts which import us
70 from sqlalchemy.exc import *
71 from sqlalchemy.orm.exc import NoResultFound
73 # Only import Config until Queue stuff is changed to store its config
75 from config import Config
76 from textutils import fix_maintainer
77 from dak_exceptions import DBUpdateError, NoSourceFieldError, FileExistsError
79 # suppress some deprecation warnings in squeeze related to sqlalchemy
# NOTE(review): the filterwarnings() call below is truncated in this copy --
# the warning category argument and the closing parenthesis are missing.
# Confirm against the upstream file before editing this statement.
81 warnings.filterwarnings('ignore', \
82 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
################################################################################

# Patch in support for the debversion field type so that it works during
# reflection.  The base class to use differs between SQLAlchemy releases,
# so probe for the newer attribute first.
try:
    # that is for sqlalchemy 0.6
    UserDefinedType = sqltypes.UserDefinedType
except AttributeError:
    # this one for sqlalchemy 0.5
    UserDefinedType = sqltypes.TypeEngine
class DebVersion(UserDefinedType):
    """
    Column type for the PostgreSQL 'debversion' type.  Values pass through
    unchanged in both directions; the class only teaches SQLAlchemy the
    column type name so DDL and reflection work.
    """

    def get_col_spec(self):
        # Type name emitted in DDL for columns of this type.
        return "debversion"

    def bind_processor(self, dialect):
        # No conversion is needed when sending values to the database.
        return None

    # ' = None' is needed for sqlalchemy 0.5:
    def result_processor(self, dialect, coltype = None):
        # No conversion is needed when fetching values from the database.
        return None
# Register the reflection hook; the dialect module moved between SQLAlchemy
# major versions, so refuse to run on anything we have not been ported to.
sa_major_version = sqlalchemy.__version__[0:3]
if sa_major_version in ["0.5", "0.6"]:
    from sqlalchemy.databases import postgres
    postgres.ischema_names['debversion'] = DebVersion
else:
    raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")

################################################################################

# Re-exported names; extended via __all__.append() by the definitions below.
__all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
120 ################################################################################
def session_wrapper(fn):
    """
    Wrapper around common ".., session=None):" handling. If the wrapped
    function is called without passing 'session', we create a local one
    and destroy it when the function ends.

    Also attaches a commit_or_flush method to the session; if we created a
    local session, this is a synonym for session.commit(), otherwise it is a
    synonym for session.flush().
    """

    def wrapped(*args, **kwargs):
        private_transaction = False

        # Find the session object
        session = kwargs.get('session')

        if session is None:
            # getargspec comes from the file-level inspect import.
            if len(args) <= len(getargspec(fn)[0]) - 1:
                # No session specified as last argument or in kwargs
                private_transaction = True
                session = kwargs['session'] = DBConn().session()
            else:
                # Session is last argument in args
                session = args[-1]
                if session is None:
                    args = list(args)
                    session = args[-1] = DBConn().session()
                    private_transaction = True

        if private_transaction:
            session.commit_or_flush = session.commit
        else:
            session.commit_or_flush = session.flush

        try:
            return fn(*args, **kwargs)
        finally:
            if private_transaction:
                # We created a session; close it.
                session.close()

    wrapped.__doc__ = fn.__doc__
    # __name__ is the portable spelling of the Python-2-only func_name alias.
    wrapped.__name__ = fn.__name__

    return wrapped
169 __all__.append('session_wrapper')
171 ################################################################################
173 class ORMObject(object):
# NOTE(review): this copy is garbled -- every line carries a stray leading
# line number, indentation is lost, and several statements are missing
# (e.g. the 'def json(self):' header and its 'data = {}' initialiser, the
# 'continue'/'value = len(value)'/'value = repr(value)' branches of json(),
# the 'def classname'/'def __repr__'/'def __str__'/'def validate' headers,
# and the rollback/return tail of clone()).  The text below is kept
# byte-identical; restore the missing lines from upstream daklib/dbconn.py
# before editing.
175 ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
176 derived classes must implement the properties() method.
179 def properties(self):
181 This method should be implemented by all derived classes and returns a
182 list of the important properties. The properties 'created' and
183 'modified' will be added automatically. A suffix '_count' should be
184 added to properties that are lists or query objects. The most important
185 property name should be returned as the first element in the list
186 because it is used by repr().
# json(): serialise properties() plus created/modified into a JSON string.
192 Returns a JSON representation of the object based on the properties
193 returned from the properties() method.
196 # add created and modified
197 all_properties = self.properties() + ['created', 'modified']
198 for property in all_properties:
199 # check for list or query
200 if property[-6:] == '_count':
201 real_property = property[:-6]
202 if not hasattr(self, real_property):
204 value = getattr(self, real_property)
205 if hasattr(value, '__len__'):
208 elif hasattr(value, 'count'):
209 # query (but not during validation)
210 if self.in_validation:
212 value = value.count()
214 raise KeyError('Do not understand property %s.' % property)
216 if not hasattr(self, property):
219 value = getattr(self, property)
223 elif isinstance(value, ORMObject):
224 # use repr() for ORMObject types
227 # we want a string for all other types because json cannot
230 data[property] = value
231 return json.dumps(data)
# classname(): helper used by the repr/str implementations below.
235 Returns the name of the class.
237 return type(self).__name__
241 Returns a short string representation of the object using the first
242 element from the properties() method.
244 primary_property = self.properties()[0]
245 value = getattr(self, primary_property)
246 return '<%s %s>' % (self.classname(), str(value))
250 Returns a human readable form of the object using the properties()
253 return '<%s %s>' % (self.classname(), self.json())
255 def not_null_constraints(self):
257 Returns a list of properties that must be not NULL. Derived classes
258 should override this method if needed.
# Class-level error template and re-entrancy flag used by validate()/json().
262 validation_message = \
263 "Validation failed because property '%s' must not be empty in object\n%s"
265 in_validation = False
269 This function validates the not NULL constraints as returned by
270 not_null_constraints(). It raises the DBUpdateError exception if
273 for property in self.not_null_constraints():
274 # TODO: It is a bit awkward that the mapper configuration allow
275 # directly setting the numeric _id columns. We should get rid of it
277 if hasattr(self, property + '_id') and \
278 getattr(self, property + '_id') is not None:
280 if not hasattr(self, property) or getattr(self, property) is None:
281 # str() might lead to races due to a 2nd flush
282 self.in_validation = True
283 message = self.validation_message % (property, str(self))
284 self.in_validation = False
285 raise DBUpdateError(message)
# get(): classmethod shortcut for session.query(cls).get(primary_key).
289 def get(cls, primary_key, session = None):
291 This is a support function that allows getting an object by its primary
294 Architecture.get(3[, session])
296 instead of the more verbose
298 session.query(Architecture).get(3)
300 return session.query(cls).get(primary_key)
302 def session(self, replace = False):
304 Returns the current session that is associated with the object. May
305 return None is object is in detached state.
308 return object_session(self)
310 def clone(self, session = None):
312 Clones the current object in a new session and returns the new clone. A
313 fresh session is created if the optional session parameter is not
314 provided. The function will fail if a session is provided and has
317 RATIONALE: SQLAlchemy's session is not thread safe. This method clones
318 an existing object to allow several threads to work with their own
319 instances of an ORMObject.
321 WARNING: Only persistent (committed) objects can be cloned. Changes
322 made to the original object that are not committed yet will get lost.
323 The session of the new object will always be rolled back to avoid
327 if self.session() is None:
328 raise RuntimeError( \
329 'Method clone() failed for detached object:\n%s' % self)
330 self.session().flush()
331 mapper = object_mapper(self)
332 primary_key = mapper.primary_key_from_instance(self)
333 object_class = self.__class__
335 session = DBConn().session()
336 elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
337 raise RuntimeError( \
338 'Method clone() failed due to unflushed changes in session.')
339 new_object = session.query(object_class).get(primary_key)
341 if new_object is None:
342 raise RuntimeError( \
343 'Method clone() failed for non-persistent object:\n%s' % self)
# NOTE(review): the 'return new_object' tail of clone() is missing here.
346 __all__.append('ORMObject')
348 ################################################################################
class Validator(MapperExtension):
    """
    This class calls the validate() method for each instance for the
    'before_update' and 'before_insert' events. A global object validator is
    used for configuring the individual mappers.
    """

    def before_update(self, mapper, connection, instance):
        # Enforce NOT NULL constraints before the UPDATE is emitted.
        instance.validate()
        return EXT_CONTINUE

    def before_insert(self, mapper, connection, instance):
        # Enforce NOT NULL constraints before the INSERT is emitted.
        instance.validate()
        return EXT_CONTINUE

# Single shared extension instance used when configuring the mappers.
validator = Validator()
367 ################################################################################
class Architecture(ORMObject):
    """A row of the 'architecture' table (e.g. 'amd64')."""

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Allow comparing an Architecture directly against its name string.
        if isinstance(val, str):
            return (self.arch_string == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.arch_string != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']

__all__.append('Architecture')
@session_wrapper
def get_architecture(architecture, session=None):
    """
    Returns the Architecture object for the given C{architecture} name
    (the previous summary wrongly claimed a database id is returned).

    @type architecture: string
    @param architecture: The name of the architecture

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Architecture
    @return: Architecture object for the given arch (None if not present)
    """

    q = session.query(Architecture).filter_by(arch_string=architecture)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_architecture')
# TODO: should be removed because the implementation is too trivial
@session_wrapper
def get_architecture_suites(architecture, session=None):
    """
    Returns list of Suite objects for given C{architecture} name

    @type architecture: str
    @param architecture: Architecture name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Suite objects for the given name (may be empty)
    """

    return get_architecture(architecture, session).suites

__all__.append('get_architecture_suites')
440 ################################################################################
class Archive(object):
    """A row of the 'archive' table; attributes are filled in by the mapper."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # archive_name is provided by the ORM mapping.
        return '<Archive %s>' % self.archive_name
449 __all__.append('Archive')
@session_wrapper
def get_archive(archive, session=None):
    """
    Returns the Archive object for the given C{archive} name.

    @type archive: string
    @param archive: the name of the archive

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Archive
    @return: Archive object for the given name (None if not present)
    """

    # Archive names are stored lower-case.
    archive = archive.lower()

    q = session.query(Archive).filter_by(archive_name=archive)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_archive')
478 ################################################################################
class BinContents(ORMObject):
    """Association of a contents file path with the binary that ships it."""

    def __init__(self, file = None, binary = None):
        self.file = file
        self.binary = binary

    def properties(self):
        return ['file', 'binary']

__all__.append('BinContents')
490 ################################################################################
def subprocess_setup():
    # Python installs a SIGPIPE handler by default. This is usually not what
    # non-Python subprocesses expect, so restore the default disposition
    # before the child process execs.
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
497 class DBBinary(ORMObject):
# NOTE(review): garbled copy -- stray leading line numbers, indentation lost
# and statements missing (e.g. 'self.source = source' in __init__, the
# accessor header above 'return self.binary_id', the yield statements of
# scan_contents() and the close/return tail of read_control()).  Keep the
# text byte-identical; restore missing lines from upstream before editing.
498 def __init__(self, package = None, source = None, version = None, \
499 maintainer = None, architecture = None, poolfile = None, \
501 self.package = package
503 self.version = version
504 self.maintainer = maintainer
505 self.architecture = architecture
506 self.poolfile = poolfile
507 self.binarytype = binarytype
511 return self.binary_id
513 def properties(self):
514 return ['package', 'version', 'maintainer', 'source', 'architecture', \
515 'poolfile', 'binarytype', 'fingerprint', 'install_date', \
516 'suites_count', 'binary_id', 'contents_count', 'extra_sources']
518 def not_null_constraints(self):
519 return ['package', 'version', 'maintainer', 'source', 'poolfile', \
# metadata: dict-like proxy over this binary's key/value metadata rows.
522 metadata = association_proxy('key', 'value')
524 def get_component_name(self):
525 return self.poolfile.location.component.component_name
527 def scan_contents(self):
529 Yields the contents of the package. Only regular files are yielded and
530 the path names are normalized after converting them from either utf-8
531 or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the
532 package does not contain any regular file.
534 fullpath = self.poolfile.fullpath
535 dpkg = Popen(['dpkg-deb', '--fsys-tarfile', fullpath], stdout = PIPE,
536 preexec_fn = subprocess_setup)
537 tar = TarFile.open(fileobj = dpkg.stdout, mode = 'r|')
538 for member in tar.getmembers():
539 if not member.isdir():
540 name = normpath(member.name)
541 # enforce proper utf-8 encoding
544 except UnicodeDecodeError:
545 name = name.decode('iso8859-1').encode('utf-8')
551 def read_control(self):
553 Reads the control information from a binary.
556 @return: stanza text of the control section.
559 fullpath = self.poolfile.fullpath
560 deb_file = open(fullpath, 'r')
561 stanza = apt_inst.debExtractControl(deb_file)
# NOTE(review): deb_file does not appear to be closed before returning --
# confirm against upstream and close it deterministically there.
566 def read_control_fields(self):
568 Reads the control information from a binary and return
572 @return: fields of the control section as a dictionary.
575 stanza = self.read_control()
576 return apt_pkg.TagSection(stanza)
578 __all__.append('DBBinary')
@session_wrapper
def get_suites_binary_in(package, session=None):
    """
    Returns list of Suite objects which given C{package} name is in

    @type package: str
    @param package: DBBinary package name to search for

    @rtype: list
    @return: list of Suite objects for the given package
    """

    return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()

__all__.append('get_suites_binary_in')
@session_wrapper
def get_component_by_package_suite(package, suite_list, arch_list=None, session=None):
    """
    Returns the component name of the newest binary package in suite_list or
    None if no package is found. The result can be optionally filtered by a list
    of architecture names.

    @type package: str
    @param package: DBBinary package name to search for

    @type suite_list: list of str
    @param suite_list: list of suite_name items

    @type arch_list: list of str
    @param arch_list: optional list of arch_string items that defaults to []

    @rtype: str or NoneType
    @return: name of component or None
    """

    # None sentinel instead of a mutable [] default; behaviour is unchanged
    # for callers that do not pass arch_list.
    if arch_list is None:
        arch_list = []

    q = session.query(DBBinary).filter_by(package = package). \
        join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
    if len(arch_list) > 0:
        q = q.join(DBBinary.architecture). \
            filter(Architecture.arch_string.in_(arch_list))
    # Newest version wins.
    binary = q.order_by(desc(DBBinary.version)).first()
    if binary is None:
        return None
    else:
        return binary.get_component_name()

__all__.append('get_component_by_package_suite')
629 ################################################################################
class BinaryACL(object):
    """A row of the binary ACL table; attributes are filled in by the mapper."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BinaryACL %s>' % self.binary_acl_id
638 __all__.append('BinaryACL')
640 ################################################################################
class BinaryACLMap(object):
    """A row of the binary ACL map table; attributes come from the mapper."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BinaryACLMap %s>' % self.binary_acl_map_id
649 __all__.append('BinaryACLMap')
651 ################################################################################
656 ArchiveDir "%(archivepath)s";
657 OverrideDir "%(overridedir)s";
658 CacheDir "%(cachedir)s";
663 Packages::Compress ". bzip2 gzip";
664 Sources::Compress ". bzip2 gzip";
669 bindirectory "incoming"
674 BinOverride "override.sid.all3";
675 BinCacheDB "packages-accepted.db";
677 FileList "%(filelist)s";
680 Packages::Extensions ".deb .udeb";
683 bindirectory "incoming/"
686 BinOverride "override.sid.all3";
687 SrcOverride "override.sid.all3.src";
688 FileList "%(filelist)s";
692 class BuildQueue(object):
# NOTE(review): garbled copy -- stray leading line numbers, indentation lost
# and many statements missing (the __repr__ header, 'return' statements,
# the try/except/finally scaffolding of write_metadata() and
# clean_and_update(), the loop headers around the unlink calls, and the
# tails of several methods).  Keep the text byte-identical; restore the
# missing lines from upstream daklib/dbconn.py before editing.
693 def __init__(self, *args, **kwargs):
697 return '<BuildQueue %s>' % self.queue_name
# write_metadata(): regenerate Packages/Sources/Release metadata for this
# queue directory with apt-ftparchive and (optionally) sign the Release.
699 def write_metadata(self, starttime, force=False):
700 # Do we write out metafiles?
701 if not (force or self.generate_metadata):
704 session = DBConn().session().object_session(self)
706 fl_fd = fl_name = ac_fd = ac_name = None
708 arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
709 startdir = os.getcwd()
712 # Grab files we want to include
713 newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
714 newer += session.query(BuildQueuePolicyFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueuePolicyFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
715 # Write file list with newer files
716 (fl_fd, fl_name) = mkstemp()
718 os.write(fl_fd, '%s\n' % n.fullpath)
723 # Write minimal apt.conf
724 # TODO: Remove hardcoding from template
725 (ac_fd, ac_name) = mkstemp()
726 os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
728 'cachedir': cnf["Dir::Cache"],
729 'overridedir': cnf["Dir::Override"],
733 # Run apt-ftparchive generate
734 os.chdir(os.path.dirname(ac_name))
735 os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
737 # Run apt-ftparchive release
738 # TODO: Eww - fix this
739 bname = os.path.basename(self.path)
743 # We have to remove the Release file otherwise it'll be included in the
746 os.unlink(os.path.join(bname, 'Release'))
750 os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
752 # Crude hack with open and append, but this whole section is and should be redone.
753 if self.notautomatic:
754 release=open("Release", "a")
755 release.write("NotAutomatic: yes\n")
760 keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
761 if cnf.has_key("Dinstall::SigningPubKeyring"):
762 keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
764 os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
766 # Move the files if we got this far
767 os.rename('Release', os.path.join(bname, 'Release'))
769 os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
771 # Clean up any left behind files
# clean_and_update(): expire queue files older than stay_of_execution and
# prune unreferenced links; commits through the session as it goes.
798 def clean_and_update(self, starttime, Logger, dryrun=False):
799 """WARNING: This routine commits for you"""
800 session = DBConn().session().object_session(self)
802 if self.generate_metadata and not dryrun:
803 self.write_metadata(starttime)
805 # Grab files older than our execution time
806 older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
807 older += session.query(BuildQueuePolicyFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueuePolicyFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
813 Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
815 Logger.log(["I: Removing %s from the queue" % o.fullpath])
816 os.unlink(o.fullpath)
819 # If it wasn't there, don't worry
820 if e.errno == ENOENT:
823 # TODO: Replace with proper logging call
824 Logger.log(["E: Could not remove %s" % o.fullpath])
831 for f in os.listdir(self.path):
832 if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
835 if not self.contains_filename(f):
836 fp = os.path.join(self.path, f)
838 Logger.log(["I: Would remove unused link %s" % fp])
840 Logger.log(["I: Removing unused link %s" % fp])
844 Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
# contains_filename(): membership test against both queue-file tables.
846 def contains_filename(self, filename):
849 @returns True if filename is supposed to be in the queue; False otherwise
851 session = DBConn().session().object_session(self)
852 if session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id, filename = filename).count() > 0:
854 elif session.query(BuildQueuePolicyFile).filter_by(build_queue = self, filename = filename).count() > 0:
858 def add_file_from_pool(self, poolfile):
859 """Copies a file into the pool. Assumes that the PoolFile object is
860 attached to the same SQLAlchemy session as the Queue object is.
862 The caller is responsible for committing after calling this function."""
863 poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
865 # Check if we have a file of this name or this ID already
866 for f in self.queuefiles:
867 if (f.fileid is not None and f.fileid == poolfile.file_id) or \
868 (f.poolfile is not None and f.poolfile.filename == poolfile_basename):
869 # In this case, update the BuildQueueFile entry so we
870 # don't remove it too early
871 f.lastused = datetime.now()
872 DBConn().session().object_session(poolfile).add(f)
875 # Prepare BuildQueueFile object
876 qf = BuildQueueFile()
877 qf.build_queue_id = self.queue_id
878 qf.lastused = datetime.now()
879 qf.filename = poolfile_basename
881 targetpath = poolfile.fullpath
882 queuepath = os.path.join(self.path, poolfile_basename)
886 # We need to copy instead of symlink
888 utils.copy(targetpath, queuepath)
889 # NULL in the fileid field implies a copy
892 os.symlink(targetpath, queuepath)
893 qf.fileid = poolfile.file_id
894 except FileExistsError:
895 if not poolfile.identical_to(queuepath):
900 # Get the same session as the PoolFile is using and add the qf to it
901 DBConn().session().object_session(poolfile).add(qf)
905 def add_changes_from_policy_queue(self, policyqueue, changes):
907 Copies a changes from a policy queue together with its poolfiles.
909 @type policyqueue: PolicyQueue
910 @param policyqueue: policy queue to copy the changes from
912 @type changes: DBChange
913 @param changes: changes to copy to this build queue
915 for policyqueuefile in changes.files:
916 self.add_file_from_policy_queue(policyqueue, policyqueuefile)
917 for poolfile in changes.poolfiles:
918 self.add_file_from_pool(poolfile)
920 def add_file_from_policy_queue(self, policyqueue, policyqueuefile):
922 Copies a file from a policy queue.
923 Assumes that the policyqueuefile is attached to the same SQLAlchemy
924 session as the Queue object is. The caller is responsible for
925 committing after calling this function.
927 @type policyqueue: PolicyQueue
928 @param policyqueue: policy queue to copy the file from
930 @type policyqueuefile: ChangePendingFile
931 @param policyqueuefile: file to be added to the build queue
933 session = DBConn().session().object_session(policyqueuefile)
935 # Is the file already there?
937 f = session.query(BuildQueuePolicyFile).filter_by(build_queue=self, file=policyqueuefile).one()
938 f.lastused = datetime.now()
940 except NoResultFound:
941 pass # continue below
943 # We have to add the file.
944 f = BuildQueuePolicyFile()
946 f.file = policyqueuefile
947 f.filename = policyqueuefile.filename
949 source = os.path.join(policyqueue.path, policyqueuefile.filename)
952 # Always copy files from policy queues as they might move around.
954 utils.copy(source, target)
955 except FileExistsError:
956 if not policyqueuefile.identical_to(target):
964 __all__.append('BuildQueue')
@session_wrapper
def get_build_queue(queuename, session=None):
    """
    Returns BuildQueue object for given C{queue name}.  (The previous
    docstring claimed the queue is created if missing; the implementation
    returns None instead.)

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: BuildQueue
    @return: BuildQueue object for the given queue (None if not present)
    """

    q = session.query(BuildQueue).filter_by(queue_name=queuename)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_build_queue')
992 ################################################################################
class BuildQueueFile(object):
    """
    BuildQueueFile represents a file in a build queue coming from a pool.
    """

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)

    @property
    def fullpath(self):
        # Absolute path of this file inside its build queue directory.
        return os.path.join(self.buildqueue.path, self.filename)
1010 __all__.append('BuildQueueFile')
1012 ################################################################################
class BuildQueuePolicyFile(object):
    """
    BuildQueuePolicyFile represents a file in a build queue that comes from a
    policy queue (and not a pool).
    """

    def __init__(self, *args, **kwargs):
        pass

    #def filename(self):
    #    return self.file.filename

    @property
    def fullpath(self):
        # Absolute path of this file inside its build queue directory.
        return os.path.join(self.build_queue.path, self.filename)
1031 __all__.append('BuildQueuePolicyFile')
1033 ################################################################################
class ChangePendingBinary(object):
    """A pending binary of an upload in a policy queue; mapped by SQLAlchemy."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ChangePendingBinary %s>' % self.change_pending_binary_id
1042 __all__.append('ChangePendingBinary')
1044 ################################################################################
class ChangePendingFile(object):
    """A pending file of an upload in a policy queue; mapped by SQLAlchemy."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ChangePendingFile %s>' % self.change_pending_file_id

    def identical_to(self, filename):
        """
        compare size and hash with the given file

        @rtype: bool
        @return: true if the given file has the same size and hash as this object; false otherwise
        """
        st = os.stat(filename)
        # Cheap size check first; no need to hash on mismatch.
        if self.size != st.st_size:
            return False

        # Close the handle deterministically instead of leaking it until
        # garbage collection.
        f = open(filename, "r")
        try:
            sha256sum = apt_pkg.sha256sum(f)
        finally:
            f.close()
        if sha256sum != self.sha256sum:
            return False

        return True
1071 __all__.append('ChangePendingFile')
1073 ################################################################################
class ChangePendingSource(object):
    """A pending source of an upload in a policy queue; mapped by SQLAlchemy."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ChangePendingSource %s>' % self.change_pending_source_id
1082 __all__.append('ChangePendingSource')
1084 ################################################################################
class Component(ORMObject):
    """A row of the 'component' table (e.g. 'main', 'contrib')."""

    def __init__(self, component_name = None):
        self.component_name = component_name

    def __eq__(self, val):
        # Allow comparing a Component directly against its name string.
        if isinstance(val, str):
            return (self.component_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.component_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        return ['component_name', 'component_id', 'description', \
            'location_count', 'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        return ['component_name']

__all__.append('Component')
@session_wrapper
def get_component(component, session=None):
    """
    Returns the Component object for the given C{component} name.  (The
    previous docstring said "override type" and "database id" by mistake.)

    @type component: string
    @param component: The name of the component

    @rtype: Component
    @return: Component object for the given name (None if not present)
    """

    # Component names are stored lower-case.
    component = component.lower()

    q = session.query(Component).filter_by(component_name=component)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_component')
@session_wrapper
def get_component_names(session=None):
    """
    Returns list of strings of component names.

    @rtype: list
    @return: list of strings of component names
    """

    return [ x.component_name for x in session.query(Component).all() ]

__all__.append('get_component_names')
1148 ################################################################################
class DBConfig(object):
    """A row of the in-database configuration table; mapped by SQLAlchemy."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DBConfig %s>' % self.name
1157 __all__.append('DBConfig')
1159 ################################################################################
@session_wrapper
def get_or_set_contents_file_id(filename, session=None):
    """
    Returns database id for given filename.

    If no matching file is found, a row is inserted.

    @type filename: string
    @param filename: The filename
    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.

    @rtype: int
    @return: the database id for the given filename
    """

    q = session.query(ContentFilename).filter_by(filename=filename)

    try:
        ret = q.one().cafilename_id
    except NoResultFound:
        # Not there yet: insert and pick up the generated id.
        cf = ContentFilename()
        cf.filename = filename
        session.add(cf)
        session.commit_or_flush()
        ret = cf.cafilename_id

    return ret

__all__.append('get_or_set_contents_file_id')
1195 def get_contents(suite, overridetype, section=None, session=None):
# NOTE(review): garbled copy -- the '@session_wrapper' decorator, docstring
# quotes and the SELECT column list of the SQL string (between 'AS fn,' and
# the FROM clause) are missing.  The SQL text is a runtime string, so it
# must be restored verbatim from upstream, not reconstructed.
1197 Returns contents for a suite / overridetype combination, limiting
1198 to a section if not None.
1201 @param suite: Suite object
1203 @type overridetype: OverrideType
1204 @param overridetype: OverrideType object
1206 @type section: Section
1207 @param section: Optional section object to limit results to
1209 @type session: SQLAlchemy
1210 @param session: Optional SQL session object (a temporary one will be
1211 generated if not supplied)
1213 @rtype: ResultsProxy
1214 @return: ResultsProxy object set up to return tuples of (filename, section,
1218 # find me all of the contents for a given suite
1219 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
1223 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
1224 JOIN content_file_names n ON (c.filename=n.id)
1225 JOIN binaries b ON (b.id=c.binary_pkg)
1226 JOIN override o ON (o.package=b.package)
1227 JOIN section s ON (s.id=o.section)
1228 WHERE o.suite = :suiteid AND o.type = :overridetypeid
1229 AND b.type=:overridetypename"""
1231 vals = {'suiteid': suite.suite_id,
1232 'overridetypeid': overridetype.overridetype_id,
1233 'overridetypename': overridetype.overridetype}
1235 if section is not None:
1236 contents_q += " AND s.id = :sectionid"
1237 vals['sectionid'] = section.section_id
1239 contents_q += " ORDER BY fn"
1241 return session.execute(contents_q, vals)
1243 __all__.append('get_contents')
1245 ################################################################################
class ContentFilepath(object):
    """ORM row for the content_file_paths table (directory part of a path)."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ContentFilepath %s>' % self.filepath
1254 __all__.append('ContentFilepath')
@session_wrapper
def get_or_set_contents_path_id(filepath, session=None):
    """
    Returns database id for given path.

    If no matching file is found, a row is inserted.

    @type filepath: string
    @param filepath: The filepath

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: int
    @return: the database id for the given path
    """

    q = session.query(ContentFilepath).filter_by(filepath=filepath)

    try:
        ret = q.one().cafilepath_id
    except NoResultFound:
        # Not seen before: insert it and flush so the new id is assigned
        cf = ContentFilepath()
        cf.filepath = filepath
        session.add(cf)
        session.commit_or_flush()
        ret = cf.cafilepath_id

    return ret

__all__.append('get_or_set_contents_path_id')
1290 ################################################################################
class ContentAssociation(object):
    """ORM row linking a binary package to one of its shipped file paths."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ContentAssociation %s>' % self.ca_id
1299 __all__.append('ContentAssociation')
def insert_content_paths(binary_id, fullpaths, session=None):
    """
    Make sure given path is associated with given binary id

    @type binary_id: int
    @param binary_id: the id of the binary
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session.  If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code.  If not passed, a commit
    will be performed at the end of the function, otherwise the caller is
    responsible for commiting.

    @return: True upon success
    """

    privatetrans = False
    if session is None:
        session = DBConn().session()
        privatetrans = True

    try:
        # Insert paths; strip a leading './' so paths are stored relative
        def generate_path_dicts():
            for fullpath in fullpaths:
                if fullpath.startswith( './' ):
                    fullpath = fullpath[2:]

                yield {'filename':fullpath, 'id': binary_id }

        for d in generate_path_dicts():
            session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
                             d )

        session.commit()
        if privatetrans:
            session.close()
        return True

    except:
        traceback.print_exc()

        # Only rollback if we set up the session ourself
        if privatetrans:
            session.rollback()
            session.close()

        return False

__all__.append('insert_content_paths')
1354 ################################################################################
class DSCFile(object):
    """ORM row mapping a source package (.dsc) to one of its pool files."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DSCFile %s>' % self.dscfile_id
1363 __all__.append('DSCFile')
@session_wrapper
def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
    """
    Returns a list of DSCFiles which may be empty

    @type dscfile_id: int (optional)
    @param dscfile_id: the dscfile_id of the DSCFiles to find

    @type source_id: int (optional)
    @param source_id: the source id related to the DSCFiles to find

    @type poolfile_id: int (optional)
    @param poolfile_id: the poolfile id related to the DSCFiles to find

    @rtype: list
    @return: Possibly empty list of DSCFiles
    """

    q = session.query(DSCFile)

    if dscfile_id is not None:
        q = q.filter_by(dscfile_id=dscfile_id)

    if source_id is not None:
        q = q.filter_by(source_id=source_id)

    if poolfile_id is not None:
        q = q.filter_by(poolfile_id=poolfile_id)

    return q.all()

__all__.append('get_dscfiles')
1398 ################################################################################
class ExternalOverride(ORMObject):
    """ORM row for external override data: a (package, key) -> value mapping."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ExternalOverride %s = %s: %s>' % (self.package, self.key, self.value)

__all__.append('ExternalOverride')
1409 ################################################################################
class PoolFile(ORMObject):
    """ORM row for a file stored in the archive pool."""
    def __init__(self, filename = None, location = None, filesize = -1, \
        md5sum = None):
        self.filename = filename
        self.location = location
        self.filesize = filesize
        self.md5sum = md5sum

    @property
    def fullpath(self):
        # Absolute on-disk path: location path + pool-relative filename
        return os.path.join(self.location.path, self.filename)

    def is_valid(self, filesize = -1, md5sum = None):
        """Check whether the given size and md5sum match this row."""
        return self.filesize == long(filesize) and self.md5sum == md5sum

    def properties(self):
        return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
            'sha256sum', 'location', 'source', 'binary', 'last_used']

    def not_null_constraints(self):
        return ['filename', 'md5sum', 'location']

    def identical_to(self, filename):
        """
        compare size and hash with the given file

        @rtype: bool
        @return: true if the given file has the same size and hash as this object; false otherwise
        """
        st = os.stat(filename)
        if self.filesize != st.st_size:
            return False

        # Fix: close the file handle even if hashing raises (was leaked before)
        f = open(filename, "r")
        try:
            sha256sum = apt_pkg.sha256sum(f)
        finally:
            f.close()
        if sha256sum != self.sha256sum:
            return False

        return True

__all__.append('PoolFile')
@session_wrapper
def check_poolfile(filename, filesize, md5sum, location_id, session=None):
    """
    Returns a tuple:
    (ValidFileFound [boolean], PoolFile object or None)

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @type filesize: int
    @param filesize: the size of the file to check against the DB

    @type md5sum: string
    @param md5sum: the md5sum of the file to check against the DB

    @type location_id: int
    @param location_id: the id of the location to look in

    @rtype: tuple
    @return: Tuple of length 2.
                 - If valid pool file found: (C{True}, C{PoolFile object})
                 - If valid pool file not found:
                     - (C{False}, C{None}) if no file found
                     - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
    """

    poolfile = session.query(Location).get(location_id). \
        files.filter_by(filename=filename).first()
    valid = False
    if poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum):
        valid = True

    return (valid, poolfile)

__all__.append('check_poolfile')
# TODO: the implementation can trivially be inlined at the place where the
# function is called
@session_wrapper
def get_poolfile_by_id(file_id, session=None):
    """
    Returns a PoolFile objects or None for the given id

    @type file_id: int
    @param file_id: the id of the file to look for

    @rtype: PoolFile or None
    @return: either the PoolFile object or None
    """

    return session.query(PoolFile).get(file_id)

__all__.append('get_poolfile_by_id')
@session_wrapper
def get_poolfile_like_name(filename, session=None):
    """
    Returns an array of PoolFile objects which are like the given name

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @rtype: array
    @return: array of PoolFile objects
    """

    # TODO: There must be a way of properly using bind parameters with %FOO%
    q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))

    return q.all()

__all__.append('get_poolfile_like_name')
def add_poolfile(filename, datadict, location_id, session=None):
    """
    Add a new file to the pool

    @type filename: string
    @param filename: filename

    @type datadict: dict
    @param datadict: dict with needed data (size, md5sum, sha1sum, sha256sum)

    @type location_id: int
    @param location_id: database id of the location

    @rtype: PoolFile
    @return: the PoolFile object created
    """
    poolfile = PoolFile()
    poolfile.filename = filename
    poolfile.filesize = datadict["size"]
    poolfile.md5sum = datadict["md5sum"]
    poolfile.sha1sum = datadict["sha1sum"]
    poolfile.sha256sum = datadict["sha256sum"]
    poolfile.location_id = location_id

    session.add(poolfile)
    # Flush to get a file id (NB: This is not a commit)
    session.flush()

    return poolfile

__all__.append('add_poolfile')
1559 ################################################################################
class Fingerprint(ORMObject):
    """ORM row for a GPG key fingerprint known to the archive."""
    def __init__(self, fingerprint = None):
        self.fingerprint = fingerprint

    def properties(self):
        return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
            'binary_acl', 'source_acl']

    def not_null_constraints(self):
        return ['fingerprint']

__all__.append('Fingerprint')
@session_wrapper
def get_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr or None
    """

    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    try:
        ret = q.one()
    except NoResultFound:
        ret = None

    return ret

__all__.append('get_fingerprint')
@session_wrapper
def get_or_set_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    If no matching fpr is found, a row is inserted.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr
    """

    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    try:
        ret = q.one()
    except NoResultFound:
        fingerprint = Fingerprint()
        fingerprint.fingerprint = fpr
        session.add(fingerprint)
        session.commit_or_flush()
        ret = fingerprint

    return ret

__all__.append('get_or_set_fingerprint')
1636 ################################################################################
# Helper routine for Keyring class
def get_ldap_name(entry):
    """returns the user name (cn, mn, sn) for the given LDAP entry,
    skipping empty and placeholder ("-") components"""
    name = []
    for k in ["cn", "mn", "sn"]:
        ret = entry.get(k)
        if ret and ret[0] != "" and ret[0] != "-":
            name.append(ret[0])
    return " ".join(name)
1647 ################################################################################
class Keyring(object):
    """Represents a GPG keyring known to the database and provides helpers
    to parse its keys and map them to archive uids."""

    gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
                     " --with-colons --fingerprint --fingerprint"

    keys = {}
    fpr_lookup = {}

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<Keyring %s>' % self.keyring_name

    def de_escape_gpg_str(self, txt):
        """Decode \\xNN escapes emitted by gpg --with-colons output."""
        esclist = re.split(r'(\\x..)', txt)
        for x in range(1,len(esclist),2):
            esclist[x] = "%c" % (int(esclist[x][2:],16))
        return "".join(esclist)

    def parse_address(self, uid):
        """parses uid and returns a tuple of real name and email address"""
        import email.Utils
        (name, address) = email.Utils.parseaddr(uid)
        name = re.sub(r"\s*[(].*[)]", "", name)
        name = self.de_escape_gpg_str(name)
        if name == "":
            name = uid
        return (name, address)

    def load_keys(self, keyring):
        """Populate self.keys / self.fpr_lookup from gpg's colon output."""
        if not self.keyring_id:
            raise Exception('Must be initialized with database information')

        k = os.popen(self.gpg_invocation % keyring, "r")
        key = None
        signingkey = False

        for line in k.xreadlines():
            field = line.split(":")
            if field[0] == "pub":
                key = field[4]
                self.keys[key] = {}
                (name, addr) = self.parse_address(field[9])
                if "@" in addr:
                    self.keys[key]["email"] = addr
                    self.keys[key]["name"] = name
                self.keys[key]["fingerprints"] = []
                signingkey = True
            elif key and field[0] == "sub" and len(field) >= 12:
                signingkey = ("s" in field[11])
            elif key and field[0] == "uid":
                (name, addr) = self.parse_address(field[9])
                if "email" not in self.keys[key] and "@" in addr:
                    self.keys[key]["email"] = addr
                    self.keys[key]["name"] = name
            elif signingkey and field[0] == "fpr":
                self.keys[key]["fingerprints"].append(field[9])
                self.fpr_lookup[field[9]] = key

    def import_users_from_ldap(self, session):
        """Map loaded keys to uids using the configured LDAP directory.

        @return: (byname, byuid) dictionaries keyed by uid name / uid id
        """
        import ldap
        cnf = Config()

        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]

        l = ldap.open(LDAPServer)
        l.simple_bind_s("","")
        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
               ["uid", "keyfingerprint", "cn", "mn", "sn"])

        ldap_fin_uid_id = {}

        byuid = {}
        byname = {}

        for i in Attrs:
            entry = i[1]
            uid = entry["uid"][0]
            name = get_ldap_name(entry)
            fingerprints = entry["keyFingerPrint"]
            keyid = None
            for f in fingerprints:
                key = self.fpr_lookup.get(f, None)
                if key not in self.keys:
                    continue
                self.keys[key]["uid"] = uid

                # only register the database uid once per LDAP entry
                if keyid != None:
                    continue
                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, name)
                byname[uid] = (keyid, name)

        return (byname, byuid)

    def generate_users_from_keyring(self, format, session):
        """Generate uid mappings from the keys' email addresses.

        @param format: format string applied to each email address
        @return: (byname, byuid) dictionaries keyed by uid name / uid id
        """
        byuid = {}
        byname = {}
        any_invalid = False
        for x in self.keys.keys():
            if "email" not in self.keys[x]:
                # no usable address: mark and handle collectively below
                any_invalid = True
                self.keys[x]["uid"] = format % "invalid-uid"
            else:
                uid = format % self.keys[x]["email"]
                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, self.keys[x]["name"])
                byname[uid] = (keyid, self.keys[x]["name"])
                self.keys[x]["uid"] = uid

        if any_invalid:
            uid = format % "invalid-uid"
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, "ungeneratable user id")
            byname[uid] = (keyid, "ungeneratable user id")

        return (byname, byuid)
1769 __all__.append('Keyring')
@session_wrapper
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @rtype: Keyring
    @return: the Keyring object for this keyring
    """

    q = session.query(Keyring).filter_by(keyring_name=keyring)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_keyring')
@session_wrapper
def get_active_keyring_paths(session=None):
    """
    @rtype: list
    @return: list of active keyring paths, highest priority first
    """
    return [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).order_by(desc(Keyring.priority)).all() ]

__all__.append('get_active_keyring_paths')
@session_wrapper
def get_primary_keyring_path(session=None):
    """
    Get the full path to the highest priority active keyring

    @rtype: str or None
    @return: path to the active keyring with the highest priority or None if no
             keyring is configured
    """
    keyrings = get_active_keyring_paths()

    if len(keyrings) > 0:
        return keyrings[0]
    else:
        return None

__all__.append('get_primary_keyring_path')
1821 ################################################################################
class KeyringACLMap(object):
    """ORM row mapping a keyring to an upload ACL."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<KeyringACLMap %s>' % self.keyring_acl_map_id
1830 __all__.append('KeyringACLMap')
1832 ################################################################################
class DBChange(object):
    """ORM row for an uploaded .changes file."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DBChange %s>' % self.changesname

    def clean_from_queue(self):
        """Detach this upload from its policy queue and drop file references."""
        session = DBConn().session().object_session(self)

        # Remove changes_pool_files entries
        self.poolfiles = []

        # Remove changes_pending_files references
        self.files = []

        # Clear out of queue
        self.in_queue = None
        self.approved_for_id = None
1854 __all__.append('DBChange')
@session_wrapper
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return:  DBChange object for the given filename (C{None} if not present)
    """
    q = session.query(DBChange).filter_by(changesname=filename)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_dbchange')
1881 ################################################################################
class Location(ORMObject):
    """ORM row for an on-disk archive location (e.g. a pool root)."""
    def __init__(self, path = None, component = None):
        self.path = path
        self.component = component
        # the column 'type' should go away, see comment at mapper
        self.archive_type = 'pool'

    def properties(self):
        return ['path', 'location_id', 'archive_type', 'component', \
            'files_count']

    def not_null_constraints(self):
        return ['path', 'archive_type']

__all__.append('Location')
@session_wrapper
def get_location(location, component=None, archive=None, session=None):
    """
    Returns Location object for the given combination of location, component
    and archive

    @type location: string
    @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}

    @type component: string
    @param component: the component name (if None, no restriction applied)

    @type archive: string
    @param archive: the archive name (if None, no restriction applied)

    @rtype: Location / None
    @return: Either a Location object or None if one can't be found
    """

    q = session.query(Location).filter_by(path=location)

    if archive is not None:
        q = q.join(Archive).filter_by(archive_name=archive)

    if component is not None:
        q = q.join(Component).filter_by(component_name=component)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_location')
1933 ################################################################################
class Maintainer(ORMObject):
    """ORM row for a package maintainer ("Name <email>" string)."""
    def __init__(self, name = None):
        self.name = name

    def properties(self):
        return ['name', 'maintainer_id']

    def not_null_constraints(self):
        return ['name']

    def get_split_maintainer(self):
        """Split the maintainer string; returns the 4-tuple from fix_maintainer,
        or empty strings when no name is set."""
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())

__all__.append('Maintainer')
@session_wrapper
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """

    q = session.query(Maintainer).filter_by(name=name)
    try:
        ret = q.one()
    except NoResultFound:
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()
        ret = maintainer

    return ret

__all__.append('get_or_set_maintainer')
@session_wrapper
def get_maintainer(maintainer_id, session=None):
    """
    Return the name of the maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer
    @return: the Maintainer with this C{maintainer_id}
    """

    return session.query(Maintainer).get(maintainer_id)

__all__.append('get_maintainer')
2004 ################################################################################
class NewComment(object):
    """ORM row for an ftpmaster comment on a package in the NEW queue."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
2013 __all__.append('NewComment')
@session_wrapper
def has_new_comment(package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """

    q = session.query(NewComment)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    return bool(q.count() > 0)

__all__.append('has_new_comment')
@session_wrapper
def get_new_comments(package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """

    q = session.query(NewComment)
    if package is not None: q = q.filter_by(package=package)
    if version is not None: q = q.filter_by(version=version)
    if comment_id is not None: q = q.filter_by(comment_id=comment_id)

    return q.all()

__all__.append('get_new_comments')
2074 ################################################################################
class Override(ORMObject):
    """ORM row assigning section/priority for a package in a suite/component."""
    def __init__(self, package = None, suite = None, component = None, overridetype = None, \
        section = None, priority = None):
        self.package = package
        self.suite = suite
        self.component = component
        self.overridetype = overridetype
        self.section = section
        self.priority = priority

    def properties(self):
        return ['package', 'suite', 'component', 'overridetype', 'section', \
            'priority']

    def not_null_constraints(self):
        return ['package', 'suite', 'component', 'overridetype', 'section']

__all__.append('Override')
@session_wrapper
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to.  If
                  None, don't limit.  Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
                      limit to.  If None, don't limit.  Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
                         limit to.  If None, don't limit.  Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """

    q = session.query(Override)
    q = q.filter_by(package=package)

    if suite is not None:
        if not isinstance(suite, list): suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list): component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list): overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

    return q.all()

__all__.append('get_override')
2143 ################################################################################
class OverrideType(ORMObject):
    """ORM row for an override type (e.g. deb, udeb, dsc)."""
    def __init__(self, overridetype = None):
        self.overridetype = overridetype

    def properties(self):
        return ['overridetype', 'overridetype_id', 'overrides_count']

    def not_null_constraints(self):
        return ['overridetype']

__all__.append('OverrideType')
@session_wrapper
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: OverrideType
    @return: the database id for the given override type
    """

    q = session.query(OverrideType).filter_by(overridetype=override_type)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_override_type')
2182 ################################################################################
class PolicyQueue(object):
    """ORM row for a policy queue (e.g. NEW, byhand)."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<PolicyQueue %s>' % self.queue_name
2191 __all__.append('PolicyQueue')
@session_wrapper
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """

    q = session.query(PolicyQueue).filter_by(queue_name=queuename)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_policy_queue')
@session_wrapper
def get_policy_queue_from_path(pathname, session=None):
    """
    Returns PolicyQueue object for given C{path name}

    @type pathname: string
    @param pathname: The path

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """

    q = session.query(PolicyQueue).filter_by(path=pathname)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_policy_queue_from_path')
2243 ################################################################################
class Priority(ORMObject):
    """ORM row for a package priority (required, important, optional, ...)."""
    def __init__(self, priority = None, level = None):
        self.priority = priority
        self.level = level

    def properties(self):
        return ['priority', 'priority_id', 'level', 'overrides_count']

    def not_null_constraints(self):
        return ['priority', 'level']

    def __eq__(self, val):
        # allow direct comparison against a priority name string
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented

__all__.append('Priority')
@session_wrapper
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority
    """

    q = session.query(Priority).filter_by(priority=priority)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_priority')
@session_wrapper
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """

    ret = {}
    q = session.query(Priority)
    for x in q.all():
        ret[x.priority] = x.priority_id

    return ret

__all__.append('get_priorities')
2317 ################################################################################
class Section(ORMObject):
    """ORM row for a package section (admin, devel, libs, ...)."""
    def __init__(self, section = None):
        self.section = section

    def properties(self):
        return ['section', 'section_id', 'overrides_count']

    def not_null_constraints(self):
        return ['section']

    def __eq__(self, val):
        # allow direct comparison against a section name string
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented

__all__.append('Section')
@session_wrapper
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name
    """

    q = session.query(Section).filter_by(section=section)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_section')
@session_wrapper
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """

    ret = {}
    q = session.query(Section)
    for x in q.all():
        ret[x.section] = x.section_id

    return ret

__all__.append('get_sections')
2390 ################################################################################
class SrcContents(ORMObject):
    """ORM row mapping a source package to one of its file names."""
    def __init__(self, file = None, source = None):
        self.file = file
        self.source = source

    def properties(self):
        return ['file', 'source']

__all__.append('SrcContents')
2402 ################################################################################
from debian.debfile import Deb822

# Temporary Deb822 subclass to fix bugs with : handling; see #597249
class Dak822(Deb822):
    def _internal_parser(self, sequence, fields=None):
        # The key is non-whitespace, non-colon characters before any colon.
        key_part = r"^(?P<key>[^: \t\n\r\f\v]+)\s*:\s*"
        single = re.compile(key_part + r"(?P<data>\S.*?)\s*$")
        multi = re.compile(key_part + r"$")
        multidata = re.compile(r"^\s(?P<data>.+?)\s*$")

        wanted_field = lambda f: fields is None or f in fields

        if isinstance(sequence, basestring):
            sequence = sequence.splitlines()

        curkey = None
        content = ""
        for line in self.gpg_stripped_paragraph(sequence):
            m = single.match(line)
            if m:
                # flush the previous field before starting a new one
                if curkey:
                    self[curkey] = content

                if not wanted_field(m.group('key')):
                    curkey = None
                    continue

                curkey = m.group('key')
                content = m.group('data')
                continue

            m = multi.match(line)
            if m:
                if curkey:
                    self[curkey] = content

                if not wanted_field(m.group('key')):
                    curkey = None
                    continue

                curkey = m.group('key')
                content = ""
                continue

            m = multidata.match(line)
            if m:
                # continuation line of a multi-line field
                content += '\n' + line # XXX not m.group('data')?
                continue

        # flush the final field
        if curkey:
            self[curkey] = content
class DBSource(ORMObject):
    """ORM row for a source package known to the archive."""
    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date

    @property
    def pkid(self):
        return self.source_id

    def properties(self):
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \
            'install_date', 'binaries_count', 'uploaders_count']

    def not_null_constraints(self):
        # Fix: 'install_date' was listed twice
        return ['source', 'version', 'install_date', 'maintainer', \
            'changedby', 'poolfile']

    def read_control_fields(self):
        """
        Reads the control information from a dsc

        @rtype: tuple
        @return: fields is the dsc information in a dictionary form
        """
        fullpath = self.poolfile.fullpath
        fields = Dak822(open(self.poolfile.fullpath, 'r'))
        return fields

    metadata = association_proxy('key', 'value')

    def get_component_name(self):
        return self.poolfile.location.component.component_name

    def scan_contents(self):
        """
        Returns a set of names for non directories. The path names are
        normalized after converting them from either utf-8 or iso8859-1
        encoding.

        @rtype: set
        @return: set of file names contained in the unpacked source
        """
        fullpath = self.poolfile.fullpath
        from daklib.contents import UnpackedSource
        unpacked = UnpackedSource(fullpath)
        fileset = set()
        for name in unpacked.get_all_filenames():
            # enforce proper utf-8 encoding
            try:
                name.decode('utf-8')
            except UnicodeDecodeError:
                name = name.decode('iso8859-1').encode('utf-8')
            fileset.add(name)
        return fileset

__all__.append('DBSource')
@session_wrapper
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
      1. exact match     => 1.0-3
      2. bin-only NMU    => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: list
    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: returns 1 if a source with expected version is found, otherwise 0
    """

    ret = True

    from daklib.regexes import re_bin_only_nmu
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))
        if suite != "any":
            # source must exist in 'suite' or a suite that is enhanced by 'suite'
            s = get_suite(suite, session)
            enhances_vcs = session.query(VersionCheck).filter(VersionCheck.suite==s).filter_by(check='Enhances')
            considered_suites = [ vc.reference for vc in enhances_vcs ]
            considered_suites.append(s)

            q = q.filter(DBSource.suites.any(Suite.suite_id.in_([s.suite_id for s in considered_suites])))

        if q.count() > 0:
            continue

        # No source found so return not ok
        ret = False

    return ret

__all__.append('source_exists')
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """
    # NOTE(review): an @session_wrapper-style decorator is presumably elided
    # in this excerpt — as written, `session` must be supplied by the caller.
    return session.query(Suite).filter(Suite.sources.any(source=source)).all()

__all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @type source: str
    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect.  If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: list of DBSource objects for the given name (may be empty)
    """
    q = session.query(DBSource).filter_by(source=source)

    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)

    # NOTE(review): the final result line (presumably `return q.all()`) is
    # elided in this excerpt.

__all__.append('get_sources_from_name')
# FIXME: This function fails badly if it finds more than 1 source package and
# its implementation is trivial enough to be inlined.
def get_source_in_suite(source, suite, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @param suite: the suite name

    @return: the version for I{source} in I{suite}
    """
    # NOTE(review): excerpt — the `try:` line and both return statements are
    # elided; the orphan `except` below belongs to that elided `try:`.
    q = get_suite(suite, session).get_sources(source)
    except NoResultFound:

__all__.append('get_source_in_suite')
def import_metadata_into_db(obj, session=None):
    """
    This routine works on either DBBinary or DBSource objects and imports
    their metadata into the database
    """
    # NOTE(review): excerpt — the two `try:` lines guarding the encoding
    # fallbacks are elided; the orphan `except` clauses below belong to them.
    fields = obj.read_control_fields()
    for k in fields.keys():
            # First attempt: plain str() conversion of the field value.
            val = str(fields[k])
        except UnicodeEncodeError:
            # Fall back to UTF-8
                val = fields[k].encode('utf-8')
            except UnicodeEncodeError:
                # Finally try iso8859-1
                val = fields[k].encode('iso8859-1')
                # Otherwise we allow the exception to percolate up and we cause
                # a reject as someone is playing silly buggers

        obj.metadata[get_or_set_metadatakey(k, session)] = val

    session.commit_or_flush()

__all__.append('import_metadata_into_db')
2680 ################################################################################
def split_uploaders(uploaders_list):
    """
    Split the Uploaders field into the individual uploaders and yield each of
    them. Beware: email addresses might contain commas.
    """
    # A comma that follows '>' terminates an address, so it is a real
    # separator; replace those with tabs first, then split on tabs so that
    # commas inside a display name ("Last, First <x@y>") survive.
    separated = re.sub(">[ ]*,", ">\t", uploaders_list)
    for entry in separated.split("\t"):
        yield entry.strip()
def add_dsc_to_db(u, filename, session=None):
    """
    Record a .dsc upload in the database: fill in source package fields,
    register pool files and dsc_files entries, attach suites and uploaders.

    Returns a tuple (source, dsc_component, dsc_location_id, pfs).

    NOTE(review): excerpt — several original lines are elided (presumably
    the creation of `source`, `pfs`, `dscfile` and `df`, plus some `else:`
    branches); the code lines themselves are kept verbatim.
    """
    entry = u.pkg.files[filename]
    source.source = u.pkg.dsc["source"]
    source.version = u.pkg.dsc["version"]      # NB: not files[file]["version"], that has no epoch
    source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    # If Changed-By isn't available, fall back to maintainer
    if u.pkg.changes.has_key("changed-by"):
        source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
        # NOTE(review): an `else:` line is presumably elided above this
        # maintainer fallback assignment.
        source.changedby_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    source.install_date = datetime.now().date()

    dsc_component = entry["component"]
    dsc_location_id = entry["location id"]

    source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")

    # Set up a new poolfile if necessary
    if not entry.has_key("files id") or not entry["files id"]:
        filename = entry["pool name"] + filename
        poolfile = add_poolfile(filename, entry, dsc_location_id, session)
        pfs.append(poolfile)
        entry["files id"] = poolfile.file_id

    source.poolfile_id = entry["files id"]

    suite_names = u.pkg.changes["distribution"].keys()
    source.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Add the source files to the DB (files and dsc_files)
    dscfile.source_id = source.source_id
    dscfile.poolfile_id = entry["files id"]
    session.add(dscfile)

    for dsc_file, dentry in u.pkg.dsc_files.items():
        df.source_id = source.source_id

        # If the .orig tarball is already in the pool, it's
        # files id is stored in dsc_files by check_dsc().
        files_id = dentry.get("files id", None)

        # Find the entry in the files hash
        # TODO: Bail out here properly
        for f, e in u.pkg.files.items():
            # NOTE(review): loop body elided in this excerpt (presumably the
            # match that binds `dfentry` used below).

        if files_id is None:
            filename = dfentry["pool name"] + dsc_file

            (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
            # FIXME: needs to check for -1/-2 and or handle exception
            if found and obj is not None:
                files_id = obj.file_id

        # If still not found, add it
        if files_id is None:
            # HACK: Force sha1sum etc into dentry
            dentry["sha1sum"] = dfentry["sha1sum"]
            dentry["sha256sum"] = dfentry["sha256sum"]
            poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
            pfs.append(poolfile)
            files_id = poolfile.file_id
        # NOTE(review): an `else:` line is presumably elided above this
        # existing-poolfile lookup.
        poolfile = get_poolfile_by_id(files_id, session)
        if poolfile is None:
            utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
        pfs.append(poolfile)

        df.poolfile_id = files_id

    # Add the src_uploaders to the DB
    session.refresh(source)
    source.uploaders = [source.maintainer]
    if u.pkg.dsc.has_key("uploaders"):
        for up in split_uploaders(u.pkg.dsc["uploaders"]):
            source.uploaders.append(get_or_set_maintainer(up, session))

    return source, dsc_component, dsc_location_id, pfs

__all__.append('add_dsc_to_db')
def add_deb_to_db(u, filename, session=None):
    """
    Contrary to what you might expect, this routine deals with both
    debs and udebs. That info is in 'dbtype', whilst 'type' is
    'deb' for both of them

    NOTE(review): excerpt — several original lines are elided (presumably
    the creation of `bin` and the session add/flush); code kept verbatim.
    """
    entry = u.pkg.files[filename]

    bin.package = entry["package"]
    bin.version = entry["version"]
    bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
    bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    bin.arch_id = get_architecture(entry["architecture"], session).arch_id
    bin.binarytype = entry["dbtype"]

    filename = entry["pool name"] + filename
    fullpath = os.path.join(cnf["Dir::Pool"], filename)
    if not entry.get("location id", None):
        entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id

    if entry.get("files id", None):
        poolfile = get_poolfile_by_id(bin.poolfile_id)
        bin.poolfile_id = entry["files id"]
        # NOTE(review): an `else:` line is presumably elided above this
        # new-poolfile branch.
        poolfile = add_poolfile(filename, entry, entry["location id"], session)
        bin.poolfile_id = entry["files id"] = poolfile.file_id

    # Binary uploads must correspond to exactly one known source package.
    bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
    if len(bin_sources) != 1:
        raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
                                  (bin.package, bin.version, entry["architecture"],
                                   filename, bin.binarytype, u.pkg.changes["fingerprint"])

    bin.source_id = bin_sources[0].source_id

    if entry.has_key("built-using"):
        for srcname, version in entry["built-using"]:
            exsources = get_sources_from_name(srcname, version, session=session)
            if len(exsources) != 1:
                raise NoSourceFieldError, "Unable to find source package (%s = %s) in Built-Using for %s (%s), %s, file %s, type %s, signed by %s" % \
                                          (srcname, version, bin.package, bin.version, entry["architecture"],
                                           filename, bin.binarytype, u.pkg.changes["fingerprint"])

            bin.extra_sources.append(exsources[0])

    # Add and flush object so it has an ID

    suite_names = u.pkg.changes["distribution"].keys()
    bin.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Deal with contents - disabled for now
    #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
    #    print "REJECT\nCould not determine contents of package %s" % bin.package
    #    session.rollback()
    #    raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)

    return bin, poolfile

__all__.append('add_deb_to_db')
2860 ################################################################################
class SourceACL(object):
    # ORM-mapped row of table source_acl (see DBConn.__setupmappers).
    def __init__(self, *args, **kwargs):
        # NOTE(review): body elided in this excerpt (presumably `pass`); the
        # `return` below belongs to an elided `def __repr__(self):`.
        return '<SourceACL %s>' % self.source_acl_id

__all__.append('SourceACL')
2871 ################################################################################
class SrcFormat(object):
    # ORM-mapped row of table src_format (see DBConn.__setupmappers).
    def __init__(self, *args, **kwargs):
        # NOTE(review): body elided in this excerpt (presumably `pass`); the
        # `return` below belongs to an elided `def __repr__(self):`.
        return '<SrcFormat %s>' % (self.format_name)

__all__.append('SrcFormat')
2882 ################################################################################
# (Display name, Suite attribute) pairs used when rendering suite details
# (consumed via `for disp, field in SUITE_FIELDS` in Suite below).
# NOTE(review): one entry (original line 2888) is elided in this excerpt.
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
# Why the heck don't we have any UNIQUE constraints in table suite?
# TODO: Add UNIQUE constraints for appropriate columns.
class Suite(ORMObject):
    # NOTE(review): excerpt — some original lines are elided below; the code
    # lines themselves are kept verbatim.
    def __init__(self, suite_name = None, version = None):
        self.suite_name = suite_name
        self.version = version

    def properties(self):
        # NOTE(review): the continuation line of this list is elided.
        return ['suite_name', 'version', 'sources_count', 'binaries_count', \

    def not_null_constraints(self):
        return ['suite_name']

    def __eq__(self, val):
        # Allow comparing a Suite directly against a suite-name string.
        if isinstance(val, str):
            return (self.suite_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.suite_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    # NOTE(review): a method header (presumably `def details(self):`) and its
    # setup (`ret = []` plus an `if val is not None:` guard) are elided here;
    # the loop renders one "Name: value" line per SUITE_FIELDS entry.
        for disp, field in SUITE_FIELDS:
            val = getattr(self, field, None)
                ret.append("%s: %s" % (disp, val))
        return "\n".join(ret)

    def get_architectures(self, skipsrc=False, skipall=False):
        """
        Returns list of Architecture objects

        @type skipsrc: boolean
        @param skipsrc: Whether to skip returning the 'source' architecture entry

        @type skipall: boolean
        @param skipall: Whether to skip returning the 'all' architecture entry

        @return: list of Architecture objects for the given name (may be empty)
        """
        # NOTE(review): the `if skipsrc:` / `if skipall:` guard lines are
        # elided in this excerpt; the indented filters below belong to them.
        q = object_session(self).query(Architecture).with_parent(self)
            q = q.filter(Architecture.arch_string != 'source')
            q = q.filter(Architecture.arch_string != 'all')
        return q.order_by(Architecture.arch_string).all()

    def get_sources(self, source):
        """
        Returns a query object representing DBSource that is part of C{suite}.

          - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}

        @type source: string
        @param source: source package name

        @rtype: sqlalchemy.orm.query.Query
        @return: a query of DBSource
        """
        session = object_session(self)
        # NOTE(review): the continuation of this query expression is elided.
        return session.query(DBSource).filter_by(source = source). \

    def get_overridesuite(self):
        # NOTE(review): the body of the `is None` branch is elided
        # (presumably `return self`).
        if self.overridesuite is None:
        return object_session(self).query(Suite).filter_by(suite_name=self.overridesuite).one()

__all__.append('Suite')
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Suite object for the requested suite name (None if not present)
    """
    # NOTE(review): excerpt — the `try:` line and both return statements are
    # elided; the orphan `except` below belongs to that elided `try:`.
    q = session.query(Suite).filter_by(suite_name=suite)
    except NoResultFound:

__all__.append('get_suite')
3009 ################################################################################
# TODO: should be removed because the implementation is too trivial
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name

    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: list of Architecture objects for the given name (may be empty)
    """
    # Thin delegation to Suite.get_architectures (hence the TODO above).
    return get_suite(suite, session).get_architectures(skipsrc, skipall)

__all__.append('get_suite_architectures')
3040 ################################################################################
class SuiteSrcFormat(object):
    # ORM-mapped row of table suite_src_formats (see DBConn.__setupmappers).
    def __init__(self, *args, **kwargs):
        # NOTE(review): body elided in this excerpt (presumably `pass`); the
        # `return` below belongs to an elided `def __repr__(self):`.
        return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)

__all__.append('SuiteSrcFormat')
def get_suite_src_formats(suite, session=None):
    """
    Returns list of allowed SrcFormat for C{suite}.

    @param suite: Suite name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: the list of allowed source formats for I{suite}
    """
    q = session.query(SrcFormat)
    q = q.join(SuiteSrcFormat)
    q = q.join(Suite).filter_by(suite_name=suite)
    q = q.order_by('format_name')

    # NOTE(review): the final result line (presumably `return q.all()`) is
    # elided in this excerpt.

__all__.append('get_suite_src_formats')
3076 ################################################################################
class Uid(ORMObject):
    def __init__(self, uid = None, name = None):
        # NOTE(review): attribute assignments elided in this excerpt
        # (presumably self.uid = uid; self.name = name).

    def __eq__(self, val):
        # Allow comparing a Uid directly against a uid string.
        if isinstance(val, str):
            return (self.uid == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.uid != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        return ['uid', 'name', 'fingerprint']

    def not_null_constraints(self):
        # NOTE(review): return value elided in this excerpt.

__all__.append('Uid')
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @return: the uid object for the given uidname
    """
    # NOTE(review): excerpt — the `try:` line, the creation of the new Uid in
    # the not-found branch and the return statements are elided.
    q = session.query(Uid).filter_by(uid=uidname)
    except NoResultFound:
        session.commit_or_flush()

__all__.append('get_or_set_uid')
def get_uid_from_fingerprint(fpr, session=None):
    """Return the Uid associated with fingerprint C{fpr} via the Fingerprint table."""
    # NOTE(review): excerpt — the `try:` line and the return statements are
    # elided; the orphan `except` below belongs to that elided `try:`.
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)
    except NoResultFound:

__all__.append('get_uid_from_fingerprint')
3149 ################################################################################
class UploadBlock(object):
    # ORM-mapped row of table upload_blocks (see DBConn.__setupmappers).
    def __init__(self, *args, **kwargs):
        # NOTE(review): body elided in this excerpt (presumably `pass`); the
        # `return` below belongs to an elided `def __repr__(self):`.
        return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)

__all__.append('UploadBlock')
3160 ################################################################################
class MetadataKey(ORMObject):
    # A metadata field name shared by binaries_metadata / source_metadata.
    def __init__(self, key = None):
        # NOTE(review): assignment elided (presumably self.key = key).

    def properties(self):
        # NOTE(review): return value elided in this excerpt.

    def not_null_constraints(self):
        # NOTE(review): return value elided in this excerpt.

__all__.append('MetadataKey')
def get_or_set_metadatakey(keyname, session=None):
    """
    Returns MetadataKey object for given keyname.

    If no matching keyname is found, a row is inserted.

    @type keyname: string
    @param keyname: The keyname to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @return: the metadatakey object for the given keyname
    """
    # NOTE(review): excerpt — the `try:` line, the session.add of the new key
    # and the return statements are elided.
    q = session.query(MetadataKey).filter_by(key=keyname)
    except NoResultFound:
        ret = MetadataKey(keyname)
        session.commit_or_flush()

__all__.append('get_or_set_metadatakey')
3206 ################################################################################
class BinaryMetadata(ORMObject):
    # One (binary, key) -> value metadata row (table binaries_metadata).
    def __init__(self, key = None, value = None, binary = None):
        # NOTE(review): the key/value assignments are elided in this excerpt.
        self.binary = binary

    def properties(self):
        return ['binary', 'key', 'value']

    def not_null_constraints(self):
        # NOTE(review): return value elided in this excerpt.

__all__.append('BinaryMetadata')
3222 ################################################################################
class SourceMetadata(ORMObject):
    # One (source, key) -> value metadata row (table source_metadata).
    def __init__(self, key = None, value = None, source = None):
        # NOTE(review): the key/value assignments are elided in this excerpt.
        self.source = source

    def properties(self):
        return ['source', 'key', 'value']

    def not_null_constraints(self):
        # NOTE(review): return value elided in this excerpt.

__all__.append('SourceMetadata')
3238 ################################################################################
class VersionCheck(ORMObject):
    # A version-check rule between two suites (table version_check), e.g.
    # 'Enhances' as used by source_exists above.
    def __init__(self, *args, **kwargs):
        # NOTE(review): body elided in this excerpt (presumably `pass`).

    def properties(self):
        #return ['suite_id', 'check', 'reference_id']
        # NOTE(review): actual return value elided in this excerpt.

    def not_null_constraints(self):
        return ['suite', 'check', 'reference']

__all__.append('VersionCheck')
def get_version_checks(suite_name, check = None, session = None):
    """Return the VersionCheck rows for C{suite_name}, optionally filtered by C{check}."""
    # NOTE(review): excerpt — the unknown-suite guard, the `if check:` line
    # and the final return (presumably `return q.all()`) are elided.
    suite = get_suite(suite_name, session)

    # Make sure that what we return is iterable so that list comprehensions
    # involving this don't cause a traceback
    q = session.query(VersionCheck).filter_by(suite=suite)
        q = q.filter_by(check=check)

__all__.append('get_version_checks')
3267 ################################################################################
3269 class DBConn(object):
3271 database module init.
    def __init__(self, *args, **kwargs):
        """Initialise the shared database-connection state."""
        # Borg-style sharing: every DBConn instance uses the same __dict__,
        # so engine/session state is set up only once.
        self.__dict__ = self.__shared_state

        if not getattr(self, 'initialised', False):
            self.initialised = True
            # 'debug' kwarg toggles engine echo (see __createconn).
            self.debug = kwargs.has_key('debug')
            # NOTE(review): the one-time setup call(s) that follow are elided
            # in this excerpt (presumably self.__createconn()).
    def __setuptables(self):
        """Reflect archive tables and views from the database into tbl_*/view_* attributes."""
        # NOTE(review): excerpt — the `tables = (` opener, many table-name
        # entries, the closing `)` and the `views = (` opener are elided; the
        # remaining entries are kept verbatim.
        'binaries_metadata',
        'build_queue_files',
        'build_queue_policy_files',
        'changes_pending_binaries',
        'changes_pending_files',
        'changes_pending_source',
        'changes_pending_files_map',
        'changes_pending_source_files',
        'changes_pool_files',
        'external_overrides',
        'extra_src_references',
        # TODO: the maintainer column in table override should be removed.
        'suite_architectures',
        'suite_build_queue_copy',
        'suite_src_formats',
        'almost_obsolete_all_associations',
        'almost_obsolete_src_associations',
        'any_associations_source',
        'bin_associations_binaries',
        'binaries_suite_arch',
        'binfiles_suite_component_arch',
        'newest_all_associations',
        'newest_any_associations',
        'newest_src_association',
        'obsolete_all_associations',
        'obsolete_any_associations',
        'obsolete_any_by_all_associations',
        'obsolete_src_associations',
        'src_associations_bin',
        'src_associations_src',
        'suite_arch_by_name',

        # Reflect each table from the live schema (autoload) and expose it
        # as self.tbl_<name>.
        for table_name in tables:
            table = Table(table_name, self.db_meta, \
                autoload=True, useexisting=True)
            setattr(self, 'tbl_%s' % table_name, table)

        # Views are reflected the same way, exposed as self.view_<name>.
        for view_name in views:
            view = Table(view_name, self.db_meta, autoload=True)
            setattr(self, 'view_%s' % view_name, view)
    def __setupmappers(self):
        """Bind the ORM classes defined in this module to their reflected tables
        with classical SQLAlchemy mappers."""
        # NOTE(review): excerpt — several `properties = dict(` openers and
        # blank/continuation lines are elided below; the mapper calls are
        # kept verbatim.
        mapper(Architecture, self.tbl_architecture,
            properties = dict(arch_id = self.tbl_architecture.c.id,
                suites = relation(Suite, secondary=self.tbl_suite_architectures,
                    order_by='suite_name',
                    backref=backref('architectures', order_by='arch_string'))),
            extension = validator)

        mapper(Archive, self.tbl_archive,
            properties = dict(archive_id = self.tbl_archive.c.id,
                archive_name = self.tbl_archive.c.name))

        mapper(BuildQueue, self.tbl_build_queue,
            properties = dict(queue_id = self.tbl_build_queue.c.id))

        mapper(BuildQueueFile, self.tbl_build_queue_files,
            properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
                poolfile = relation(PoolFile, backref='buildqueueinstances')))

        # NOTE(review): `properties = dict(` opener elided here.
        mapper(BuildQueuePolicyFile, self.tbl_build_queue_policy_files,
                build_queue = relation(BuildQueue, backref='policy_queue_files'),
                file = relation(ChangePendingFile, lazy='joined')))

        mapper(DBBinary, self.tbl_binaries,
            properties = dict(binary_id = self.tbl_binaries.c.id,
                package = self.tbl_binaries.c.package,
                version = self.tbl_binaries.c.version,
                maintainer_id = self.tbl_binaries.c.maintainer,
                maintainer = relation(Maintainer),
                source_id = self.tbl_binaries.c.source,
                source = relation(DBSource, backref='binaries'),
                arch_id = self.tbl_binaries.c.architecture,
                architecture = relation(Architecture),
                poolfile_id = self.tbl_binaries.c.file,
                poolfile = relation(PoolFile, backref=backref('binary', uselist = False)),
                binarytype = self.tbl_binaries.c.type,
                fingerprint_id = self.tbl_binaries.c.sig_fpr,
                fingerprint = relation(Fingerprint),
                install_date = self.tbl_binaries.c.install_date,
                suites = relation(Suite, secondary=self.tbl_bin_associations,
                    backref=backref('binaries', lazy='dynamic')),
                extra_sources = relation(DBSource, secondary=self.tbl_extra_src_references,
                    backref=backref('extra_binary_references', lazy='dynamic')),
                key = relation(BinaryMetadata, cascade='all',
                    collection_class=attribute_mapped_collection('key'))),
            extension = validator)

        mapper(BinaryACL, self.tbl_binary_acl,
            properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))

        mapper(BinaryACLMap, self.tbl_binary_acl_map,
            properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
                fingerprint = relation(Fingerprint, backref="binary_acl_map"),
                architecture = relation(Architecture)))

        mapper(Component, self.tbl_component,
            properties = dict(component_id = self.tbl_component.c.id,
                component_name = self.tbl_component.c.name),
            extension = validator)

        mapper(DBConfig, self.tbl_config,
            properties = dict(config_id = self.tbl_config.c.id))

        mapper(DSCFile, self.tbl_dsc_files,
            properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
                source_id = self.tbl_dsc_files.c.source,
                source = relation(DBSource),
                poolfile_id = self.tbl_dsc_files.c.file,
                poolfile = relation(PoolFile)))

        # NOTE(review): `properties = dict(` opener elided here.
        mapper(ExternalOverride, self.tbl_external_overrides,
                suite_id = self.tbl_external_overrides.c.suite,
                suite = relation(Suite),
                component_id = self.tbl_external_overrides.c.component,
                component = relation(Component)))

        mapper(PoolFile, self.tbl_files,
            properties = dict(file_id = self.tbl_files.c.id,
                filesize = self.tbl_files.c.size,
                location_id = self.tbl_files.c.location,
                location = relation(Location,
                    # using lazy='dynamic' in the back
                    # reference because we have A LOT of
                    # files in one location
                    backref=backref('files', lazy='dynamic'))),
            extension = validator)

        mapper(Fingerprint, self.tbl_fingerprint,
            properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
                uid_id = self.tbl_fingerprint.c.uid,
                uid = relation(Uid),
                keyring_id = self.tbl_fingerprint.c.keyring,
                keyring = relation(Keyring),
                source_acl = relation(SourceACL),
                binary_acl = relation(BinaryACL)),
            extension = validator)

        mapper(Keyring, self.tbl_keyrings,
            properties = dict(keyring_name = self.tbl_keyrings.c.name,
                keyring_id = self.tbl_keyrings.c.id))

        mapper(DBChange, self.tbl_changes,
            properties = dict(change_id = self.tbl_changes.c.id,
                poolfiles = relation(PoolFile,
                    secondary=self.tbl_changes_pool_files,
                    backref="changeslinks"),
                seen = self.tbl_changes.c.seen,
                source = self.tbl_changes.c.source,
                binaries = self.tbl_changes.c.binaries,
                architecture = self.tbl_changes.c.architecture,
                distribution = self.tbl_changes.c.distribution,
                urgency = self.tbl_changes.c.urgency,
                maintainer = self.tbl_changes.c.maintainer,
                changedby = self.tbl_changes.c.changedby,
                date = self.tbl_changes.c.date,
                version = self.tbl_changes.c.version,
                files = relation(ChangePendingFile,
                    secondary=self.tbl_changes_pending_files_map,
                    backref="changesfile"),
                in_queue_id = self.tbl_changes.c.in_queue,
                in_queue = relation(PolicyQueue,
                    primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
                approved_for_id = self.tbl_changes.c.approved_for))

        mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
            properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))

        mapper(ChangePendingFile, self.tbl_changes_pending_files,
            properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
                filename = self.tbl_changes_pending_files.c.filename,
                size = self.tbl_changes_pending_files.c.size,
                md5sum = self.tbl_changes_pending_files.c.md5sum,
                sha1sum = self.tbl_changes_pending_files.c.sha1sum,
                sha256sum = self.tbl_changes_pending_files.c.sha256sum))

        mapper(ChangePendingSource, self.tbl_changes_pending_source,
            properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
                change = relation(DBChange),
                maintainer = relation(Maintainer,
                    primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
                changedby = relation(Maintainer,
                    primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
                fingerprint = relation(Fingerprint),
                source_files = relation(ChangePendingFile,
                    secondary=self.tbl_changes_pending_source_files,
                    backref="pending_sources")))

        mapper(KeyringACLMap, self.tbl_keyring_acl_map,
            properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
                keyring = relation(Keyring, backref="keyring_acl_map"),
                architecture = relation(Architecture)))

        mapper(Location, self.tbl_location,
            properties = dict(location_id = self.tbl_location.c.id,
                component_id = self.tbl_location.c.component,
                component = relation(Component, backref='location'),
                archive_id = self.tbl_location.c.archive,
                archive = relation(Archive),
                # FIXME: the 'type' column is old cruft and
                # should be removed in the future.
                archive_type = self.tbl_location.c.type),
            extension = validator)

        mapper(Maintainer, self.tbl_maintainer,
            properties = dict(maintainer_id = self.tbl_maintainer.c.id,
                maintains_sources = relation(DBSource, backref='maintainer',
                    primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
                changed_sources = relation(DBSource, backref='changedby',
                    primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
            extension = validator)

        mapper(NewComment, self.tbl_new_comments,
            properties = dict(comment_id = self.tbl_new_comments.c.id))

        mapper(Override, self.tbl_override,
            properties = dict(suite_id = self.tbl_override.c.suite,
                suite = relation(Suite, \
                    backref=backref('overrides', lazy='dynamic')),
                package = self.tbl_override.c.package,
                component_id = self.tbl_override.c.component,
                component = relation(Component, \
                    backref=backref('overrides', lazy='dynamic')),
                priority_id = self.tbl_override.c.priority,
                priority = relation(Priority, \
                    backref=backref('overrides', lazy='dynamic')),
                section_id = self.tbl_override.c.section,
                section = relation(Section, \
                    backref=backref('overrides', lazy='dynamic')),
                overridetype_id = self.tbl_override.c.type,
                overridetype = relation(OverrideType, \
                    backref=backref('overrides', lazy='dynamic'))))

        mapper(OverrideType, self.tbl_override_type,
            properties = dict(overridetype = self.tbl_override_type.c.type,
                overridetype_id = self.tbl_override_type.c.id))

        mapper(PolicyQueue, self.tbl_policy_queue,
            properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))

        mapper(Priority, self.tbl_priority,
            properties = dict(priority_id = self.tbl_priority.c.id))

        mapper(Section, self.tbl_section,
            properties = dict(section_id = self.tbl_section.c.id,
                section=self.tbl_section.c.section))

        mapper(DBSource, self.tbl_source,
            properties = dict(source_id = self.tbl_source.c.id,
                version = self.tbl_source.c.version,
                maintainer_id = self.tbl_source.c.maintainer,
                poolfile_id = self.tbl_source.c.file,
                poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
                fingerprint_id = self.tbl_source.c.sig_fpr,
                fingerprint = relation(Fingerprint),
                changedby_id = self.tbl_source.c.changedby,
                srcfiles = relation(DSCFile,
                    primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
                suites = relation(Suite, secondary=self.tbl_src_associations,
                    backref=backref('sources', lazy='dynamic')),
                uploaders = relation(Maintainer,
                    secondary=self.tbl_src_uploaders),
                key = relation(SourceMetadata, cascade='all',
                    collection_class=attribute_mapped_collection('key'))),
            extension = validator)

        mapper(SourceACL, self.tbl_source_acl,
            properties = dict(source_acl_id = self.tbl_source_acl.c.id))

        mapper(SrcFormat, self.tbl_src_format,
            properties = dict(src_format_id = self.tbl_src_format.c.id,
                format_name = self.tbl_src_format.c.format_name))

        mapper(Suite, self.tbl_suite,
            properties = dict(suite_id = self.tbl_suite.c.id,
                policy_queue = relation(PolicyQueue),
                copy_queues = relation(BuildQueue,
                    secondary=self.tbl_suite_build_queue_copy)),
            extension = validator)

        mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
            properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
                suite = relation(Suite, backref='suitesrcformats'),
                src_format_id = self.tbl_suite_src_formats.c.src_format,
                src_format = relation(SrcFormat)))

        mapper(Uid, self.tbl_uid,
            properties = dict(uid_id = self.tbl_uid.c.id,
                fingerprint = relation(Fingerprint)),
            extension = validator)

        mapper(UploadBlock, self.tbl_upload_blocks,
            properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
                fingerprint = relation(Fingerprint, backref="uploadblocks"),
                uid = relation(Uid, backref="uploadblocks")))

        # NOTE(review): `properties = dict(` openers elided for the remaining
        # mappers below.
        mapper(BinContents, self.tbl_bin_contents,
                binary = relation(DBBinary,
                    backref=backref('contents', lazy='dynamic', cascade='all')),
                file = self.tbl_bin_contents.c.file))

        mapper(SrcContents, self.tbl_src_contents,
                source = relation(DBSource,
                    backref=backref('contents', lazy='dynamic', cascade='all')),
                file = self.tbl_src_contents.c.file))

        mapper(MetadataKey, self.tbl_metadata_keys,
                key_id = self.tbl_metadata_keys.c.key_id,
                key = self.tbl_metadata_keys.c.key))

        mapper(BinaryMetadata, self.tbl_binaries_metadata,
                binary_id = self.tbl_binaries_metadata.c.bin_id,
                binary = relation(DBBinary),
                key_id = self.tbl_binaries_metadata.c.key_id,
                key = relation(MetadataKey),
                value = self.tbl_binaries_metadata.c.value))

        mapper(SourceMetadata, self.tbl_source_metadata,
                source_id = self.tbl_source_metadata.c.src_id,
                source = relation(DBSource),
                key_id = self.tbl_source_metadata.c.key_id,
                key = relation(MetadataKey),
                value = self.tbl_source_metadata.c.value))

        mapper(VersionCheck, self.tbl_version_check,
                suite_id = self.tbl_version_check.c.suite,
                suite = relation(Suite, primaryjoin=self.tbl_version_check.c.suite==self.tbl_suite.c.id),
                reference_id = self.tbl_version_check.c.reference,
                reference = relation(Suite, primaryjoin=self.tbl_version_check.c.reference==self.tbl_suite.c.id, lazy='joined')))
3669 ## Connection functions
3670 def __createconn(self):
3671 from config import Config
3673 if cnf.has_key("DB::Service"):
3674 connstr = "postgresql://service=%s" % cnf["DB::Service"]
3675 elif cnf.has_key("DB::Host"):
3677 connstr = "postgresql://%s" % cnf["DB::Host"]
3678 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
3679 connstr += ":%s" % cnf["DB::Port"]
3680 connstr += "/%s" % cnf["DB::Name"]
3683 connstr = "postgresql:///%s" % cnf["DB::Name"]
3684 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
3685 connstr += "?port=%s" % cnf["DB::Port"]
3687 engine_args = { 'echo': self.debug }
3688 if cnf.has_key('DB::PoolSize'):
3689 engine_args['pool_size'] = int(cnf['DB::PoolSize'])
3690 if cnf.has_key('DB::MaxOverflow'):
3691 engine_args['max_overflow'] = int(cnf['DB::MaxOverflow'])
3692 if sa_major_version == '0.6' and cnf.has_key('DB::Unicode') and \
3693 cnf['DB::Unicode'] == 'false':
3694 engine_args['use_native_unicode'] = False
3696 # Monkey patch a new dialect in in order to support service= syntax
3697 import sqlalchemy.dialects.postgresql
3698 from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
3699 class PGDialect_psycopg2_dak(PGDialect_psycopg2):
3700 def create_connect_args(self, url):
3701 if str(url).startswith('postgresql://service='):
3703 servicename = str(url)[21:]
3704 return (['service=%s' % servicename], {})
3706 return PGDialect_psycopg2.create_connect_args(self, url)
3708 sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak
3710 self.db_pg = create_engine(connstr, **engine_args)
3711 self.db_meta = MetaData()
3712 self.db_meta.bind = self.db_pg
3713 self.db_smaker = sessionmaker(bind=self.db_pg,
3717 self.__setuptables()
3718 self.__setupmappers()
3719 self.pid = os.getpid()
3721 def session(self, work_mem = 0):
3723 Returns a new session object. If a work_mem parameter is provided a new
3724 transaction is started and the work_mem parameter is set for this
3725 transaction. The work_mem parameter is measured in MB. A default value
3726 will be used if the parameter is not set.
3728 # reinitialize DBConn in new processes
3729 if self.pid != os.getpid():
3732 session = self.db_smaker()
3734 session.execute("SET LOCAL work_mem TO '%d MB'" % work_mem)
# Export the singleton connection class as part of the module's public API.
__all__.append('DBConn')