5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
38 from os.path import normpath
45 from daklib.gpg import SignedFile
52 import simplejson as json
54 from datetime import datetime, timedelta
55 from errno import ENOENT
56 from tempfile import mkstemp, mkdtemp
57 from subprocess import Popen, PIPE
58 from tarfile import TarFile
60 from inspect import getargspec
63 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
65 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
66 backref, MapperExtension, EXT_CONTINUE, object_mapper, clear_mappers
67 from sqlalchemy import types as sqltypes
68 from sqlalchemy.orm.collections import attribute_mapped_collection
69 from sqlalchemy.ext.associationproxy import association_proxy
71 # Don't remove this, we re-export the exceptions to scripts which import us
72 from sqlalchemy.exc import *
73 from sqlalchemy.orm.exc import NoResultFound
75 # Only import Config until Queue stuff is changed to store its config
77 from config import Config
78 from textutils import fix_maintainer
79 from dak_exceptions import DBUpdateError, NoSourceFieldError, FileExistsError
81 # suppress some deprecation warnings in squeeze related to sqlalchemy
83 warnings.filterwarnings('ignore', \
84 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
86 warnings.filterwarnings('ignore', \
87 "Predicate of partial index .* ignored during reflection", \
91 ################################################################################
# Patch in support for the debversion field type so that it works during
# reflection. NOTE(review): interior lines of this section are missing from
# this excerpt; the two assignments below were presumably wrapped in a
# try/except to pick the right base class for the installed SQLAlchemy.
# that is for sqlalchemy 0.6
UserDefinedType = sqltypes.UserDefinedType
# this one for sqlalchemy 0.5
UserDefinedType = sqltypes.TypeEngine

# Custom column type mapping PostgreSQL's 'debversion' domain so SQLAlchemy
# can reflect and bind it. NOTE(review): method bodies are missing from this
# excerpt.
class DebVersion(UserDefinedType):
    def get_col_spec(self):
    def bind_processor(self, dialect):
    # ' = None' is needed for sqlalchemy 0.5:
    def result_processor(self, dialect, coltype = None):

# Register 'debversion' with the PostgreSQL dialect for supported versions;
# anything outside 0.5-0.7 is rejected outright.
sa_major_version = sqlalchemy.__version__[0:3]
if sa_major_version in ["0.5", "0.6", "0.7"]:
    from sqlalchemy.databases import postgres
    postgres.ischema_names['debversion'] = DebVersion
raise Exception("dak only ported to SQLA versions 0.5 to 0.7. See daklib/dbconn.py")
121 ################################################################################
123 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
125 ################################################################################
def session_wrapper(fn):
    """
    Wrapper around common ".., session=None):" handling. If the wrapped
    function is called without passing 'session', we create a local one
    and destroy it when the function ends.

    Also attaches a commit_or_flush method to the session; if we created a
    local session, this is a synonym for session.commit(), otherwise it is a
    synonym for session.flush().
    """
    # NOTE(review): several interior lines (else branches, try/finally) are
    # missing from this excerpt; the comments below describe only what is
    # visible here.
    def wrapped(*args, **kwargs):
        private_transaction = False

        # Find the session object
        session = kwargs.get('session')

        if len(args) <= len(getargspec(fn)[0]) - 1:
            # No session specified as last argument or in kwargs
            private_transaction = True
            session = kwargs['session'] = DBConn().session()
        # Session is last argument in args
            session = args[-1] = DBConn().session()
            private_transaction = True

        # A locally created session may be committed; a caller-supplied one
        # is only flushed so the caller keeps transaction control.
        if private_transaction:
            session.commit_or_flush = session.commit
            session.commit_or_flush = session.flush

        return fn(*args, **kwargs)

        if private_transaction:
            # We created a session; close it.

    # Preserve the wrapped function's metadata for introspection.
    wrapped.__doc__ = fn.__doc__
    wrapped.func_name = fn.func_name
174 __all__.append('session_wrapper')
176 ################################################################################
178 class ORMObject(object):
180 ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
181 derived classes must implement the properties() method.
184 def properties(self):
186 This method should be implemented by all derived classes and returns a
187 list of the important properties. The properties 'created' and
188 'modified' will be added automatically. A suffix '_count' should be
189 added to properties that are lists or query objects. The most important
190 property name should be returned as the first element in the list
191 because it is used by repr().
197 Returns a JSON representation of the object based on the properties
198 returned from the properties() method.
201 # add created and modified
202 all_properties = self.properties() + ['created', 'modified']
203 for property in all_properties:
204 # check for list or query
205 if property[-6:] == '_count':
206 real_property = property[:-6]
207 if not hasattr(self, real_property):
209 value = getattr(self, real_property)
210 if hasattr(value, '__len__'):
213 elif hasattr(value, 'count'):
214 # query (but not during validation)
215 if self.in_validation:
217 value = value.count()
219 raise KeyError('Do not understand property %s.' % property)
221 if not hasattr(self, property):
224 value = getattr(self, property)
228 elif isinstance(value, ORMObject):
229 # use repr() for ORMObject types
232 # we want a string for all other types because json cannot
235 data[property] = value
236 return json.dumps(data)
240 Returns the name of the class.
242 return type(self).__name__
246 Returns a short string representation of the object using the first
247 element from the properties() method.
249 primary_property = self.properties()[0]
250 value = getattr(self, primary_property)
251 return '<%s %s>' % (self.classname(), str(value))
255 Returns a human readable form of the object using the properties()
258 return '<%s %s>' % (self.classname(), self.json())
260 def not_null_constraints(self):
262 Returns a list of properties that must be not NULL. Derived classes
263 should override this method if needed.
267 validation_message = \
268 "Validation failed because property '%s' must not be empty in object\n%s"
270 in_validation = False
274 This function validates the not NULL constraints as returned by
275 not_null_constraints(). It raises the DBUpdateError exception if
278 for property in self.not_null_constraints():
279 # TODO: It is a bit awkward that the mapper configuration allow
280 # directly setting the numeric _id columns. We should get rid of it
282 if hasattr(self, property + '_id') and \
283 getattr(self, property + '_id') is not None:
285 if not hasattr(self, property) or getattr(self, property) is None:
286 # str() might lead to races due to a 2nd flush
287 self.in_validation = True
288 message = self.validation_message % (property, str(self))
289 self.in_validation = False
290 raise DBUpdateError(message)
294 def get(cls, primary_key, session = None):
296 This is a support function that allows getting an object by its primary
299 Architecture.get(3[, session])
301 instead of the more verbose
303 session.query(Architecture).get(3)
305 return session.query(cls).get(primary_key)
307 def session(self, replace = False):
309 Returns the current session that is associated with the object. May
310 return None is object is in detached state.
313 return object_session(self)
315 def clone(self, session = None):
317 Clones the current object in a new session and returns the new clone. A
318 fresh session is created if the optional session parameter is not
319 provided. The function will fail if a session is provided and has
322 RATIONALE: SQLAlchemy's session is not thread safe. This method clones
323 an existing object to allow several threads to work with their own
324 instances of an ORMObject.
326 WARNING: Only persistent (committed) objects can be cloned. Changes
327 made to the original object that are not committed yet will get lost.
328 The session of the new object will always be rolled back to avoid
332 if self.session() is None:
333 raise RuntimeError( \
334 'Method clone() failed for detached object:\n%s' % self)
335 self.session().flush()
336 mapper = object_mapper(self)
337 primary_key = mapper.primary_key_from_instance(self)
338 object_class = self.__class__
340 session = DBConn().session()
341 elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
342 raise RuntimeError( \
343 'Method clone() failed due to unflushed changes in session.')
344 new_object = session.query(object_class).get(primary_key)
346 if new_object is None:
347 raise RuntimeError( \
348 'Method clone() failed for non-persistent object:\n%s' % self)
351 __all__.append('ORMObject')
353 ################################################################################
class Validator(MapperExtension):
    """
    This class calls the validate() method for each instance for the
    'before_update' and 'before_insert' events. A global object validator is
    used for configuring the individual mappers.
    """
    # NOTE(review): the method bodies (presumably 'instance.validate()' and
    # 'return EXT_CONTINUE') are missing from this excerpt.
    def before_update(self, mapper, connection, instance):

    def before_insert(self, mapper, connection, instance):

# Shared extension instance used when configuring the individual mappers.
validator = Validator()
372 ################################################################################
class Architecture(ORMObject):
    """ORM class for a Debian architecture (e.g. 'amd64' or 'source')."""

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Allow comparing an Architecture directly against a plain name.
        if not isinstance(val, str):
            # Defer to the default comparison machinery.
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        # Mirror __eq__ so inequality against strings stays consistent.
        if not isinstance(val, str):
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # 'arch_string' first: it is the primary property used by repr().
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']
397 __all__.append('Architecture')
def get_architecture(architecture, session=None):
    """
    Returns database id for given C{architecture}.

    @type architecture: string
    @param architecture: The name of the architecture

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Architecture object for the given arch (None if not present)
    """
    # NOTE(review): the @session_wrapper decorator and the try/return lines
    # are missing from this excerpt; only the query setup is visible.
    q = session.query(Architecture).filter_by(arch_string=architecture)

    except NoResultFound:

__all__.append('get_architecture')
424 # TODO: should be removed because the implementation is too trivial
426 def get_architecture_suites(architecture, session=None):
428 Returns list of Suite objects for given C{architecture} name
430 @type architecture: str
431 @param architecture: Architecture name to search for
433 @type session: Session
434 @param session: Optional SQL session object (a temporary one will be
435 generated if not supplied)
438 @return: list of Suite objects for the given name (may be empty)
441 return get_architecture(architecture, session).suites
443 __all__.append('get_architecture_suites')
445 ################################################################################
class Archive(object):
    # Trivial ORM mapping of the 'archive' table.
    def __init__(self, *args, **kwargs):
        # NOTE(review): the intervening 'def __repr__(self):' line is missing
        # from this excerpt; the return below belongs to __repr__.
        return '<Archive %s>' % self.archive_name
454 __all__.append('Archive')
def get_archive(archive, session=None):
    """
    returns database id for given C{archive}.

    @type archive: string
    @param archive: the name of the archive

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Archive object for the given name (None if not present)
    """
    # Archive names are stored lower-case in the database.
    archive = archive.lower()

    q = session.query(Archive).filter_by(archive_name=archive)
    # NOTE(review): the try/return lines around this except are missing from
    # this excerpt.
    except NoResultFound:
481 __all__.append('get_archive')
483 ################################################################################
class BinContents(ORMObject):
    # Associates a file path with the binary package that ships it.
    def __init__(self, file = None, binary = None):
        # NOTE(review): the attribute assignments of __init__ are missing
        # from this excerpt.

    def properties(self):
        return ['file', 'binary']
493 __all__.append('BinContents')
495 ################################################################################
def subprocess_setup():
    """Restore the default SIGPIPE disposition in a child process.

    Python installs its own SIGPIPE handler on startup, which is usually
    not what non-Python subprocesses expect; used as a Popen preexec_fn.
    """
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
502 class DBBinary(ORMObject):
503 def __init__(self, package = None, source = None, version = None, \
504 maintainer = None, architecture = None, poolfile = None, \
505 binarytype = 'deb', fingerprint=None):
506 self.package = package
508 self.version = version
509 self.maintainer = maintainer
510 self.architecture = architecture
511 self.poolfile = poolfile
512 self.binarytype = binarytype
513 self.fingerprint = fingerprint
517 return self.binary_id
519 def properties(self):
520 return ['package', 'version', 'maintainer', 'source', 'architecture', \
521 'poolfile', 'binarytype', 'fingerprint', 'install_date', \
522 'suites_count', 'binary_id', 'contents_count', 'extra_sources']
524 def not_null_constraints(self):
525 return ['package', 'version', 'maintainer', 'source', 'poolfile', \
528 metadata = association_proxy('key', 'value')
530 def get_component_name(self):
531 return self.poolfile.location.component.component_name
533 def scan_contents(self):
535 Yields the contents of the package. Only regular files are yielded and
536 the path names are normalized after converting them from either utf-8
537 or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the
538 package does not contain any regular file.
540 fullpath = self.poolfile.fullpath
541 dpkg = Popen(['dpkg-deb', '--fsys-tarfile', fullpath], stdout = PIPE,
542 preexec_fn = subprocess_setup)
543 tar = TarFile.open(fileobj = dpkg.stdout, mode = 'r|')
544 for member in tar.getmembers():
545 if not member.isdir():
546 name = normpath(member.name)
547 # enforce proper utf-8 encoding
550 except UnicodeDecodeError:
551 name = name.decode('iso8859-1').encode('utf-8')
557 def read_control(self):
559 Reads the control information from a binary.
562 @return: stanza text of the control section.
565 fullpath = self.poolfile.fullpath
566 deb_file = open(fullpath, 'r')
567 stanza = utils.deb_extract_control(deb_file)
572 def read_control_fields(self):
574 Reads the control information from a binary and return
578 @return: fields of the control section as a dictionary.
581 stanza = self.read_control()
582 return apt_pkg.TagSection(stanza)
584 __all__.append('DBBinary')
587 def get_suites_binary_in(package, session=None):
589 Returns list of Suite objects which given C{package} name is in
592 @param package: DBBinary package name to search for
595 @return: list of Suite objects for the given package
598 return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()
600 __all__.append('get_suites_binary_in')
def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
    """
    Returns the component name of the newest binary package in suite_list or
    None if no package is found. The result can be optionally filtered by a list
    of architecture names.

    @param package: DBBinary package name to search for

    @type suite_list: list of str
    @param suite_list: list of suite_name items

    @type arch_list: list of str
    @param arch_list: optional list of arch_string items that defaults to []

    @rtype: str or NoneType
    @return: name of component or None
    """
    # NOTE(review): arch_list uses a mutable default ([]); harmless here as
    # the visible code never mutates it, but a None default is conventional.
    q = session.query(DBBinary).filter_by(package = package). \
        join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
    if len(arch_list) > 0:
        q = q.join(DBBinary.architecture). \
            filter(Architecture.arch_string.in_(arch_list))
    # Newest version wins.
    binary = q.order_by(desc(DBBinary.version)).first()
    # NOTE(review): the 'binary is None' guard lines are missing from this
    # excerpt; the return below is only reached when a binary was found.
    return binary.get_component_name()
633 __all__.append('get_component_by_package_suite')
635 ################################################################################
637 class BinaryACL(object):
638 def __init__(self, *args, **kwargs):
642 return '<BinaryACL %s>' % self.binary_acl_id
644 __all__.append('BinaryACL')
646 ################################################################################
648 class BinaryACLMap(object):
649 def __init__(self, *args, **kwargs):
653 return '<BinaryACLMap %s>' % self.binary_acl_map_id
655 __all__.append('BinaryACLMap')
657 ################################################################################
662 ArchiveDir "%(archivepath)s";
663 OverrideDir "%(overridedir)s";
664 CacheDir "%(cachedir)s";
669 Packages::Compress ". bzip2 gzip";
670 Sources::Compress ". bzip2 gzip";
675 bindirectory "incoming"
680 BinOverride "override.sid.all3";
681 BinCacheDB "packages-accepted.db";
683 FileList "%(filelist)s";
686 Packages::Extensions ".deb .udeb";
689 bindirectory "incoming/"
692 BinOverride "override.sid.all3";
693 SrcOverride "override.sid.all3.src";
694 FileList "%(filelist)s";
698 class BuildQueue(object):
699 def __init__(self, *args, **kwargs):
703 return '<BuildQueue %s>' % self.queue_name
705 def write_metadata(self, starttime, force=False):
706 # Do we write out metafiles?
707 if not (force or self.generate_metadata):
710 session = DBConn().session().object_session(self)
712 fl_fd = fl_name = ac_fd = ac_name = None
714 arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
715 startdir = os.getcwd()
718 # Grab files we want to include
719 newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
720 newer += session.query(BuildQueuePolicyFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueuePolicyFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
721 # Write file list with newer files
722 (fl_fd, fl_name) = mkstemp()
724 os.write(fl_fd, '%s\n' % n.fullpath)
729 # Write minimal apt.conf
730 # TODO: Remove hardcoding from template
731 (ac_fd, ac_name) = mkstemp()
732 os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
734 'cachedir': cnf["Dir::Cache"],
735 'overridedir': cnf["Dir::Override"],
739 # Run apt-ftparchive generate
740 os.chdir(os.path.dirname(ac_name))
741 os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
743 # Run apt-ftparchive release
744 # TODO: Eww - fix this
745 bname = os.path.basename(self.path)
749 # We have to remove the Release file otherwise it'll be included in the
752 os.unlink(os.path.join(bname, 'Release'))
756 os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
758 # Crude hack with open and append, but this whole section is and should be redone.
759 if self.notautomatic:
760 release=open("Release", "a")
761 release.write("NotAutomatic: yes\n")
766 keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
767 if cnf.has_key("Dinstall::SigningPubKeyring"):
768 keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
770 os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
772 # Move the files if we got this far
773 os.rename('Release', os.path.join(bname, 'Release'))
775 os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
777 # Clean up any left behind files
804 def clean_and_update(self, starttime, Logger, dryrun=False):
805 """WARNING: This routine commits for you"""
806 session = DBConn().session().object_session(self)
808 if self.generate_metadata and not dryrun:
809 self.write_metadata(starttime)
811 # Grab files older than our execution time
812 older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
813 older += session.query(BuildQueuePolicyFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueuePolicyFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
819 Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
821 Logger.log(["I: Removing %s from the queue" % o.fullpath])
822 os.unlink(o.fullpath)
825 # If it wasn't there, don't worry
826 if e.errno == ENOENT:
829 # TODO: Replace with proper logging call
830 Logger.log(["E: Could not remove %s" % o.fullpath])
837 for f in os.listdir(self.path):
838 if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
841 if not self.contains_filename(f):
842 fp = os.path.join(self.path, f)
844 Logger.log(["I: Would remove unused link %s" % fp])
846 Logger.log(["I: Removing unused link %s" % fp])
850 Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
852 def contains_filename(self, filename):
855 @returns True if filename is supposed to be in the queue; False otherwise
857 session = DBConn().session().object_session(self)
858 if session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id, filename = filename).count() > 0:
860 elif session.query(BuildQueuePolicyFile).filter_by(build_queue = self, filename = filename).count() > 0:
864 def add_file_from_pool(self, poolfile):
865 """Copies a file into the pool. Assumes that the PoolFile object is
866 attached to the same SQLAlchemy session as the Queue object is.
868 The caller is responsible for committing after calling this function."""
869 poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
871 # Check if we have a file of this name or this ID already
872 for f in self.queuefiles:
873 if (f.fileid is not None and f.fileid == poolfile.file_id) or \
874 (f.poolfile is not None and f.poolfile.filename == poolfile_basename):
875 # In this case, update the BuildQueueFile entry so we
876 # don't remove it too early
877 f.lastused = datetime.now()
878 DBConn().session().object_session(poolfile).add(f)
881 # Prepare BuildQueueFile object
882 qf = BuildQueueFile()
883 qf.build_queue_id = self.queue_id
884 qf.filename = poolfile_basename
886 targetpath = poolfile.fullpath
887 queuepath = os.path.join(self.path, poolfile_basename)
891 # We need to copy instead of symlink
893 utils.copy(targetpath, queuepath)
894 # NULL in the fileid field implies a copy
897 os.symlink(targetpath, queuepath)
898 qf.fileid = poolfile.file_id
899 except FileExistsError:
900 if not poolfile.identical_to(queuepath):
905 # Get the same session as the PoolFile is using and add the qf to it
906 DBConn().session().object_session(poolfile).add(qf)
910 def add_changes_from_policy_queue(self, policyqueue, changes):
912 Copies a changes from a policy queue together with its poolfiles.
914 @type policyqueue: PolicyQueue
915 @param policyqueue: policy queue to copy the changes from
917 @type changes: DBChange
918 @param changes: changes to copy to this build queue
920 for policyqueuefile in changes.files:
921 self.add_file_from_policy_queue(policyqueue, policyqueuefile)
922 for poolfile in changes.poolfiles:
923 self.add_file_from_pool(poolfile)
925 def add_file_from_policy_queue(self, policyqueue, policyqueuefile):
927 Copies a file from a policy queue.
928 Assumes that the policyqueuefile is attached to the same SQLAlchemy
929 session as the Queue object is. The caller is responsible for
930 committing after calling this function.
932 @type policyqueue: PolicyQueue
933 @param policyqueue: policy queue to copy the file from
935 @type policyqueuefile: ChangePendingFile
936 @param policyqueuefile: file to be added to the build queue
938 session = DBConn().session().object_session(policyqueuefile)
940 # Is the file already there?
942 f = session.query(BuildQueuePolicyFile).filter_by(build_queue=self, file=policyqueuefile).one()
943 f.lastused = datetime.now()
945 except NoResultFound:
946 pass # continue below
948 # We have to add the file.
949 f = BuildQueuePolicyFile()
951 f.file = policyqueuefile
952 f.filename = policyqueuefile.filename
954 source = os.path.join(policyqueue.path, policyqueuefile.filename)
957 # Always copy files from policy queues as they might move around.
959 utils.copy(source, target)
960 except FileExistsError:
961 if not policyqueuefile.identical_to(target):
969 __all__.append('BuildQueue')
def get_build_queue(queuename, session=None):
    """
    Returns BuildQueue object for given C{queue name}, creating it if it does not
    exist.

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: BuildQueue object for the given queue
    """
    q = session.query(BuildQueue).filter_by(queue_name=queuename)

    # NOTE(review): the try/return lines around this except are missing from
    # this excerpt.
    except NoResultFound:
995 __all__.append('get_build_queue')
997 ################################################################################
class BuildQueueFile(object):
    """
    BuildQueueFile represents a file in a build queue coming from a pool.
    """
    def __init__(self, *args, **kwargs):
        # NOTE(review): the '__repr__' def line is missing from this excerpt;
        # the return below belongs to __repr__.
        return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
        # NOTE(review): the 'fullpath' property def line is missing from this
        # excerpt; the return below belongs to it.
        return os.path.join(self.buildqueue.path, self.filename)
1015 __all__.append('BuildQueueFile')
1017 ################################################################################
1019 class BuildQueuePolicyFile(object):
1021 BuildQueuePolicyFile represents a file in a build queue that comes from a
1022 policy queue (and not a pool).
1025 def __init__(self, *args, **kwargs):
1029 #def filename(self):
1030 # return self.file.filename
1034 return os.path.join(self.build_queue.path, self.filename)
1036 __all__.append('BuildQueuePolicyFile')
1038 ################################################################################
1040 class ChangePendingBinary(object):
1041 def __init__(self, *args, **kwargs):
1045 return '<ChangePendingBinary %s>' % self.change_pending_binary_id
1047 __all__.append('ChangePendingBinary')
1049 ################################################################################
1051 class ChangePendingFile(object):
1052 def __init__(self, *args, **kwargs):
1056 return '<ChangePendingFile %s>' % self.change_pending_file_id
1058 def identical_to(self, filename):
1060 compare size and hash with the given file
1063 @return: true if the given file has the same size and hash as this object; false otherwise
1065 st = os.stat(filename)
1066 if self.size != st.st_size:
1069 f = open(filename, "r")
1070 sha256sum = apt_pkg.sha256sum(f)
1071 if sha256sum != self.sha256sum:
1076 __all__.append('ChangePendingFile')
1078 ################################################################################
1080 class ChangePendingSource(object):
1081 def __init__(self, *args, **kwargs):
1085 return '<ChangePendingSource %s>' % self.change_pending_source_id
1087 __all__.append('ChangePendingSource')
1089 ################################################################################
class Component(ORMObject):
    """ORM class for an archive component (e.g. 'main' or 'contrib')."""

    def __init__(self, component_name = None):
        self.component_name = component_name

    def __eq__(self, val):
        # Support comparing a Component directly with its name string.
        if not isinstance(val, str):
            # Fall back to the standard comparison machinery.
            return NotImplemented
        return self.component_name == val

    def __ne__(self, val):
        # Keep inequality consistent with __eq__ for string operands.
        if not isinstance(val, str):
            return NotImplemented
        return self.component_name != val

    def properties(self):
        # 'component_name' first: it is the primary property used by repr().
        return ['component_name', 'component_id', 'description', \
            'location_count', 'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        return ['component_name']
1115 __all__.append('Component')
1118 def get_component(component, session=None):
1120 Returns database id for given C{component}.
1122 @type component: string
1123 @param component: The name of the override type
1126 @return: the database id for the given component
1129 component = component.lower()
1131 q = session.query(Component).filter_by(component_name=component)
1135 except NoResultFound:
1138 __all__.append('get_component')
def get_component_names(session=None):
    """
    Returns list of strings of component names.

    @return: list of strings of component names
    """
    return [ x.component_name for x in session.query(Component).all() ]
1151 __all__.append('get_component_names')
1153 ################################################################################
1155 class DBConfig(object):
1156 def __init__(self, *args, **kwargs):
1160 return '<DBConfig %s>' % self.name
1162 __all__.append('DBConfig')
1164 ################################################################################
1167 def get_or_set_contents_file_id(filename, session=None):
1169 Returns database id for given filename.
1171 If no matching file is found, a row is inserted.
1173 @type filename: string
1174 @param filename: The filename
1175 @type session: SQLAlchemy
1176 @param session: Optional SQL session object (a temporary one will be
1177 generated if not supplied). If not passed, a commit will be performed at
1178 the end of the function, otherwise the caller is responsible for commiting.
1181 @return: the database id for the given component
1184 q = session.query(ContentFilename).filter_by(filename=filename)
1187 ret = q.one().cafilename_id
1188 except NoResultFound:
1189 cf = ContentFilename()
1190 cf.filename = filename
1192 session.commit_or_flush()
1193 ret = cf.cafilename_id
1197 __all__.append('get_or_set_contents_file_id')
# Purpose: run a raw SQL query joining content_associations with the binaries,
# override and section tables, returning a result proxy of (path/file, ...)
# rows for a suite/overridetype, optionally restricted to one section.
# NOTE(review): listing elides some SELECT columns and docstring delimiters.
1200 def get_contents(suite, overridetype, section=None, session=None):
1202 Returns contents for a suite / overridetype combination, limiting
1203 to a section if not None.
1206 @param suite: Suite object
1208 @type overridetype: OverrideType
1209 @param overridetype: OverrideType object
1211 @type section: Section
1212 @param section: Optional section object to limit results to
1214 @type session: SQLAlchemy
1215 @param session: Optional SQL session object (a temporary one will be
1216 generated if not supplied)
1218 @rtype: ResultsProxy
1219 @return: ResultsProxy object set up to return tuples of (filename, section,
1223 # find me all of the contents for a given suite
1224 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
1228 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
1229 JOIN content_file_names n ON (c.filename=n.id)
1230 JOIN binaries b ON (b.id=c.binary_pkg)
1231 JOIN override o ON (o.package=b.package)
1232 JOIN section s ON (s.id=o.section)
1233 WHERE o.suite = :suiteid AND o.type = :overridetypeid
1234 AND b.type=:overridetypename"""
# Bind parameters keep the query safe from SQL injection.
1236 vals = {'suiteid': suite.suite_id,
1237 'overridetypeid': overridetype.overridetype_id,
1238 'overridetypename': overridetype.overridetype}
1240 if section is not None:
1241 contents_q += " AND s.id = :sectionid"
1242 vals['sectionid'] = section.section_id
1244 contents_q += " ORDER BY fn"
1246 return session.execute(contents_q, vals)
1248 __all__.append('get_contents')
1250 ################################################################################
# ORM model for a row of the content_file_paths table; repr shows the path.
1252 class ContentFilepath(object):
1253 def __init__(self, *args, **kwargs):
1257 return '<ContentFilepath %s>' % self.filepath
1259 __all__.append('ContentFilepath')
# Purpose: get-or-create for content_file_paths — mirrors
# get_or_set_contents_file_id but keyed on the directory path.
# NOTE(review): try:/session.add/return lines are elided in this listing.
1262 def get_or_set_contents_path_id(filepath, session=None):
1264 Returns database id for given path.
1266 If no matching file is found, a row is inserted.
1268 @type filepath: string
1269 @param filepath: The filepath
1271 @type session: SQLAlchemy
1272 @param session: Optional SQL session object (a temporary one will be
1273 generated if not supplied). If not passed, a commit will be performed at
1274 the end of the function, otherwise the caller is responsible for commiting.
1277 @return: the database id for the given path
1280 q = session.query(ContentFilepath).filter_by(filepath=filepath)
1283 ret = q.one().cafilepath_id
1284 except NoResultFound:
1285 cf = ContentFilepath()
1286 cf.filepath = filepath
1288 session.commit_or_flush()
1289 ret = cf.cafilepath_id
1293 __all__.append('get_or_set_contents_path_id')
1295 ################################################################################
# ORM model linking a binary package to a contents file entry.
1297 class ContentAssociation(object):
1298 def __init__(self, *args, **kwargs):
1302 return '<ContentAssociation %s>' % self.ca_id
1304 __all__.append('ContentAssociation')
# Purpose: bulk-insert bin_contents rows associating each path in `fullpaths`
# with `binary_id`.  Uses a generator so the dicts are built lazily.
# NOTE(review): the surrounding try/except/rollback frame and the success
# return are elided in this listing — privatetrans handling is incomplete here.
1306 def insert_content_paths(binary_id, fullpaths, session=None):
1308 Make sure given path is associated with given binary id
1310 @type binary_id: int
1311 @param binary_id: the id of the binary
1312 @type fullpaths: list
1313 @param fullpaths: the list of paths of the file being associated with the binary
1314 @type session: SQLAlchemy session
1315 @param session: Optional SQLAlchemy session. If this is passed, the caller
1316 is responsible for ensuring a transaction has begun and committing the
1317 results or rolling back based on the result code. If not passed, a commit
1318 will be performed at the end of the function, otherwise the caller is
1319 responsible for commiting.
1321 @return: True upon success
1324 privatetrans = False
1326 session = DBConn().session()
1331 def generate_path_dicts():
1332 for fullpath in fullpaths:
# Strip a leading "./" so paths are stored in canonical relative form.
1333 if fullpath.startswith( './' ):
1334 fullpath = fullpath[2:]
1336 yield {'filename':fullpath, 'id': binary_id }
1338 for d in generate_path_dicts():
1339 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
1348 traceback.print_exc()
1350 # Only rollback if we set up the session ourself
1357 __all__.append('insert_content_paths')
1359 ################################################################################
# ORM model for the dsc_files table (files referenced by a source .dsc).
1361 class DSCFile(object):
1362 def __init__(self, *args, **kwargs):
1366 return '<DSCFile %s>' % self.dscfile_id
1368 __all__.append('DSCFile')
# Purpose: query DSCFile rows, with each keyword argument acting as an
# optional AND filter; a final q.all() (elided in this listing) returns the
# possibly-empty result list.
1371 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
1373 Returns a list of DSCFiles which may be empty
1375 @type dscfile_id: int (optional)
1376 @param dscfile_id: the dscfile_id of the DSCFiles to find
1378 @type source_id: int (optional)
1379 @param source_id: the source id related to the DSCFiles to find
1381 @type poolfile_id: int (optional)
1382 @param poolfile_id: the poolfile id related to the DSCFiles to find
1385 @return: Possibly empty list of DSCFiles
1388 q = session.query(DSCFile)
1390 if dscfile_id is not None:
1391 q = q.filter_by(dscfile_id=dscfile_id)
1393 if source_id is not None:
1394 q = q.filter_by(source_id=source_id)
1396 if poolfile_id is not None:
1397 q = q.filter_by(poolfile_id=poolfile_id)
1401 __all__.append('get_dscfiles')
1403 ################################################################################
# ORM model for external_overrides: per-package key/value override data.
1405 class ExternalOverride(ORMObject):
1406 def __init__(self, *args, **kwargs):
1410 return '<ExternalOverride %s = %s: %s>' % (self.package, self.key, self.value)
1412 __all__.append('ExternalOverride')
1414 ################################################################################
# ORM model for a file stored in the archive pool.  Provides computed
# fullpath/basename helpers and hash/size validation against on-disk files.
1416 class PoolFile(ORMObject):
1417 def __init__(self, filename = None, location = None, filesize = -1, \
1419 self.filename = filename
1420 self.location = location
1421 self.filesize = filesize
1422 self.md5sum = md5sum
# fullpath: location path + pool-relative filename.
1426 return os.path.join(self.location.path, self.filename)
1430 return os.path.basename(self.filename)
# is_valid: cheap integrity check using size and md5 only.
1432 def is_valid(self, filesize = -1, md5sum = None):
1433 return self.filesize == long(filesize) and self.md5sum == md5sum
1435 def properties(self):
1436 return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
1437 'sha256sum', 'location', 'source', 'binary', 'last_used']
1439 def not_null_constraints(self):
1440 return ['filename', 'md5sum', 'location']
# identical_to: stronger check — compares size, then sha256 of the given
# on-disk file against this row.  Return statements elided in this listing.
1442 def identical_to(self, filename):
1444 compare size and hash with the given file
1447 @return: true if the given file has the same size and hash as this object; false otherwise
1449 st = os.stat(filename)
1450 if self.filesize != st.st_size:
1453 f = open(filename, "r")
1454 sha256sum = apt_pkg.sha256sum(f)
1455 if sha256sum != self.sha256sum:
1460 __all__.append('PoolFile')
# Purpose: look up a PoolFile by location + filename and validate its stored
# size/md5sum, returning (found_and_valid, poolfile_or_None).
1463 def check_poolfile(filename, filesize, md5sum, location_id, session=None):
1466 (ValidFileFound [boolean], PoolFile object or None)
1468 @type filename: string
1469 @param filename: the filename of the file to check against the DB
1472 @param filesize: the size of the file to check against the DB
1474 @type md5sum: string
1475 @param md5sum: the md5sum of the file to check against the DB
1477 @type location_id: int
1478 @param location_id: the id of the location to look in
1481 @return: Tuple of length 2.
1482 - If valid pool file found: (C{True}, C{PoolFile object})
1483 - If valid pool file not found:
1484 - (C{False}, C{None}) if no file found
1485 - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
# Navigate via the Location row's `files` dynamic relation.
1488 poolfile = session.query(Location).get(location_id). \
1489 files.filter_by(filename=filename).first()
1491 if poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum):
1494 return (valid, poolfile)
1496 __all__.append('check_poolfile')
1498 # TODO: the implementation can trivially be inlined at the place where the
1499 # function is called
# Purpose: primary-key lookup of a PoolFile; returns None on a miss.
1501 def get_poolfile_by_id(file_id, session=None):
1503 Returns a PoolFile objects or None for the given id
1506 @param file_id: the id of the file to look for
1508 @rtype: PoolFile or None
1509 @return: either the PoolFile object or None
1512 return session.query(PoolFile).get(file_id)
1514 __all__.append('get_poolfile_by_id')
# Purpose: find PoolFiles whose pool path ends with "/<filename>" via a
# SQL LIKE pattern.  The final q.all() is elided in this listing.
1517 def get_poolfile_like_name(filename, session=None):
1519 Returns an array of PoolFile objects which are like the given name
1521 @type filename: string
1522 @param filename: the filename of the file to check against the DB
1525 @return: array of PoolFile objects
1528 # TODO: There must be a way of properly using bind parameters with %FOO%
1529 q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1533 __all__.append('get_poolfile_like_name')
# Purpose: construct a new PoolFile from a hash/size dict and add it to the
# session; a flush (elided here) assigns its file id before return.
1536 def add_poolfile(filename, datadict, location_id, session=None):
1538 Add a new file to the pool
1540 @type filename: string
1541 @param filename: filename
1543 @type datadict: dict
1544 @param datadict: dict with needed data
1546 @type location_id: int
1547 @param location_id: database id of the location
1550 @return: the PoolFile object created
1552 poolfile = PoolFile()
1553 poolfile.filename = filename
# datadict must supply "size", "md5sum", "sha1sum", "sha256sum".
1554 poolfile.filesize = datadict["size"]
1555 poolfile.md5sum = datadict["md5sum"]
1556 poolfile.sha1sum = datadict["sha1sum"]
1557 poolfile.sha256sum = datadict["sha256sum"]
1558 poolfile.location_id = location_id
1560 session.add(poolfile)
1561 # Flush to get a file id (NB: This is not a commit)
1566 __all__.append('add_poolfile')
1568 ################################################################################
# ORM model for a GPG fingerprint row, linked to keyring and uid.
1570 class Fingerprint(ORMObject):
1571 def __init__(self, fingerprint = None):
1572 self.fingerprint = fingerprint
1574 def properties(self):
1575 return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
1578 def not_null_constraints(self):
1579 return ['fingerprint']
1581 __all__.append('Fingerprint')
# Purpose: read-only lookup of a Fingerprint by its fpr string; returns None
# when not found (NoResultFound branch — return lines elided in listing).
1584 def get_fingerprint(fpr, session=None):
1586 Returns Fingerprint object for given fpr.
1589 @param fpr: The fpr to find / add
1591 @type session: SQLAlchemy
1592 @param session: Optional SQL session object (a temporary one will be
1593 generated if not supplied).
1596 @return: the Fingerprint object for the given fpr or None
1599 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1603 except NoResultFound:
1608 __all__.append('get_fingerprint')
# Purpose: get-or-create variant of get_fingerprint — inserts a new
# Fingerprint row (and flushes/commits) when the fpr is unknown.
1611 def get_or_set_fingerprint(fpr, session=None):
1613 Returns Fingerprint object for given fpr.
1615 If no matching fpr is found, a row is inserted.
1618 @param fpr: The fpr to find / add
1620 @type session: SQLAlchemy
1621 @param session: Optional SQL session object (a temporary one will be
1622 generated if not supplied). If not passed, a commit will be performed at
1623 the end of the function, otherwise the caller is responsible for commiting.
1624 A flush will be performed either way.
1627 @return: the Fingerprint object for the given fpr
1630 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1634 except NoResultFound:
1635 fingerprint = Fingerprint()
1636 fingerprint.fingerprint = fpr
1637 session.add(fingerprint)
1638 session.commit_or_flush()
1643 __all__.append('get_or_set_fingerprint')
1645 ################################################################################
1647 # Helper routine for Keyring class
# Builds a display name from the LDAP cn/mn/sn attributes, skipping empty
# or "-" placeholder values, and joins the parts with spaces.
1648 def get_ldap_name(entry):
1650 for k in ["cn", "mn", "sn"]:
1652 if ret and ret[0] != "" and ret[0] != "-":
1654 return " ".join(name)
1656 ################################################################################
# ORM model plus helper logic for a GPG keyring: parses `gpg --with-colons`
# output, de-escapes GPG strings, and syncs key owners with LDAP or
# generates uids directly from the keyring contents.
1658 class Keyring(object):
1659 gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
1660 " --with-colons --fingerprint --fingerprint"
1665 def __init__(self, *args, **kwargs):
1669 return '<Keyring %s>' % self.keyring_name
# Convert GPG's \xNN escape sequences back into literal characters.
1671 def de_escape_gpg_str(self, txt):
1672 esclist = re.split(r'(\\x..)', txt)
1673 for x in range(1,len(esclist),2):
1674 esclist[x] = "%c" % (int(esclist[x][2:],16))
1675 return "".join(esclist)
# Split a uid string into (real name, email), dropping any "(comment)".
1677 def parse_address(self, uid):
1678 """parses uid and returns a tuple of real name and email address"""
1680 (name, address) = email.Utils.parseaddr(uid)
1681 name = re.sub(r"\s*[(].*[)]", "", name)
1682 name = self.de_escape_gpg_str(name)
1685 return (name, address)
# Run gpg over `keyring` and populate self.keys / self.fpr_lookup from the
# colon-delimited records (pub/sub/uid/fpr lines).
1687 def load_keys(self, keyring):
1688 if not self.keyring_id:
1689 raise Exception('Must be initialized with database information')
1691 k = os.popen(self.gpg_invocation % keyring, "r")
1696 field = line.split(":")
1697 if field[0] == "pub":
1700 (name, addr) = self.parse_address(field[9])
1702 self.keys[key]["email"] = addr
1703 self.keys[key]["name"] = name
1704 self.keys[key]["fingerprints"] = []
1706 elif key and field[0] == "sub" and len(field) >= 12:
# A subkey is a signing key iff "s" appears in its capabilities field.
1707 signingkey = ("s" in field[11])
1708 elif key and field[0] == "uid":
1709 (name, addr) = self.parse_address(field[9])
1710 if "email" not in self.keys[key] and "@" in addr:
1711 self.keys[key]["email"] = addr
1712 self.keys[key]["name"] = name
1713 elif signingkey and field[0] == "fpr":
1714 self.keys[key]["fingerprints"].append(field[9])
1715 self.fpr_lookup[field[9]] = key
# Query LDAP for (uid, name, fingerprints) and map them onto loaded keys;
# returns (byname, byuid) dictionaries keyed by uid string and uid id.
1717 def import_users_from_ldap(self, session):
1721 LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
1722 LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
1724 l = ldap.open(LDAPServer)
1725 l.simple_bind_s("","")
1726 Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
1727 "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
1728 ["uid", "keyfingerprint", "cn", "mn", "sn"])
1730 ldap_fin_uid_id = {}
1737 uid = entry["uid"][0]
1738 name = get_ldap_name(entry)
1739 fingerprints = entry["keyFingerPrint"]
1741 for f in fingerprints:
1742 key = self.fpr_lookup.get(f, None)
1743 if key not in self.keys:
1745 self.keys[key]["uid"] = uid
1749 keyid = get_or_set_uid(uid, session).uid_id
1750 byuid[keyid] = (uid, name)
1751 byname[uid] = (keyid, name)
1753 return (byname, byuid)
# Derive uids straight from key email addresses using `format` as a
# template; keys without a usable email get an "invalid-uid" placeholder.
1755 def generate_users_from_keyring(self, format, session):
1759 for x in self.keys.keys():
1760 if "email" not in self.keys[x]:
1762 self.keys[x]["uid"] = format % "invalid-uid"
1764 uid = format % self.keys[x]["email"]
1765 keyid = get_or_set_uid(uid, session).uid_id
1766 byuid[keyid] = (uid, self.keys[x]["name"])
1767 byname[uid] = (keyid, self.keys[x]["name"])
1768 self.keys[x]["uid"] = uid
1771 uid = format % "invalid-uid"
1772 keyid = get_or_set_uid(uid, session).uid_id
1773 byuid[keyid] = (uid, "ungeneratable user id")
1774 byname[uid] = (keyid, "ungeneratable user id")
1776 return (byname, byuid)
1778 __all__.append('Keyring')
# Purpose: look up a Keyring row by name; the return-on-hit and
# return-None-on-miss lines are elided in this listing.
1781 def get_keyring(keyring, session=None):
1783 If C{keyring} does not have an entry in the C{keyrings} table yet, return None
1784 If C{keyring} already has an entry, simply return the existing Keyring
1786 @type keyring: string
1787 @param keyring: the keyring name
1790 @return: the Keyring object for this keyring
1793 q = session.query(Keyring).filter_by(keyring_name=keyring)
1797 except NoResultFound:
1800 __all__.append('get_keyring')
# Purpose: return keyring_name paths of all active keyrings, highest
# priority first (ORDER BY priority DESC).
1803 def get_active_keyring_paths(session=None):
1806 @return: list of active keyring paths
1808 return [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).order_by(desc(Keyring.priority)).all() ]
1810 __all__.append('get_active_keyring_paths')
# Purpose: convenience wrapper — first entry of get_active_keyring_paths()
# (the highest-priority keyring), or None when none are configured.
1813 def get_primary_keyring_path(session=None):
1815 Get the full path to the highest priority active keyring
1818 @return: path to the active keyring with the highest priority or None if no
1819 keyring is configured
1821 keyrings = get_active_keyring_paths()
1823 if len(keyrings) > 0:
1828 __all__.append('get_primary_keyring_path')
1830 ################################################################################
# ORM model mapping keyrings to ACLs.
1832 class KeyringACLMap(object):
1833 def __init__(self, *args, **kwargs):
1837 return '<KeyringACLMap %s>' % self.keyring_acl_map_id
1839 __all__.append('KeyringACLMap')
1841 ################################################################################
# ORM model for an uploaded .changes file.  clean_from_queue() detaches the
# change from its policy queue and clears the related file references.
1843 class DBChange(object):
1844 def __init__(self, *args, **kwargs):
1848 return '<DBChange %s>' % self.changesname
1850 def clean_from_queue(self):
1851 session = DBConn().session().object_session(self)
1853 # Remove changes_pool_files entries
1856 # Remove changes_pending_files references
1859 # Clear out of queue
1860 self.in_queue = None
1861 self.approved_for_id = None
1863 __all__.append('DBChange')
# Purpose: look up a DBChange row by its .changes filename; returns None on
# a miss (return lines elided in this listing).
1866 def get_dbchange(filename, session=None):
1868 returns DBChange object for given C(unknown).
1870 @type filename: string
1871 @param filename: the name of the file
1873 @type session: Session
1874 @param session: Optional SQLA session object (a temporary one will be
1875 generated if not supplied)
1878 @return: DBChange object for the given filename (C{None} if not present)
1881 q = session.query(DBChange).filter_by(changesname=filename)
1885 except NoResultFound:
1888 __all__.append('get_dbchange')
1890 ################################################################################
# ORM model for an archive location (a pool directory tied to a component).
1892 class Location(ORMObject):
1893 def __init__(self, path = None, component = None):
1895 self.component = component
1896 # the column 'type' should go away, see comment at mapper
# Locations created through this constructor are always pool-type.
1897 self.archive_type = 'pool'
1899 def properties(self):
1900 return ['path', 'location_id', 'archive_type', 'component', \
1903 def not_null_constraints(self):
1904 return ['path', 'archive_type']
1906 __all__.append('Location')
# Purpose: find the Location row for a path, optionally narrowed by archive
# and component via joins.  Hit/miss return lines are elided in this listing.
1909 def get_location(location, component=None, archive=None, session=None):
1911 Returns Location object for the given combination of location, component
1914 @type location: string
1915 @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}
1917 @type component: string
1918 @param component: the component name (if None, no restriction applied)
1920 @type archive: string
1921 @param archive: the archive name (if None, no restriction applied)
1923 @rtype: Location / None
1924 @return: Either a Location object or None if one can't be found
1927 q = session.query(Location).filter_by(path=location)
1929 if archive is not None:
1930 q = q.join(Archive).filter_by(archive_name=archive)
1932 if component is not None:
1933 q = q.join(Component).filter_by(component_name=component)
1937 except NoResultFound:
1940 __all__.append('get_location')
1942 ################################################################################
# ORM model for a maintainer ("Name <email>") string.
1944 class Maintainer(ORMObject):
1945 def __init__(self, name = None):
1948 def properties(self):
1949 return ['name', 'maintainer_id']
1951 def not_null_constraints(self):
# Split the stored name via fix_maintainer(); empty tuple when unset.
1954 def get_split_maintainer(self):
1955 if not hasattr(self, 'name') or self.name is None:
1956 return ('', '', '', '')
1958 return fix_maintainer(self.name.strip())
1960 __all__.append('Maintainer')
# Purpose: get-or-create a Maintainer row by name, same pattern as
# get_or_set_fingerprint (insert + commit_or_flush on a miss).
1963 def get_or_set_maintainer(name, session=None):
1965 Returns Maintainer object for given maintainer name.
1967 If no matching maintainer name is found, a row is inserted.
1970 @param name: The maintainer name to add
1972 @type session: SQLAlchemy
1973 @param session: Optional SQL session object (a temporary one will be
1974 generated if not supplied). If not passed, a commit will be performed at
1975 the end of the function, otherwise the caller is responsible for commiting.
1976 A flush will be performed either way.
1979 @return: the Maintainer object for the given maintainer
1982 q = session.query(Maintainer).filter_by(name=name)
1985 except NoResultFound:
1986 maintainer = Maintainer()
1987 maintainer.name = name
1988 session.add(maintainer)
1989 session.commit_or_flush()
1994 __all__.append('get_or_set_maintainer')
# Purpose: primary-key lookup of a Maintainer; None for an invalid id.
1997 def get_maintainer(maintainer_id, session=None):
1999 Return the name of the maintainer behind C{maintainer_id} or None if that
2000 maintainer_id is invalid.
2002 @type maintainer_id: int
2003 @param maintainer_id: the id of the maintainer
2006 @return: the Maintainer with this C{maintainer_id}
2009 return session.query(Maintainer).get(maintainer_id)
2011 __all__.append('get_maintainer')
2013 ################################################################################
# ORM model for a NEW-queue comment on a (package, version) pair.
2015 class NewComment(object):
2016 def __init__(self, *args, **kwargs):
2020 return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
2022 __all__.append('NewComment')
# Purpose: boolean existence check — is there any NewComment row for this
# (package, version)?
2025 def has_new_comment(package, version, session=None):
2027 Returns true if the given combination of C{package}, C{version} has a comment.
2029 @type package: string
2030 @param package: name of the package
2032 @type version: string
2033 @param version: package version
2035 @type session: Session
2036 @param session: Optional SQLA session object (a temporary one will be
2037 generated if not supplied)
2043 q = session.query(NewComment)
2044 q = q.filter_by(package=package)
2045 q = q.filter_by(version=version)
2047 return bool(q.count() > 0)
2049 __all__.append('has_new_comment')
# Purpose: query NewComment rows with optional AND filters on package,
# version and comment id; the final q.all() is elided in this listing.
2052 def get_new_comments(package=None, version=None, comment_id=None, session=None):
2054 Returns (possibly empty) list of NewComment objects for the given
2057 @type package: string (optional)
2058 @param package: name of the package
2060 @type version: string (optional)
2061 @param version: package version
2063 @type comment_id: int (optional)
2064 @param comment_id: An id of a comment
2066 @type session: Session
2067 @param session: Optional SQLA session object (a temporary one will be
2068 generated if not supplied)
2071 @return: A (possibly empty) list of NewComment objects will be returned
2074 q = session.query(NewComment)
2075 if package is not None: q = q.filter_by(package=package)
2076 if version is not None: q = q.filter_by(version=version)
2077 if comment_id is not None: q = q.filter_by(comment_id=comment_id)
2081 __all__.append('get_new_comments')
2083 ################################################################################
# ORM model for an override row (package's section/priority within a
# suite/component/overridetype).
2085 class Override(ORMObject):
2086 def __init__(self, package = None, suite = None, component = None, overridetype = None, \
2087 section = None, priority = None):
2088 self.package = package
2090 self.component = component
2091 self.overridetype = overridetype
2092 self.section = section
2093 self.priority = priority
2095 def properties(self):
2096 return ['package', 'suite', 'component', 'overridetype', 'section', \
2099 def not_null_constraints(self):
2100 return ['package', 'suite', 'component', 'overridetype', 'section']
2102 __all__.append('Override')
# Purpose: query Override rows for a package; suite/component/overridetype
# each accept a single name or a list and are applied as IN-filters via
# joins.  The final q.all() is elided in this listing.
2105 def get_override(package, suite=None, component=None, overridetype=None, session=None):
2107 Returns Override object for the given parameters
2109 @type package: string
2110 @param package: The name of the package
2112 @type suite: string, list or None
2113 @param suite: The name of the suite (or suites if a list) to limit to. If
2114 None, don't limit. Defaults to None.
2116 @type component: string, list or None
2117 @param component: The name of the component (or components if a list) to
2118 limit to. If None, don't limit. Defaults to None.
2120 @type overridetype: string, list or None
2121 @param overridetype: The name of the overridetype (or overridetypes if a list) to
2122 limit to. If None, don't limit. Defaults to None.
2124 @type session: Session
2125 @param session: Optional SQLA session object (a temporary one will be
2126 generated if not supplied)
2129 @return: A (possibly empty) list of Override objects will be returned
2132 q = session.query(Override)
2133 q = q.filter_by(package=package)
# Scalars are normalised to one-element lists so .in_() works uniformly.
2135 if suite is not None:
2136 if not isinstance(suite, list): suite = [suite]
2137 q = q.join(Suite).filter(Suite.suite_name.in_(suite))
2139 if component is not None:
2140 if not isinstance(component, list): component = [component]
2141 q = q.join(Component).filter(Component.component_name.in_(component))
2143 if overridetype is not None:
2144 if not isinstance(overridetype, list): overridetype = [overridetype]
2145 q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))
2149 __all__.append('get_override')
2152 ################################################################################
# ORM model for an override type (e.g. deb, udeb, dsc).
2154 class OverrideType(ORMObject):
2155 def __init__(self, overridetype = None):
2156 self.overridetype = overridetype
2158 def properties(self):
2159 return ['overridetype', 'overridetype_id', 'overrides_count']
2161 def not_null_constraints(self):
2162 return ['overridetype']
2164 __all__.append('OverrideType')
# Purpose: look up an OverrideType by name; hit/miss returns are elided in
# this listing.
2167 def get_override_type(override_type, session=None):
2169 Returns OverrideType object for given C{override type}.
2171 @type override_type: string
2172 @param override_type: The name of the override type
2174 @type session: Session
2175 @param session: Optional SQLA session object (a temporary one will be
2176 generated if not supplied)
2179 @return: the database id for the given override type
2182 q = session.query(OverrideType).filter_by(overridetype=override_type)
2186 except NoResultFound:
2189 __all__.append('get_override_type')
2191 ################################################################################
# ORM model for a policy queue (e.g. NEW, byhand).
2193 class PolicyQueue(object):
2194 def __init__(self, *args, **kwargs):
2198 return '<PolicyQueue %s>' % self.queue_name
2200 __all__.append('PolicyQueue')
# Purpose: look up a PolicyQueue by its queue name; hit/miss returns are
# elided in this listing.
2203 def get_policy_queue(queuename, session=None):
2205 Returns PolicyQueue object for given C{queue name}
2207 @type queuename: string
2208 @param queuename: The name of the queue
2210 @type session: Session
2211 @param session: Optional SQLA session object (a temporary one will be
2212 generated if not supplied)
2215 @return: PolicyQueue object for the given queue
2218 q = session.query(PolicyQueue).filter_by(queue_name=queuename)
2222 except NoResultFound:
2225 __all__.append('get_policy_queue')
# Purpose: look up a PolicyQueue by its filesystem path instead of its name.
# NOTE(review): the docstring mislabels the parameter as `queuename`; the
# actual parameter is `pathname`.
2228 def get_policy_queue_from_path(pathname, session=None):
2230 Returns PolicyQueue object for given C{path name}
2232 @type queuename: string
2233 @param queuename: The path
2235 @type session: Session
2236 @param session: Optional SQLA session object (a temporary one will be
2237 generated if not supplied)
2240 @return: PolicyQueue object for the given queue
2243 q = session.query(PolicyQueue).filter_by(path=pathname)
2247 except NoResultFound:
2250 __all__.append('get_policy_queue_from_path')
2252 ################################################################################
# ORM model for a package priority.  __eq__/__ne__ allow direct comparison
# against a plain priority-name string; other comparands fall back to the
# default comparison via NotImplemented.
2254 class Priority(ORMObject):
2255 def __init__(self, priority = None, level = None):
2256 self.priority = priority
2259 def properties(self):
2260 return ['priority', 'priority_id', 'level', 'overrides_count']
2262 def not_null_constraints(self):
2263 return ['priority', 'level']
2265 def __eq__(self, val):
2266 if isinstance(val, str):
2267 return (self.priority == val)
2268 # This signals to use the normal comparison operator
2269 return NotImplemented
2271 def __ne__(self, val):
2272 if isinstance(val, str):
2273 return (self.priority != val)
2274 # This signals to use the normal comparison operator
2275 return NotImplemented
2277 __all__.append('Priority')
# Purpose: look up a Priority by name; hit/miss returns are elided in this
# listing.
2280 def get_priority(priority, session=None):
2282 Returns Priority object for given C{priority name}.
2284 @type priority: string
2285 @param priority: The name of the priority
2287 @type session: Session
2288 @param session: Optional SQLA session object (a temporary one will be
2289 generated if not supplied)
2292 @return: Priority object for the given priority
2295 q = session.query(Priority).filter_by(priority=priority)
2299 except NoResultFound:
2302 __all__.append('get_priority')
# Purpose: build a {priority name: priority id} dict over all Priority rows.
2305 def get_priorities(session=None):
2307 Returns dictionary of priority names -> id mappings
2309 @type session: Session
2310 @param session: Optional SQL session object (a temporary one will be
2311 generated if not supplied)
2314 @return: dictionary of priority names -> id mappings
2318 q = session.query(Priority)
2320 ret[x.priority] = x.priority_id
2324 __all__.append('get_priorities')
2326 ################################################################################
# ORM model for an archive section; like Priority, comparable directly
# against a plain section-name string via __eq__/__ne__.
2328 class Section(ORMObject):
2329 def __init__(self, section = None):
2330 self.section = section
2332 def properties(self):
2333 return ['section', 'section_id', 'overrides_count']
2335 def not_null_constraints(self):
2338 def __eq__(self, val):
2339 if isinstance(val, str):
2340 return (self.section == val)
2341 # This signals to use the normal comparison operator
2342 return NotImplemented
2344 def __ne__(self, val):
2345 if isinstance(val, str):
2346 return (self.section != val)
2347 # This signals to use the normal comparison operator
2348 return NotImplemented
2350 __all__.append('Section')
# Purpose: look up a Section by name; hit/miss returns are elided in this
# listing.
2353 def get_section(section, session=None):
2355 Returns Section object for given C{section name}.
2357 @type section: string
2358 @param section: The name of the section
2360 @type session: Session
2361 @param session: Optional SQLA session object (a temporary one will be
2362 generated if not supplied)
2365 @return: Section object for the given section name
2368 q = session.query(Section).filter_by(section=section)
2372 except NoResultFound:
2375 __all__.append('get_section')
# Purpose: build a {section name: section id} dict over all Section rows.
2378 def get_sections(session=None):
2380 Returns dictionary of section names -> id mappings
2382 @type session: Session
2383 @param session: Optional SQL session object (a temporary one will be
2384 generated if not supplied)
2387 @return: dictionary of section names -> id mappings
2391 q = session.query(Section)
2393 ret[x.section] = x.section_id
2397 __all__.append('get_sections')
2399 ################################################################################
# ORM model for a source-package contents entry (file path + source).
2401 class SrcContents(ORMObject):
2402 def __init__(self, file = None, source = None):
2404 self.source = source
2406 def properties(self):
2407 return ['file', 'source']
2409 __all__.append('SrcContents')
2411 ################################################################################
# ORM model for a source package, with helpers to read its .dsc control
# fields and to scan the unpacked source for contained file names.
2413 class DBSource(ORMObject):
2414 def __init__(self, source = None, version = None, maintainer = None, \
2415 changedby = None, poolfile = None, install_date = None, fingerprint = None):
2416 self.source = source
2417 self.version = version
2418 self.maintainer = maintainer
2419 self.changedby = changedby
2420 self.poolfile = poolfile
2421 self.install_date = install_date
2422 self.fingerprint = fingerprint
2426 return self.source_id
2428 def properties(self):
2429 return ['source', 'source_id', 'maintainer', 'changedby', \
2430 'fingerprint', 'poolfile', 'version', 'suites_count', \
2431 'install_date', 'binaries_count', 'uploaders_count']
2433 def not_null_constraints(self):
2434 return ['source', 'version', 'install_date', 'maintainer', \
2435 'changedby', 'poolfile']
# Parse the (possibly signed) .dsc from the pool into apt_pkg tag fields.
2437 def read_control_fields(self):
2439 Reads the control information from a dsc
2442 @return: fields is the dsc information in a dictionary form
2444 fullpath = self.poolfile.fullpath
2445 contents = open(fullpath, 'r').read()
2446 signed_file = SignedFile(contents, keyrings=[], require_signature=False)
2447 fields = apt_pkg.TagSection(signed_file.contents)
2450 metadata = association_proxy('key', 'value')
2452 def get_component_name(self):
2453 return self.poolfile.location.component.component_name
# Unpack the source and yield its contained file names, forcing each name
# into UTF-8 (falling back from iso8859-1 when it is not valid UTF-8).
2455 def scan_contents(self):
2457 Returns a set of names for non directories. The path names are
2458 normalized after converting them from either utf-8 or iso8859-1
2461 fullpath = self.poolfile.fullpath
2462 from daklib.contents import UnpackedSource
2463 unpacked = UnpackedSource(fullpath)
2465 for name in unpacked.get_all_filenames():
2466 # enforce proper utf-8 encoding
2468 name.decode('utf-8')
2469 except UnicodeDecodeError:
2470 name = name.decode('iso8859-1').encode('utf-8')
2474 __all__.append('DBSource')
# Purpose: decide whether a matching source package (exact version, or the
# base version of a binNMU like 1.0-3+b1) exists in any of the given suites,
# also honouring suites related via an 'Enhances' version check.
# NOTE(review): `suites = ["any"]` is a mutable default argument — flagged,
# not changed here since this listing is incomplete.
2477 def source_exists(source, source_version, suites = ["any"], session=None):
2479 Ensure that source exists somewhere in the archive for the binary
2480 upload being processed.
2481 1. exact match => 1.0-3
2482 2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
2484 @type source: string
2485 @param source: source name
2487 @type source_version: string
2488 @param source_version: expected source version
2491 @param suites: list of suites to check in, default I{any}
2493 @type session: Session
2494 @param session: Optional SQLA session object (a temporary one will be
2495 generated if not supplied)
2498 @return: returns 1 if a source with expected version is found, otherwise 0
2505 from daklib.regexes import re_bin_only_nmu
# Strip the binNMU suffix (+bN) to recover the underlying source version.
2506 orig_source_version = re_bin_only_nmu.sub('', source_version)
2508 for suite in suites:
2509 q = session.query(DBSource).filter_by(source=source). \
2510 filter(DBSource.version.in_([source_version, orig_source_version]))
2512 # source must exist in 'suite' or a suite that is enhanced by 'suite'
2513 s = get_suite(suite, session)
2515 enhances_vcs = session.query(VersionCheck).filter(VersionCheck.suite==s).filter_by(check='Enhances')
2516 considered_suites = [ vc.reference for vc in enhances_vcs ]
2517 considered_suites.append(s)
2519 q = q.filter(DBSource.suites.any(Suite.suite_id.in_([s.suite_id for s in considered_suites])))
2524 # No source found so return not ok
2529 __all__.append('source_exists')
# Purpose: list every Suite that carries a source package of this name.
2532 def get_suites_source_in(source, session=None):
2534 Returns list of Suite objects which given C{source} name is in
2537 @param source: DBSource package name to search for
2540 @return: list of Suite objects for the given source
2543 return session.query(Suite).filter(Suite.sources.any(source=source)).all()
2545 __all__.append('get_suites_source_in')
# Purpose: query DBSource rows by name with optional version and
# dm_upload_allowed filters; the final q.all() is elided in this listing.
2548 def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
2550 Returns list of DBSource objects for given C{source} name and other parameters
2553 @param source: DBSource package name to search for
2555 @type version: str or None
2556 @param version: DBSource version name to search for or None if not applicable
2558 @type dm_upload_allowed: bool
2559 @param dm_upload_allowed: If None, no effect. If True or False, only
2560 return packages with that dm_upload_allowed setting
2562 @type session: Session
2563 @param session: Optional SQL session object (a temporary one will be
2564 generated if not supplied)
2567 @return: list of DBSource objects for the given name (may be empty)
2570 q = session.query(DBSource).filter_by(source=source)
2572 if version is not None:
2573 q = q.filter_by(version=version)
2575 if dm_upload_allowed is not None:
2576 q = q.filter_by(dm_upload_allowed=dm_upload_allowed)
2580 __all__.append('get_sources_from_name')
2582 # FIXME: This function fails badly if it finds more than 1 source package and
2583 # its implementation is trivial enough to be inlined.
# Fetch the single DBSource for (source, suite) by delegating to
# Suite.get_sources() and taking one result.
# NOTE(review): the @return line below says "the version", but the summary
# line (2587) and the query both indicate a DBSource object is returned --
# the docstring appears stale; confirm against callers.
# NOTE(review): the `try:` / `return` lines around q.one() are elided here.
2585 def get_source_in_suite(source, suite, session=None):
2587 Returns a DBSource object for a combination of C{source} and C{suite}.
2589 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2590 - B{suite} - a suite name, eg. I{unstable}
2592 @type source: string
2593 @param source: source package name
2596 @param suite: the suite name
2599 @return: the version for I{source} in I{suite}
2603 q = get_suite(suite, session).get_sources(source)
2606 except NoResultFound:
2609 __all__.append('get_source_in_suite')
# Import the control-field metadata of a DBBinary or DBSource object into
# the *_metadata tables, one MetadataKey per control field.
# NOTE(review): the nested `try:` lines (2619/2624 region) are elided in
# this listing; the visible except-clauses imply str() is attempted first,
# then UTF-8, then ISO-8859-1, and any further failure propagates.
2612 def import_metadata_into_db(obj, session=None):
2614 This routine works on either DBBinary or DBSource objects and imports
2615 their metadata into the database
2617 fields = obj.read_control_fields()
2618 for k in fields.keys():
# Encoding fallback chain: plain str(), then UTF-8, then ISO-8859-1.
2621 val = str(fields[k])
2622 except UnicodeEncodeError:
2623 # Fall back to UTF-8
2625 val = fields[k].encode('utf-8')
2626 except UnicodeEncodeError:
2627 # Finally try iso8859-1
2628 val = fields[k].encode('iso8859-1')
2629 # Otherwise we allow the exception to percolate up and we cause
2630 # a reject as someone is playing silly buggers
# The metadata mapping is keyed by MetadataKey rows (created on demand).
2632 obj.metadata[get_or_set_metadatakey(k, session)] = val
2634 session.commit_or_flush()
2636 __all__.append('import_metadata_into_db')
2639 ################################################################################
# Generator: split an Uploaders control field into individual entries.
# Commas inside email display names are handled by only splitting on a
# comma that directly follows a closing '>' (rewritten to a tab first).
2641 def split_uploaders(uploaders_list):
2643 Split the Uploaders field into the individual uploaders and yield each of
2644 them. Beware: email addresses might contain commas.
2647 for uploader in re.sub(">[ ]*,", ">\t", uploaders_list).split("\t"):
2648 yield uploader.strip()
# Register an uploaded .dsc (source package) in the database: create the
# DBSource row, its pool file, its suite associations, its dsc_files
# entries, and its uploaders.  Returns (source, dsc_component,
# dsc_location_id, pfs) where pfs is the list of new PoolFile objects.
# NOTE(review): this listing is heavily elided -- the `source = DBSource()`
# construction, `pfs = []` initialisation, `session.add(...)`/flush calls
# and parts of the dsc_files matching loop are not visible here.
2651 def add_dsc_to_db(u, filename, session=None):
2652 entry = u.pkg.files[filename]
2656 source.source = u.pkg.dsc["source"]
2657 source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
2658 source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
2659 # If Changed-By isn't available, fall back to maintainer
2660 if u.pkg.changes.has_key("changed-by"):
2661 source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
2663 source.changedby_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
2664 source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
2665 source.install_date = datetime.now().date()
2667 dsc_component = entry["component"]
2668 dsc_location_id = entry["location id"]
2670 source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")
2672 # Set up a new poolfile if necessary
2673 if not entry.has_key("files id") or not entry["files id"]:
# NOTE(review): `filename` is rebound here from the bare name to the
# pool-relative path; later uses of `filename` see the new value.
2674 filename = entry["pool name"] + filename
2675 poolfile = add_poolfile(filename, entry, dsc_location_id, session)
2677 pfs.append(poolfile)
2678 entry["files id"] = poolfile.file_id
2680 source.poolfile_id = entry["files id"]
# Associate the source with every suite named in the changes Distribution
# field.
2683 suite_names = u.pkg.changes["distribution"].keys()
2684 source.suites = session.query(Suite). \
2685 filter(Suite.suite_name.in_(suite_names)).all()
2687 # Add the source files to the DB (files and dsc_files)
2689 dscfile.source_id = source.source_id
2690 dscfile.poolfile_id = entry["files id"]
2691 session.add(dscfile)
2693 for dsc_file, dentry in u.pkg.dsc_files.items():
2695 df.source_id = source.source_id
2697 # If the .orig tarball is already in the pool, it's
2698 # files id is stored in dsc_files by check_dsc().
2699 files_id = dentry.get("files id", None)
2701 # Find the entry in the files hash
2702 # TODO: Bail out here properly
# NOTE(review): the body of this matching loop (binding `dfentry` to the
# files-hash entry for dsc_file) is elided in this listing.
2704 for f, e in u.pkg.files.items():
2709 if files_id is None:
2710 filename = dfentry["pool name"] + dsc_file
# Check whether an identical file already exists in the pool.
2712 (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
2713 # FIXME: needs to check for -1/-2 and or handle exception
2714 if found and obj is not None:
2715 files_id = obj.file_id
2718 # If still not found, add it
2719 if files_id is None:
2720 # HACK: Force sha1sum etc into dentry
2721 dentry["sha1sum"] = dfentry["sha1sum"]
2722 dentry["sha256sum"] = dfentry["sha256sum"]
2723 poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
2724 pfs.append(poolfile)
2725 files_id = poolfile.file_id
2727 poolfile = get_poolfile_by_id(files_id, session)
2728 if poolfile is None:
2729 utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
2730 pfs.append(poolfile)
2732 df.poolfile_id = files_id
2735 # Add the src_uploaders to the DB
2737 session.refresh(source)
# The maintainer is always an uploader; Uploaders entries are appended on
# top of that.
2738 source.uploaders = [source.maintainer]
2739 if u.pkg.dsc.has_key("uploaders"):
2740 for up in split_uploaders(u.pkg.dsc["uploaders"]):
2741 source.uploaders.append(get_or_set_maintainer(up, session))
2745 return source, dsc_component, dsc_location_id, pfs
2747 __all__.append('add_dsc_to_db')
# Register an uploaded .deb/.udeb in the database: create the DBBinary row,
# its pool file, resolve its source package (with a fallback for broken
# binaries), handle Built-Using references, and attach suite associations.
# Returns (bin, poolfile).
# NOTE(review): this listing is elided -- the `bin = DBBinary()`
# construction, session.add/flush calls and docstring quotes are missing.
2750 def add_deb_to_db(u, filename, session=None):
2752 Contrary to what you might expect, this routine deals with both
2753 debs and udebs. That info is in 'dbtype', whilst 'type' is
2754 'deb' for both of them
2757 entry = u.pkg.files[filename]
2760 bin.package = entry["package"]
2761 bin.version = entry["version"]
2762 bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
2763 bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
2764 bin.arch_id = get_architecture(entry["architecture"], session).arch_id
2765 bin.binarytype = entry["dbtype"]
# `filename` is rebound to the pool-relative path from here on.
2768 filename = entry["pool name"] + filename
2769 fullpath = os.path.join(cnf["Dir::Pool"], filename)
2770 if not entry.get("location id", None):
2771 entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id
2773 if entry.get("files id", None):
# NOTE(review): get_poolfile_by_id is called with bin.poolfile_id *before*
# it is assigned on the next line -- looks suspicious; confirm whether
# entry["files id"] was intended as the lookup key.
2774 poolfile = get_poolfile_by_id(bin.poolfile_id)
2775 bin.poolfile_id = entry["files id"]
2777 poolfile = add_poolfile(filename, entry, entry["location id"], session)
2778 bin.poolfile_id = entry["files id"] = poolfile.file_id
# Resolve the source package this binary was built from.
2781 bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
2783 # If we couldn't find anything and the upload contains Arch: source,
2784 # fall back to trying the source package, source version uploaded
2785 # This maintains backwards compatibility with previous dak behaviour
2786 # and deals with slightly broken binary debs which don't properly
2787 # declare their source package name
2788 if len(bin_sources) == 0:
2789 if u.pkg.changes["architecture"].has_key("source") \
2790 and u.pkg.dsc.has_key("source") and u.pkg.dsc.has_key("version"):
2791 bin_sources = get_sources_from_name(u.pkg.dsc["source"], u.pkg.dsc["version"], session=session)
2793 # If we couldn't find a source here, we reject
2794 # TODO: Fix this so that it doesn't kill process-upload and instead just
2795 # performs a reject. To be honest, we should probably spot this
2796 # *much* earlier than here
2797 if len(bin_sources) != 1:
2798 raise NoSourceFieldError("Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
2799 (bin.package, bin.version, entry["architecture"],
2800 filename, bin.binarytype, u.pkg.changes["fingerprint"]))
2802 bin.source_id = bin_sources[0].source_id
# Each Built-Using reference must resolve to exactly one known source.
2804 if entry.has_key("built-using"):
2805 for srcname, version in entry["built-using"]:
2806 exsources = get_sources_from_name(srcname, version, session=session)
2807 if len(exsources) != 1:
2808 raise NoSourceFieldError("Unable to find source package (%s = %s) in Built-Using for %s (%s), %s, file %s, type %s, signed by %s" % \
2809 (srcname, version, bin.package, bin.version, entry["architecture"],
2810 filename, bin.binarytype, u.pkg.changes["fingerprint"]))
2812 bin.extra_sources.append(exsources[0])
2814 # Add and flush object so it has an ID
2817 suite_names = u.pkg.changes["distribution"].keys()
2818 bin.suites = session.query(Suite). \
2819 filter(Suite.suite_name.in_(suite_names)).all()
2823 # Deal with contents - disabled for now
2824 #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
2826 # print "REJECT\nCould not determine contents of package %s" % bin.package
2827 # session.rollback()
2828 # raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
2830 return bin, poolfile
2832 __all__.append('add_deb_to_db')
2834 ################################################################################
# ORM row class for the source_acl table; populated entirely by the mapper,
# hence the pass-through constructor.
2836 class SourceACL(object):
2837 def __init__(self, *args, **kwargs):
2841 return '<SourceACL %s>' % self.source_acl_id
2843 __all__.append('SourceACL')
2845 ################################################################################
# ORM row class for the src_format table (e.g. "1.0", "3.0 (quilt)");
# attributes are supplied by the SQLAlchemy mapper.
2847 class SrcFormat(object):
2848 def __init__(self, *args, **kwargs):
2852 return '<SrcFormat %s>' % (self.format_name)
2854 __all__.append('SrcFormat')
2856 ################################################################################
# (display-name, attribute-name) pairs used by Suite to print its settings;
# iterated by the details-formatting code at lines 2902-2907 below.
2858 SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
2859 ('SuiteID', 'suite_id'),
2860 ('Version', 'version'),
2861 ('Origin', 'origin'),
2863 ('Description', 'description'),
2864 ('Untouchable', 'untouchable'),
2865 ('Announce', 'announce'),
2866 ('Codename', 'codename'),
2867 ('OverrideCodename', 'overridecodename'),
2868 ('ValidTime', 'validtime'),
2869 ('Priority', 'priority'),
2870 ('NotAutomatic', 'notautomatic'),
2871 ('CopyChanges', 'copychanges'),
2872 ('OverrideSuite', 'overridesuite')]
2874 # Why the heck don't we have any UNIQUE constraints in table suite?
2875 # TODO: Add UNIQUE constraints for appropriate columns.
# ORM row class for the suite table.  Comparison against plain strings is
# supported so `suite == "unstable"` works in queries and code.
2876 class Suite(ORMObject):
2877 def __init__(self, suite_name = None, version = None):
2878 self.suite_name = suite_name
2879 self.version = version
2881 def properties(self):
2882 return ['suite_name', 'version', 'sources_count', 'binaries_count', \
2885 def not_null_constraints(self):
2886 return ['suite_name']
# Equality with a str compares against suite_name; anything else defers to
# the default comparison via NotImplemented.
2888 def __eq__(self, val):
2889 if isinstance(val, str):
2890 return (self.suite_name == val)
2891 # This signals to use the normal comparison operator
2892 return NotImplemented
2894 def __ne__(self, val):
2895 if isinstance(val, str):
2896 return (self.suite_name != val)
2897 # This signals to use the normal comparison operator
2898 return NotImplemented
# NOTE(review): the enclosing `def` line (and `ret = []` init) for this
# SUITE_FIELDS formatting loop is elided in this listing.
2902 for disp, field in SUITE_FIELDS:
2903 val = getattr(self, field, None)
2905 ret.append("%s: %s" % (disp, val))
2907 return "\n".join(ret)
2909 def get_architectures(self, skipsrc=False, skipall=False):
2911 Returns list of Architecture objects
2913 @type skipsrc: boolean
2914 @param skipsrc: Whether to skip returning the 'source' architecture entry
2917 @type skipall: boolean
2918 @param skipall: Whether to skip returning the 'all' architecture entry
2922 @return: list of Architecture objects for the given name (may be empty)
2925 q = object_session(self).query(Architecture).with_parent(self)
# NOTE(review): the `if skipsrc:` / `if skipall:` guard lines are elided;
# the filters below are conditional per the parameters documented above.
2927 q = q.filter(Architecture.arch_string != 'source')
2929 q = q.filter(Architecture.arch_string != 'all')
2930 return q.order_by(Architecture.arch_string).all()
2932 def get_sources(self, source):
2934 Returns a query object representing DBSource that is part of C{suite}.
2936 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2938 @type source: string
2939 @param source: source package name
2941 @rtype: sqlalchemy.orm.query.Query
2942 @return: a query of DBSource
2946 session = object_session(self)
2947 return session.query(DBSource).filter_by(source = source). \
# Resolve the suite whose override data applies: self unless overridesuite
# names another suite.
2950 def get_overridesuite(self):
2951 if self.overridesuite is None:
2954 return object_session(self).query(Suite).filter_by(suite_name=self.overridesuite).one()
# NOTE(review): the enclosing `def` line for this dists-path helper is
# elided in this listing.
2958 return os.path.join(self.archive.path, 'dists', self.suite_name)
2960 __all__.append('Suite')
# Look up a Suite row by name, returning None when it does not exist.
# NOTE(review): the `try:` / `return` lines around the one() call are
# elided in this listing, as are the docstring quotes.
2963 def get_suite(suite, session=None):
2965 Returns Suite object for given C{suite name}.
2968 @param suite: The name of the suite
2970 @type session: Session
2971 @param session: Optional SQLA session object (a temporary one will be
2972 generated if not supplied)
2975 @return: Suite object for the requested suite name (None if not present)
2978 q = session.query(Suite).filter_by(suite_name=suite)
2982 except NoResultFound:
2985 __all__.append('get_suite')
2987 ################################################################################
# Convenience wrapper: fetch the suite by name and delegate to
# Suite.get_architectures(); a missing suite makes get_suite return None,
# whose attribute access raises AttributeError (caught below to yield the
# documented empty result).
2990 def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
2992 Returns list of Architecture objects for given C{suite} name. The list is
2993 empty if suite does not exist.
2996 @param suite: Suite name to search for
2998 @type skipsrc: boolean
2999 @param skipsrc: Whether to skip returning the 'source' architecture entry
3002 @type skipall: boolean
3003 @param skipall: Whether to skip returning the 'all' architecture entry
3006 @type session: Session
3007 @param session: Optional SQL session object (a temporary one will be
3008 generated if not supplied)
3011 @return: list of Architecture objects for the given name (may be empty)
3015 return get_suite(suite, session).get_architectures(skipsrc, skipall)
3016 except AttributeError:
3019 __all__.append('get_suite_architectures')
3021 ################################################################################
# ORM row class for the uid table (GPG key uids).  Like Suite, supports
# direct comparison against plain strings via the uid column.
3023 class Uid(ORMObject):
3024 def __init__(self, uid = None, name = None):
3028 def __eq__(self, val):
3029 if isinstance(val, str):
3030 return (self.uid == val)
3031 # This signals to use the normal comparison operator
3032 return NotImplemented
3034 def __ne__(self, val):
3035 if isinstance(val, str):
3036 return (self.uid != val)
3037 # This signals to use the normal comparison operator
3038 return NotImplemented
3040 def properties(self):
3041 return ['uid', 'name', 'fingerprint']
3043 def not_null_constraints(self):
3046 __all__.append('Uid')
# Fetch the Uid row for uidname, inserting one first if it does not exist
# (get-or-create idiom).
# NOTE(review): the `try:`, `ret = q.one()`, insert and `return` lines are
# elided in this listing; only the except branch and the flush survive.
3049 def get_or_set_uid(uidname, session=None):
3051 Returns uid object for given uidname.
3053 If no matching uidname is found, a row is inserted.
3055 @type uidname: string
3056 @param uidname: The uid to add
3058 @type session: SQLAlchemy
3059 @param session: Optional SQL session object (a temporary one will be
3060 generated if not supplied). If not passed, a commit will be performed at
3061 the end of the function, otherwise the caller is responsible for commiting.
3064 @return: the uid object for the given uidname
3067 q = session.query(Uid).filter_by(uid=uidname)
3071 except NoResultFound:
3075 session.commit_or_flush()
3080 __all__.append('get_or_set_uid')
# Resolve the Uid associated with a GPG fingerprint via the
# Uid->Fingerprint join; the elided except-branch returns None when no
# matching fingerprint exists.
3083 def get_uid_from_fingerprint(fpr, session=None):
3084 q = session.query(Uid)
3085 q = q.join(Fingerprint).filter_by(fingerprint=fpr)
3089 except NoResultFound:
3092 __all__.append('get_uid_from_fingerprint')
3094 ################################################################################
# ORM row class for the upload_blocks table; attributes come from the
# mapper, hence the pass-through constructor.
3096 class UploadBlock(object):
3097 def __init__(self, *args, **kwargs):
3101 return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
3103 __all__.append('UploadBlock')
3105 ################################################################################
# ORM row class for metadata_keys: the names of control fields stored in
# the binaries_metadata / source_metadata tables.
3107 class MetadataKey(ORMObject):
3108 def __init__(self, key = None):
3111 def properties(self):
3114 def not_null_constraints(self):
3117 __all__.append('MetadataKey')
# Fetch the MetadataKey row for keyname, inserting one first if it does
# not exist (get-or-create idiom, mirrors get_or_set_uid above).
# NOTE(review): the `try:`, `ret = q.one()`, session.add and `return`
# lines are elided in this listing.
3120 def get_or_set_metadatakey(keyname, session=None):
3122 Returns MetadataKey object for given keyname.
3124 If no matching keyname is found, a row is inserted.
3126 @type keyname: string
3127 @param keyname: The keyname to add
3129 @type session: SQLAlchemy
3130 @param session: Optional SQL session object (a temporary one will be
3131 generated if not supplied). If not passed, a commit will be performed at
3132 the end of the function, otherwise the caller is responsible for commiting.
3135 @return: the metadatakey object for the given keyname
3138 q = session.query(MetadataKey).filter_by(key=keyname)
3142 except NoResultFound:
3143 ret = MetadataKey(keyname)
3145 session.commit_or_flush()
3149 __all__.append('get_or_set_metadatakey')
3151 ################################################################################
# ORM row class for binaries_metadata: one (binary, key) -> value entry of
# a binary package's control fields.
3153 class BinaryMetadata(ORMObject):
3154 def __init__(self, key = None, value = None, binary = None):
3157 self.binary = binary
3159 def properties(self):
3160 return ['binary', 'key', 'value']
3162 def not_null_constraints(self):
3165 __all__.append('BinaryMetadata')
3167 ################################################################################
# ORM row class for source_metadata: one (source, key) -> value entry of a
# source package's control fields; parallels BinaryMetadata above.
3169 class SourceMetadata(ORMObject):
3170 def __init__(self, key = None, value = None, source = None):
3173 self.source = source
3175 def properties(self):
3176 return ['source', 'key', 'value']
3178 def not_null_constraints(self):
3181 __all__.append('SourceMetadata')
3183 ################################################################################
# ORM row class for version_check: per-suite version constraints
# ('Enhances', 'MustBeNewerThan', ...) referencing another suite.
3185 class VersionCheck(ORMObject):
3186 def __init__(self, *args, **kwargs):
3189 def properties(self):
3190 #return ['suite_id', 'check', 'reference_id']
3193 def not_null_constraints(self):
3194 return ['suite', 'check', 'reference']
3196 __all__.append('VersionCheck')
# Return the VersionCheck rows for a suite, optionally narrowed to one
# check type.  The comment below documents why the (elided) missing-suite
# branch must still return something iterable.
# NOTE(review): the missing-suite guard and the final return (q.all() or
# similar) are elided in this listing.
3199 def get_version_checks(suite_name, check = None, session = None):
3200 suite = get_suite(suite_name, session)
3202 # Make sure that what we return is iterable so that list comprehensions
3203 # involving this don't cause a traceback
3205 q = session.query(VersionCheck).filter_by(suite=suite)
3207 q = q.filter_by(check=check)
3210 __all__.append('get_version_checks')
3212 ################################################################################
3214 class DBConn(object):
3216 database module init.
3220 def __init__(self, *args, **kwargs):
3221 self.__dict__ = self.__shared_state
3223 if not getattr(self, 'initialised', False):
3224 self.initialised = True
3225 self.debug = kwargs.has_key('debug')
3228 def __setuptables(self):
3235 'binaries_metadata',
3239 'build_queue_files',
3240 'build_queue_policy_files',
3245 'changes_pending_binaries',
3246 'changes_pending_files',
3247 'changes_pending_source',
3248 'changes_pending_files_map',
3249 'changes_pending_source_files',
3250 'changes_pool_files',
3252 'external_overrides',
3253 'extra_src_references',
3262 # TODO: the maintainer column in table override should be removed.
3276 'suite_architectures',
3277 'suite_build_queue_copy',
3278 'suite_src_formats',
3285 'almost_obsolete_all_associations',
3286 'almost_obsolete_src_associations',
3287 'any_associations_source',
3288 'bin_associations_binaries',
3289 'binaries_suite_arch',
3290 'binfiles_suite_component_arch',
3293 'newest_all_associations',
3294 'newest_any_associations',
3296 'newest_src_association',
3297 'obsolete_all_associations',
3298 'obsolete_any_associations',
3299 'obsolete_any_by_all_associations',
3300 'obsolete_src_associations',
3302 'src_associations_bin',
3303 'src_associations_src',
3304 'suite_arch_by_name',
3307 for table_name in tables:
3308 table = Table(table_name, self.db_meta, \
3309 autoload=True, useexisting=True)
3310 setattr(self, 'tbl_%s' % table_name, table)
3312 for view_name in views:
3313 view = Table(view_name, self.db_meta, autoload=True)
3314 setattr(self, 'view_%s' % view_name, view)
3316 def __setupmappers(self):
3317 mapper(Architecture, self.tbl_architecture,
3318 properties = dict(arch_id = self.tbl_architecture.c.id,
3319 suites = relation(Suite, secondary=self.tbl_suite_architectures,
3320 order_by=self.tbl_suite.c.suite_name,
3321 backref=backref('architectures', order_by=self.tbl_architecture.c.arch_string))),
3322 extension = validator)
3324 mapper(Archive, self.tbl_archive,
3325 properties = dict(archive_id = self.tbl_archive.c.id,
3326 archive_name = self.tbl_archive.c.name))
3328 mapper(BuildQueue, self.tbl_build_queue,
3329 properties = dict(queue_id = self.tbl_build_queue.c.id))
3331 mapper(BuildQueueFile, self.tbl_build_queue_files,
3332 properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
3333 poolfile = relation(PoolFile, backref='buildqueueinstances')))
3335 mapper(BuildQueuePolicyFile, self.tbl_build_queue_policy_files,
3337 build_queue = relation(BuildQueue, backref='policy_queue_files'),
3338 file = relation(ChangePendingFile, lazy='joined')))
3340 mapper(DBBinary, self.tbl_binaries,
3341 properties = dict(binary_id = self.tbl_binaries.c.id,
3342 package = self.tbl_binaries.c.package,
3343 version = self.tbl_binaries.c.version,
3344 maintainer_id = self.tbl_binaries.c.maintainer,
3345 maintainer = relation(Maintainer),
3346 source_id = self.tbl_binaries.c.source,
3347 source = relation(DBSource, backref='binaries'),
3348 arch_id = self.tbl_binaries.c.architecture,
3349 architecture = relation(Architecture),
3350 poolfile_id = self.tbl_binaries.c.file,
3351 poolfile = relation(PoolFile, backref=backref('binary', uselist = False)),
3352 binarytype = self.tbl_binaries.c.type,
3353 fingerprint_id = self.tbl_binaries.c.sig_fpr,
3354 fingerprint = relation(Fingerprint),
3355 install_date = self.tbl_binaries.c.install_date,
3356 suites = relation(Suite, secondary=self.tbl_bin_associations,
3357 backref=backref('binaries', lazy='dynamic')),
3358 extra_sources = relation(DBSource, secondary=self.tbl_extra_src_references,
3359 backref=backref('extra_binary_references', lazy='dynamic')),
3360 key = relation(BinaryMetadata, cascade='all',
3361 collection_class=attribute_mapped_collection('key'))),
3362 extension = validator)
3364 mapper(BinaryACL, self.tbl_binary_acl,
3365 properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))
3367 mapper(BinaryACLMap, self.tbl_binary_acl_map,
3368 properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
3369 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
3370 architecture = relation(Architecture)))
3372 mapper(Component, self.tbl_component,
3373 properties = dict(component_id = self.tbl_component.c.id,
3374 component_name = self.tbl_component.c.name),
3375 extension = validator)
3377 mapper(DBConfig, self.tbl_config,
3378 properties = dict(config_id = self.tbl_config.c.id))
3380 mapper(DSCFile, self.tbl_dsc_files,
3381 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
3382 source_id = self.tbl_dsc_files.c.source,
3383 source = relation(DBSource),
3384 poolfile_id = self.tbl_dsc_files.c.file,
3385 poolfile = relation(PoolFile)))
3387 mapper(ExternalOverride, self.tbl_external_overrides,
3389 suite_id = self.tbl_external_overrides.c.suite,
3390 suite = relation(Suite),
3391 component_id = self.tbl_external_overrides.c.component,
3392 component = relation(Component)))
3394 mapper(PoolFile, self.tbl_files,
3395 properties = dict(file_id = self.tbl_files.c.id,
3396 filesize = self.tbl_files.c.size,
3397 location_id = self.tbl_files.c.location,
3398 location = relation(Location,
3399 # using lazy='dynamic' in the back
3400 # reference because we have A LOT of
3401 # files in one location
3402 backref=backref('files', lazy='dynamic'))),
3403 extension = validator)
3405 mapper(Fingerprint, self.tbl_fingerprint,
3406 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
3407 uid_id = self.tbl_fingerprint.c.uid,
3408 uid = relation(Uid),
3409 keyring_id = self.tbl_fingerprint.c.keyring,
3410 keyring = relation(Keyring),
3411 source_acl = relation(SourceACL),
3412 binary_acl = relation(BinaryACL)),
3413 extension = validator)
3415 mapper(Keyring, self.tbl_keyrings,
3416 properties = dict(keyring_name = self.tbl_keyrings.c.name,
3417 keyring_id = self.tbl_keyrings.c.id))
3419 mapper(DBChange, self.tbl_changes,
3420 properties = dict(change_id = self.tbl_changes.c.id,
3421 poolfiles = relation(PoolFile,
3422 secondary=self.tbl_changes_pool_files,
3423 backref="changeslinks"),
3424 seen = self.tbl_changes.c.seen,
3425 source = self.tbl_changes.c.source,
3426 binaries = self.tbl_changes.c.binaries,
3427 architecture = self.tbl_changes.c.architecture,
3428 distribution = self.tbl_changes.c.distribution,
3429 urgency = self.tbl_changes.c.urgency,
3430 maintainer = self.tbl_changes.c.maintainer,
3431 changedby = self.tbl_changes.c.changedby,
3432 date = self.tbl_changes.c.date,
3433 version = self.tbl_changes.c.version,
3434 files = relation(ChangePendingFile,
3435 secondary=self.tbl_changes_pending_files_map,
3436 backref="changesfile"),
3437 in_queue_id = self.tbl_changes.c.in_queue,
3438 in_queue = relation(PolicyQueue,
3439 primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
3440 approved_for_id = self.tbl_changes.c.approved_for))
3442 mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
3443 properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))
3445 mapper(ChangePendingFile, self.tbl_changes_pending_files,
3446 properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
3447 filename = self.tbl_changes_pending_files.c.filename,
3448 size = self.tbl_changes_pending_files.c.size,
3449 md5sum = self.tbl_changes_pending_files.c.md5sum,
3450 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
3451 sha256sum = self.tbl_changes_pending_files.c.sha256sum))
3453 mapper(ChangePendingSource, self.tbl_changes_pending_source,
3454 properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
3455 change = relation(DBChange),
3456 maintainer = relation(Maintainer,
3457 primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
3458 changedby = relation(Maintainer,
3459 primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
3460 fingerprint = relation(Fingerprint),
3461 source_files = relation(ChangePendingFile,
3462 secondary=self.tbl_changes_pending_source_files,
3463 backref="pending_sources")))
3466 mapper(KeyringACLMap, self.tbl_keyring_acl_map,
3467 properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
3468 keyring = relation(Keyring, backref="keyring_acl_map"),
3469 architecture = relation(Architecture)))
3471 mapper(Location, self.tbl_location,
3472 properties = dict(location_id = self.tbl_location.c.id,
3473 component_id = self.tbl_location.c.component,
3474 component = relation(Component, backref='location'),
3475 archive_id = self.tbl_location.c.archive,
3476 archive = relation(Archive),
3477 # FIXME: the 'type' column is old cruft and
3478 # should be removed in the future.
3479 archive_type = self.tbl_location.c.type),
3480 extension = validator)
3482 mapper(Maintainer, self.tbl_maintainer,
3483 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
3484 maintains_sources = relation(DBSource, backref='maintainer',
3485 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
3486 changed_sources = relation(DBSource, backref='changedby',
3487 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
3488 extension = validator)
3490 mapper(NewComment, self.tbl_new_comments,
3491 properties = dict(comment_id = self.tbl_new_comments.c.id))
3493 mapper(Override, self.tbl_override,
3494 properties = dict(suite_id = self.tbl_override.c.suite,
3495 suite = relation(Suite, \
3496 backref=backref('overrides', lazy='dynamic')),
3497 package = self.tbl_override.c.package,
3498 component_id = self.tbl_override.c.component,
3499 component = relation(Component, \
3500 backref=backref('overrides', lazy='dynamic')),
3501 priority_id = self.tbl_override.c.priority,
3502 priority = relation(Priority, \
3503 backref=backref('overrides', lazy='dynamic')),
3504 section_id = self.tbl_override.c.section,
3505 section = relation(Section, \
3506 backref=backref('overrides', lazy='dynamic')),
3507 overridetype_id = self.tbl_override.c.type,
3508 overridetype = relation(OverrideType, \
3509 backref=backref('overrides', lazy='dynamic'))))
3511 mapper(OverrideType, self.tbl_override_type,
3512 properties = dict(overridetype = self.tbl_override_type.c.type,
3513 overridetype_id = self.tbl_override_type.c.id))
3515 mapper(PolicyQueue, self.tbl_policy_queue,
3516 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))
3518 mapper(Priority, self.tbl_priority,
3519 properties = dict(priority_id = self.tbl_priority.c.id))
3521 mapper(Section, self.tbl_section,
3522 properties = dict(section_id = self.tbl_section.c.id,
3523 section=self.tbl_section.c.section))
3525 mapper(DBSource, self.tbl_source,
3526 properties = dict(source_id = self.tbl_source.c.id,
3527 version = self.tbl_source.c.version,
3528 maintainer_id = self.tbl_source.c.maintainer,
3529 poolfile_id = self.tbl_source.c.file,
3530 poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
3531 fingerprint_id = self.tbl_source.c.sig_fpr,
3532 fingerprint = relation(Fingerprint),
3533 changedby_id = self.tbl_source.c.changedby,
3534 srcfiles = relation(DSCFile,
3535 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
3536 suites = relation(Suite, secondary=self.tbl_src_associations,
3537 backref=backref('sources', lazy='dynamic')),
3538 uploaders = relation(Maintainer,
3539 secondary=self.tbl_src_uploaders),
3540 key = relation(SourceMetadata, cascade='all',
3541 collection_class=attribute_mapped_collection('key'))),
3542 extension = validator)
3544 mapper(SourceACL, self.tbl_source_acl,
3545 properties = dict(source_acl_id = self.tbl_source_acl.c.id))
3547 mapper(SrcFormat, self.tbl_src_format,
3548 properties = dict(src_format_id = self.tbl_src_format.c.id,
3549 format_name = self.tbl_src_format.c.format_name))
3551 mapper(Suite, self.tbl_suite,
3552 properties = dict(suite_id = self.tbl_suite.c.id,
3553 policy_queue = relation(PolicyQueue),
3554 copy_queues = relation(BuildQueue,
3555 secondary=self.tbl_suite_build_queue_copy),
3556 srcformats = relation(SrcFormat, secondary=self.tbl_suite_src_formats,
3557 backref=backref('suites', lazy='dynamic')),
3558 archive = relation(Archive, backref='suites')),
3559 extension = validator)
3561 mapper(Uid, self.tbl_uid,
3562 properties = dict(uid_id = self.tbl_uid.c.id,
3563 fingerprint = relation(Fingerprint)),
3564 extension = validator)
3566 mapper(UploadBlock, self.tbl_upload_blocks,
3567 properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
3568 fingerprint = relation(Fingerprint, backref="uploadblocks"),
3569 uid = relation(Uid, backref="uploadblocks")))
3571 mapper(BinContents, self.tbl_bin_contents,
3573 binary = relation(DBBinary,
3574 backref=backref('contents', lazy='dynamic', cascade='all')),
3575 file = self.tbl_bin_contents.c.file))
3577 mapper(SrcContents, self.tbl_src_contents,
3579 source = relation(DBSource,
3580 backref=backref('contents', lazy='dynamic', cascade='all')),
3581 file = self.tbl_src_contents.c.file))
3583 mapper(MetadataKey, self.tbl_metadata_keys,
3585 key_id = self.tbl_metadata_keys.c.key_id,
3586 key = self.tbl_metadata_keys.c.key))
3588 mapper(BinaryMetadata, self.tbl_binaries_metadata,
3590 binary_id = self.tbl_binaries_metadata.c.bin_id,
3591 binary = relation(DBBinary),
3592 key_id = self.tbl_binaries_metadata.c.key_id,
3593 key = relation(MetadataKey),
3594 value = self.tbl_binaries_metadata.c.value))
3596 mapper(SourceMetadata, self.tbl_source_metadata,
3598 source_id = self.tbl_source_metadata.c.src_id,
3599 source = relation(DBSource),
3600 key_id = self.tbl_source_metadata.c.key_id,
3601 key = relation(MetadataKey),
3602 value = self.tbl_source_metadata.c.value))
3604 mapper(VersionCheck, self.tbl_version_check,
3606 suite_id = self.tbl_version_check.c.suite,
3607 suite = relation(Suite, primaryjoin=self.tbl_version_check.c.suite==self.tbl_suite.c.id),
3608 reference_id = self.tbl_version_check.c.reference,
3609 reference = relation(Suite, primaryjoin=self.tbl_version_check.c.reference==self.tbl_suite.c.id, lazy='joined')))
3611 ## Connection functions
    def __createconn(self):
        # Build the PostgreSQL connection string from dak's configuration,
        # create the SQLAlchemy engine and session factory, and initialise
        # the table definitions and ORM mappers.  On connection failure the
        # program is aborted via utils.fubar().
        #
        # NOTE(review): this excerpt elides some lines of the method (e.g.
        # the Config() instantiation bound to 'cnf', the 'else:' introducing
        # the Unix-socket case, and the 'try:' around engine creation); the
        # comments below describe only what is visible here.
        from config import Config
        if cnf.has_key("DB::Service"):
            # Connect via a pg_service.conf service definition.
            connstr = "postgresql://service=%s" % cnf["DB::Service"]
        elif cnf.has_key("DB::Host"):
            # TCP connection to an explicit host.
            connstr = "postgresql://%s" % cnf["DB::Host"]
            if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
                # A configured port of "-1" means "use the server default".
                connstr += ":%s" % cnf["DB::Port"]
            connstr += "/%s" % cnf["DB::Name"]
        # (elided 'else:' branch — presumably a local Unix-socket connection;
        # TODO confirm against the full file)
            connstr = "postgresql:///%s" % cnf["DB::Name"]
            if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
                connstr += "?port=%s" % cnf["DB::Port"]

        # Engine options: echo SQL statements when debugging; connection
        # pool sizing is optional and taken from the configuration.
        engine_args = { 'echo': self.debug }
        if cnf.has_key('DB::PoolSize'):
            engine_args['pool_size'] = int(cnf['DB::PoolSize'])
        if cnf.has_key('DB::MaxOverflow'):
            engine_args['max_overflow'] = int(cnf['DB::MaxOverflow'])
        # SQLAlchemy 0.6 only: allow disabling psycopg2's native unicode
        # support via the DB::Unicode configuration switch.
        if sa_major_version == '0.6' and cnf.has_key('DB::Unicode') and \
            cnf['DB::Unicode'] == 'false':
            engine_args['use_native_unicode'] = False

        # Monkey patch a new dialect in in order to support service= syntax
        import sqlalchemy.dialects.postgresql
        from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
        class PGDialect_psycopg2_dak(PGDialect_psycopg2):
            def create_connect_args(self, url):
                if str(url).startswith('postgresql://service='):
                    # Strip the 21-character "postgresql://service=" prefix
                    # and hand psycopg2 a bare "service=<name>" DSN.
                    servicename = str(url)[21:]
                    return (['service=%s' % servicename], {})
                # Anything else goes through the stock psycopg2 dialect.
                return PGDialect_psycopg2.create_connect_args(self, url)

        sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak

        # Create the engine, bind metadata to it, and prepare the session
        # factory used by DBConn.session().
        self.db_pg   = create_engine(connstr, **engine_args)
        self.db_meta = MetaData()
        self.db_meta.bind = self.db_pg
        self.db_smaker = sessionmaker(bind=self.db_pg,
        self.__setuptables()
        self.__setupmappers()
        except OperationalError as e:
            # Fatal: dak cannot run without a database connection.
            utils.fubar("Cannot connect to database (%s)" % str(e))
        # Remember the creating PID so session() can detect use after fork.
        self.pid = os.getpid()
    def session(self, work_mem = 0):
        """
        Returns a new session object. If a work_mem parameter is provided a new
        transaction is started and the work_mem parameter is set for this
        transaction. The work_mem parameter is measured in MB. A default value
        will be used if the parameter is not set.
        """
        # reinitialize DBConn in new processes
        if self.pid != os.getpid():
            # NOTE(review): the body of this branch (presumably re-running
            # connection setup after a fork) is elided from this excerpt —
            # confirm against the full file.
        session = self.db_smaker()
        # NOTE(review): likely guarded by an elided "if work_mem > 0:" line;
        # sets PostgreSQL's per-transaction working memory for this session.
        session.execute("SET LOCAL work_mem TO '%d MB'" % work_mem)
# Publish DBConn as part of this module's public API.
__all__ += ['DBConn']