5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
47 import simplejson as json
49 from datetime import datetime, timedelta
50 from errno import ENOENT
51 from tempfile import mkstemp, mkdtemp
53 from inspect import getargspec
56 from sqlalchemy import create_engine, Table, MetaData, Column, Integer
57 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
58 backref, MapperExtension, EXT_CONTINUE
59 from sqlalchemy import types as sqltypes
61 # Don't remove this, we re-export the exceptions to scripts which import us
62 from sqlalchemy.exc import *
63 from sqlalchemy.orm.exc import NoResultFound
65 # Only import Config until Queue stuff is changed to store its config
67 from config import Config
68 from textutils import fix_maintainer
69 from dak_exceptions import DBUpdateError, NoSourceFieldError
71 # suppress some deprecation warnings in squeeze related to sqlalchemy
73 warnings.filterwarnings('ignore', \
74 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
76 # TODO: sqlalchemy needs some extra configuration to correctly reflect
77 # the ind_deb_contents_* indexes - we ignore the warnings at the moment
78 warnings.filterwarnings("ignore", 'Predicate of partial index', SAWarning)
81 ################################################################################
83 # Patch in support for the debversion field type so that it works during
87 # that is for sqlalchemy 0.6
88 UserDefinedType = sqltypes.UserDefinedType
90 # this one for sqlalchemy 0.5
91 UserDefinedType = sqltypes.TypeEngine
93 class DebVersion(UserDefinedType):
94 def get_col_spec(self):
97 def bind_processor(self, dialect):
100 # ' = None' is needed for sqlalchemy 0.5:
101 def result_processor(self, dialect, coltype = None):
104 sa_major_version = sqlalchemy.__version__[0:3]
105 if sa_major_version in ["0.5", "0.6"]:
106 from sqlalchemy.databases import postgres
107 postgres.ischema_names['debversion'] = DebVersion
109 raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
111 ################################################################################
113 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
115 ################################################################################
# Decorator: guarantee the wrapped function receives an SQLAlchemy session.
# If the caller supplied none (neither as last positional arg nor as the
# 'session' kwarg), a private session is created for the call and
# committed/closed afterwards; a caller-supplied session is only flushed.
117 def session_wrapper(fn):
119 Wrapper around common ".., session=None):" handling. If the wrapped
120 function is called without passing 'session', we create a local one
121 and destroy it when the function ends.
123 Also attaches a commit_or_flush method to the session; if we created a
124 local session, this is a synonym for session.commit(), otherwise it is a
125 synonym for session.flush().
128 def wrapped(*args, **kwargs):
129 private_transaction = False
131 # Find the session object
132 session = kwargs.get('session')
# Compare positional-arg count against the target's declared parameters
# to decide whether a session was passed positionally.
135 if len(args) <= len(getargspec(fn)[0]) - 1:
136 # No session specified as last argument or in kwargs
137 private_transaction = True
138 session = kwargs['session'] = DBConn().session()
140 # Session is last argument in args
# NOTE(review): assigning into args requires args to have been converted
# to a list first -- that conversion appears elided from this extract.
144 session = args[-1] = DBConn().session()
145 private_transaction = True
147 if private_transaction:
148 session.commit_or_flush = session.commit
150 session.commit_or_flush = session.flush
153 return fn(*args, **kwargs)
155 if private_transaction:
156 # We created a session; close it.
# Preserve the wrapped function's docstring and name for introspection.
159 wrapped.__doc__ = fn.__doc__
160 wrapped.func_name = fn.func_name
164 __all__.append('session_wrapper')
166 ################################################################################
168 class ORMObject(object):
170 ORMObject is a base class for all ORM classes mapped by SQLAlchemy. All
171 derived classes must implement the properties() method.
174 def properties(self):
176 This method should be implemented by all derived classes and returns a
177 list of the important properties. The properties 'created' and
178 'modified' will be added automatically. A suffix '_count' should be
179 added to properties that are lists or query objects. The most important
180 property name should be returned as the first element in the list
181 because it is used by repr().
# json(): serialize the declared properties to a JSON string.
# NOTE(review): the 'def json(self):' line and several branches are elided
# from this extract; the visible logic resolves '_count' suffixed names to
# len()/count() of the underlying relation.
187 Returns a JSON representation of the object based on the properties
188 returned from the properties() method.
191 # add created and modified
192 all_properties = self.properties() + ['created', 'modified']
193 for property in all_properties:
194 # check for list or query
195 if property[-6:] == '_count':
196 real_property = property[:-6]
197 if not hasattr(self, real_property):
199 value = getattr(self, real_property)
200 if hasattr(value, '__len__'):
203 elif hasattr(value, 'count'):
205 value = value.count()
207 raise KeyError('Do not understand property %s.' % property)
209 if not hasattr(self, property):
212 value = getattr(self, property)
216 elif isinstance(value, ORMObject):
217 # use repr() for ORMObject types
220 # we want a string for all other types because json cannot
223 data[property] = value
224 return json.dumps(data)
# classname(): convenience used by __repr__/__str__ below.
228 Returns the name of the class.
230 return type(self).__name__
234 Returns a short string representation of the object using the first
235 element from the properties() method.
237 primary_property = self.properties()[0]
238 value = getattr(self, primary_property)
239 return '<%s %s>' % (self.classname(), str(value))
243 Returns a human readable form of the object using the properties()
246 return '<%s %s>' % (self.classname(), self.json())
248 def not_null_constraints(self):
250 Returns a list of properties that must be not NULL. Derived classes
251 should override this method if needed.
255 validation_message = \
256 "Validation failed because property '%s' must not be empty in object\n%s"
# validate(): raise DBUpdateError when a not-NULL property is unset, unless
# the corresponding numeric _id column was set directly.
260 This function validates the not NULL constraints as returned by
261 not_null_constraints(). It raises the DBUpdateError exception if
264 for property in self.not_null_constraints():
265 # TODO: It is a bit awkward that the mapper configuration allow
266 # directly setting the numeric _id columns. We should get rid of it
268 if hasattr(self, property + '_id') and \
269 getattr(self, property + '_id') is not None:
271 if not hasattr(self, property) or getattr(self, property) is None:
272 raise DBUpdateError(self.validation_message % \
273 (property, str(self)))
# get(): shorthand for session.query(cls).get(primary_key).
# NOTE(review): presumably decorated @classmethod/@session_wrapper in the
# full source -- those lines are elided here.
277 def get(cls, primary_key, session = None):
279 This is a support function that allows getting an object by its primary
282 Architecture.get(3[, session])
284 instead of the more verbose
286 session.query(Architecture).get(3)
288 return session.query(cls).get(primary_key)
290 __all__.append('ORMObject')
292 ################################################################################
294 class Validator(MapperExtension):
296 This class calls the validate() method for each instance for the
297 'before_update' and 'before_insert' events. A global object validator is
298 used for configuring the individual mappers.
# NOTE(review): the method bodies are elided in this extract -- presumably
# each calls instance.validate() and returns EXT_CONTINUE; confirm against
# the full daklib/dbconn.py.
301 def before_update(self, mapper, connection, instance):
305 def before_insert(self, mapper, connection, instance):
# Shared extension instance handed to every mapper() call below.
309 validator = Validator()
311 ################################################################################
class Architecture(ORMObject):
    """ORM class for one row of the architecture table (e.g. 'amd64')."""

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Allow comparing an Architecture directly against a plain name.
        if not isinstance(val, str):
            # This signals to use the normal comparison operator
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        if not isinstance(val, str):
            # This signals to use the normal comparison operator
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # 'arch_string' comes first: ORMObject.__repr__() uses element 0.
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']

__all__.append('Architecture')
# Look up one Architecture row by name.
# NOTE(review): this extract elides the session-handling/decorator lines and
# the try/return around q.one(); presumably returns None on NoResultFound.
339 def get_architecture(architecture, session=None):
341 Returns Architecture object for given C{architecture} name.
343 @type architecture: string
344 @param architecture: The name of the architecture
346 @type session: Session
347 @param session: Optional SQLA session object (a temporary one will be
348 generated if not supplied)
351 @return: Architecture object for the given arch (None if not present)
354 q = session.query(Architecture).filter_by(arch_string=architecture)
358 except NoResultFound:
361 __all__.append('get_architecture')
363 # TODO: should be removed because the implementation is too trivial
365 def get_architecture_suites(architecture, session=None):
367 Returns list of Suite objects for given C{architecture} name
369 @type architecture: str
370 @param architecture: Architecture name to search for
372 @type session: Session
373 @param session: Optional SQL session object (a temporary one will be
374 generated if not supplied)
377 @return: list of Suite objects for the given name (may be empty)
# NOTE(review): will raise AttributeError if the architecture is unknown
# (get_architecture() returning None has no .suites).
380 return get_architecture(architecture, session).suites
382 __all__.append('get_architecture_suites')
384 ################################################################################
# ORM class for the archive table.
# NOTE(review): the __init__ body ('pass') and the 'def __repr__' line are
# elided from this extract.
386 class Archive(object):
387 def __init__(self, *args, **kwargs):
391 return '<Archive %s>' % self.archive_name
393 __all__.append('Archive')
# Case-insensitive lookup of one Archive row by name.
# NOTE(review): decorator/try/return lines elided; presumably returns None
# on NoResultFound.
396 def get_archive(archive, session=None):
398 returns database id for given C{archive}.
400 @type archive: string
401 @param archive: the name of the archive
403 @type session: Session
404 @param session: Optional SQLA session object (a temporary one will be
405 generated if not supplied)
408 @return: Archive object for the given name (None if not present)
411 archive = archive.lower()
413 q = session.query(Archive).filter_by(archive_name=archive)
417 except NoResultFound:
420 __all__.append('get_archive')
422 ################################################################################
# ORM class for the bin_associations table (binary <-> suite links).
# NOTE(review): __init__ body and 'def __repr__' line elided in this extract.
424 class BinAssociation(object):
425 def __init__(self, *args, **kwargs):
429 return '<BinAssociation %s (%s, %s)>' % (self.ba_id, self.binary, self.suite)
431 __all__.append('BinAssociation')
433 ################################################################################
# ORM class for the bin_contents table (per-binary file lists).
# NOTE(review): __init__ body and 'def __repr__' line elided in this extract.
435 class BinContents(object):
436 def __init__(self, *args, **kwargs):
440 return '<BinContents (%s, %s)>' % (self.binary, self.filename)
442 __all__.append('BinContents')
444 ################################################################################
# ORM class for the binaries table.
# NOTE(review): this extract elides lines -- notably the 'self.source =
# source' assignment and the tail of not_null_constraints(); confirm against
# the full daklib/dbconn.py.
446 class DBBinary(ORMObject):
447 def __init__(self, package = None, source = None, version = None, \
448 maintainer = None, architecture = None, poolfile = None, \
450 self.package = package
452 self.version = version
453 self.maintainer = maintainer
454 self.architecture = architecture
455 self.poolfile = poolfile
456 self.binarytype = binarytype
458 def properties(self):
459 return ['package', 'version', 'maintainer', 'source', 'architecture', \
460 'poolfile', 'binarytype', 'fingerprint', 'install_date', \
461 'suites_count', 'binary_id']
463 def not_null_constraints(self):
464 return ['package', 'version', 'maintainer', 'source', 'poolfile', \
467 __all__.append('DBBinary')
# Return every Suite that contains a binary with the given package name.
470 def get_suites_binary_in(package, session=None):
472 Returns list of Suite objects which given C{package} name is in
475 @param package: DBBinary package name to search for
478 @return: list of Suite objects for the given package
481 return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()
483 __all__.append('get_suites_binary_in')
# Query DBBinary rows by package name, optionally narrowed by version and
# architecture(s).
# NOTE(review): the final 'return q.all()' line appears elided here.
486 def get_binaries_from_name(package, version=None, architecture=None, session=None):
488 Returns list of DBBinary objects for given C{package} name
491 @param package: DBBinary package name to search for
493 @type version: str or None
494 @param version: Version to search for (or None)
496 @type architecture: str, list or None
497 @param architecture: Architectures to limit to (or None if no limit)
499 @type session: Session
500 @param session: Optional SQL session object (a temporary one will be
501 generated if not supplied)
504 @return: list of DBBinary objects for the given name (may be empty)
507 q = session.query(DBBinary).filter_by(package=package)
509 if version is not None:
510 q = q.filter_by(version=version)
512 if architecture is not None:
# Accept a single architecture name as well as a list of names.
513 if not isinstance(architecture, list):
514 architecture = [architecture]
515 q = q.join(Architecture).filter(Architecture.arch_string.in_(architecture))
521 __all__.append('get_binaries_from_name')
# All binaries built from the given source row.
524 def get_binaries_from_source_id(source_id, session=None):
526 Returns list of DBBinary objects for given C{source_id}
529 @param source_id: source_id to search for
531 @type session: Session
532 @param session: Optional SQL session object (a temporary one will be
533 generated if not supplied)
536 @return: list of DBBinary objects for the given name (may be empty)
539 return session.query(DBBinary).filter_by(source_id=source_id).all()
541 __all__.append('get_binaries_from_source_id')
# Raw-SQL lookup of a binary's versions/components per suite, used by
# 'dak examine-package'.
544 def get_binary_from_name_suite(package, suitename, session=None):
545 ### For dak examine-package
546 ### XXX: Doesn't use object API yet
# NOTE(review): SECURITY -- 'package' and 'suitename' are %-interpolated
# straight into the SQL text rather than bound as parameters; safe only
# while callers pass trusted values. Several WHERE clauses also appear
# elided from this extract.
548 sql = """SELECT DISTINCT(b.package), b.version, c.name, su.suite_name
549 FROM binaries b, files fi, location l, component c, bin_associations ba, suite su
550 WHERE b.package='%(package)s'
552 AND fi.location = l.id
553 AND l.component = c.id
556 AND su.suite_name %(suitename)s
557 ORDER BY b.version DESC"""
559 return session.execute(sql % {'package': package, 'suitename': suitename})
561 __all__.append('get_binary_from_name_suite')
# Which component(s) a binary package lives in for a suite/arch -- detects
# packages that moved between components. Uses bound parameters (safe).
564 def get_binary_components(package, suitename, arch, session=None):
565 # Check for packages that have moved from one component to another
566 query = """SELECT c.name FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f
567 WHERE b.package=:package AND s.suite_name=:suitename
568 AND (a.arch_string = :arch OR a.arch_string = 'all')
569 AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
570 AND f.location = l.id
571 AND l.component = c.id
574 vals = {'package': package, 'suitename': suitename, 'arch': arch}
576 return session.execute(query, vals)
578 __all__.append('get_binary_components')
580 ################################################################################
# ORM class for the binary_acl table.
# NOTE(review): __init__ body and 'def __repr__' line elided in this extract.
582 class BinaryACL(object):
583 def __init__(self, *args, **kwargs):
587 return '<BinaryACL %s>' % self.binary_acl_id
589 __all__.append('BinaryACL')
591 ################################################################################
# ORM class for the binary_acl_map table.
# NOTE(review): __init__ body and 'def __repr__' line elided in this extract.
593 class BinaryACLMap(object):
594 def __init__(self, *args, **kwargs):
598 return '<BinaryACLMap %s>' % self.binary_acl_map_id
600 __all__.append('BinaryACLMap')
602 ################################################################################
607 ArchiveDir "%(archivepath)s";
608 OverrideDir "%(overridedir)s";
609 CacheDir "%(cachedir)s";
614 Packages::Compress ". bzip2 gzip";
615 Sources::Compress ". bzip2 gzip";
620 bindirectory "incoming"
625 BinOverride "override.sid.all3";
626 BinCacheDB "packages-accepted.db";
628 FileList "%(filelist)s";
631 Packages::Extensions ".deb .udeb";
634 bindirectory "incoming/"
637 BinOverride "override.sid.all3";
638 SrcOverride "override.sid.all3.src";
639 FileList "%(filelist)s";
# ORM class for the build_queue table, plus the maintenance logic that keeps
# an on-disk apt repository (buildd incoming queue) in sync with the DB.
# NOTE(review): this extract elides many lines (try/except/finally blocks,
# else branches, chdir restore); read the full daklib/dbconn.py before
# changing behaviour here.
643 class BuildQueue(object):
644 def __init__(self, *args, **kwargs):
648 return '<BuildQueue %s>' % self.queue_name
# write_metadata(): regenerate Packages/Sources/Release for this queue by
# shelling out to apt-ftparchive and gpg.
650 def write_metadata(self, starttime, force=False):
651 # Do we write out metafiles?
652 if not (force or self.generate_metadata):
655 session = DBConn().session().object_session(self)
657 fl_fd = fl_name = ac_fd = ac_name = None
659 arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
660 startdir = os.getcwd()
663 # Grab files we want to include
664 newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
665 # Write file list with newer files
666 (fl_fd, fl_name) = mkstemp()
668 os.write(fl_fd, '%s\n' % n.fullpath)
673 # Write minimal apt.conf
674 # TODO: Remove hardcoding from template
675 (ac_fd, ac_name) = mkstemp()
676 os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
678 'cachedir': cnf["Dir::Cache"],
679 'overridedir': cnf["Dir::Override"],
683 # Run apt-ftparchive generate
684 os.chdir(os.path.dirname(ac_name))
685 os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
687 # Run apt-ftparchive release
688 # TODO: Eww - fix this
689 bname = os.path.basename(self.path)
693 # We have to remove the Release file otherwise it'll be included in the
696 os.unlink(os.path.join(bname, 'Release'))
# NOTE(review): SECURITY -- queue metadata (origin/label/description) is
# interpolated into a shell command line; values are operator-controlled
# config, not user input, but quoting is fragile.
700 os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
702 # Crude hack with open and append, but this whole section is and should be redone.
703 if self.notautomatic:
704 release=open("Release", "a")
705 release.write("NotAutomatic: yes")
# Sign the Release file with the configured archive key.
710 keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
711 if cnf.has_key("Dinstall::SigningPubKeyring"):
712 keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
714 os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
716 # Move the files if we got this far
717 os.rename('Release', os.path.join(bname, 'Release'))
719 os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
721 # Clean up any left behind files
# clean_and_update(): expire queue files past stay_of_execution and remove
# stale symlinks; commits via the object's session.
748 def clean_and_update(self, starttime, Logger, dryrun=False):
749 """WARNING: This routine commits for you"""
750 session = DBConn().session().object_session(self)
752 if self.generate_metadata and not dryrun:
753 self.write_metadata(starttime)
755 # Grab files older than our execution time
756 older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
762 Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
764 Logger.log(["I: Removing %s from the queue" % o.fullpath])
765 os.unlink(o.fullpath)
768 # If it wasn't there, don't worry
769 if e.errno == ENOENT:
772 # TODO: Replace with proper logging call
773 Logger.log(["E: Could not remove %s" % o.fullpath])
# Second pass: delete on-disk files that have no BuildQueueFile row.
780 for f in os.listdir(self.path):
781 if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
785 r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
786 except NoResultFound:
787 fp = os.path.join(self.path, f)
789 Logger.log(["I: Would remove unused link %s" % fp])
791 Logger.log(["I: Removing unused link %s" % fp])
795 Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
# add_file_from_pool(): symlink (or copy) a pool file into the queue dir and
# record it as a BuildQueueFile.
797 def add_file_from_pool(self, poolfile):
798 """Copies a file into the pool. Assumes that the PoolFile object is
799 attached to the same SQLAlchemy session as the Queue object is.
801 The caller is responsible for committing after calling this function."""
802 poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
804 # Check if we have a file of this name or this ID already
805 for f in self.queuefiles:
806 if f.fileid is not None and f.fileid == poolfile.file_id or \
807 f.poolfile.filename == poolfile_basename:
808 # In this case, update the BuildQueueFile entry so we
809 # don't remove it too early
810 f.lastused = datetime.now()
811 DBConn().session().object_session(poolfile).add(f)
814 # Prepare BuildQueueFile object
815 qf = BuildQueueFile()
816 qf.build_queue_id = self.queue_id
817 qf.lastused = datetime.now()
818 qf.filename = poolfile_basename
820 targetpath = poolfile.fullpath
821 queuepath = os.path.join(self.path, poolfile_basename)
825 # We need to copy instead of symlink
827 utils.copy(targetpath, queuepath)
828 # NULL in the fileid field implies a copy
831 os.symlink(targetpath, queuepath)
832 qf.fileid = poolfile.file_id
836 # Get the same session as the PoolFile is using and add the qf to it
837 DBConn().session().object_session(poolfile).add(qf)
842 __all__.append('BuildQueue')
# Look up one BuildQueue row by queue name.
# NOTE(review): decorator/try/return lines elided in this extract.
845 def get_build_queue(queuename, session=None):
847 Returns BuildQueue object for given C{queue name}, creating it if it does not
850 @type queuename: string
851 @param queuename: The name of the queue
853 @type session: Session
854 @param session: Optional SQLA session object (a temporary one will be
855 generated if not supplied)
858 @return: BuildQueue object for the given queue
861 q = session.query(BuildQueue).filter_by(queue_name=queuename)
865 except NoResultFound:
868 __all__.append('get_build_queue')
870 ################################################################################
# ORM class for the build_queue_files table.
# NOTE(review): the __init__ body and the 'def __repr__'/'def fullpath'
# (property) lines are elided in this extract.
872 class BuildQueueFile(object):
873 def __init__(self, *args, **kwargs):
877 return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
881 return os.path.join(self.buildqueue.path, self.filename)
884 __all__.append('BuildQueueFile')
886 ################################################################################
# ORM class for the changes_pending_binaries table.
# NOTE(review): __init__ body and 'def __repr__' line elided in this extract.
888 class ChangePendingBinary(object):
889 def __init__(self, *args, **kwargs):
893 return '<ChangePendingBinary %s>' % self.change_pending_binary_id
895 __all__.append('ChangePendingBinary')
897 ################################################################################
# ORM class for the changes_pending_files table.
# NOTE(review): __init__ body and 'def __repr__' line elided in this extract.
899 class ChangePendingFile(object):
900 def __init__(self, *args, **kwargs):
904 return '<ChangePendingFile %s>' % self.change_pending_file_id
906 __all__.append('ChangePendingFile')
908 ################################################################################
# ORM class for the changes_pending_source table.
# NOTE(review): __init__ body and 'def __repr__' line elided in this extract.
910 class ChangePendingSource(object):
911 def __init__(self, *args, **kwargs):
915 return '<ChangePendingSource %s>' % self.change_pending_source_id
917 __all__.append('ChangePendingSource')
919 ################################################################################
# ORM class for the component table (main/contrib/non-free). Supports
# direct comparison against a plain component-name string.
# NOTE(review): the __init__ body and the 'def __repr__' line are elided in
# this extract.
921 class Component(object):
922 def __init__(self, *args, **kwargs):
925 def __eq__(self, val):
926 if isinstance(val, str):
927 return (self.component_name == val)
928 # This signals to use the normal comparison operator
929 return NotImplemented
931 def __ne__(self, val):
932 if isinstance(val, str):
933 return (self.component_name != val)
934 # This signals to use the normal comparison operator
935 return NotImplemented
938 return '<Component %s>' % self.component_name
941 __all__.append('Component')
# Case-insensitive lookup of one Component row by name.
# NOTE(review): decorator/try/return lines elided in this extract.
944 def get_component(component, session=None):
946 Returns database id for given C{component}.
948 @type component: string
949 @param component: The name of the component
952 @return: the database id for the given component
955 component = component.lower()
957 q = session.query(Component).filter_by(component_name=component)
961 except NoResultFound:
964 __all__.append('get_component')
966 ################################################################################
# ORM class for the config table (key/value configuration in the DB).
# NOTE(review): __init__ body and 'def __repr__' line elided in this extract.
968 class DBConfig(object):
969 def __init__(self, *args, **kwargs):
973 return '<DBConfig %s>' % self.name
975 __all__.append('DBConfig')
977 ################################################################################
# Find-or-create a content_file_names row; returns its id either way.
# NOTE(review): the try:/session.add(cf) lines around the insert appear
# elided in this extract.
980 def get_or_set_contents_file_id(filename, session=None):
982 Returns database id for given filename.
984 If no matching file is found, a row is inserted.
986 @type filename: string
987 @param filename: The filename
988 @type session: SQLAlchemy
989 @param session: Optional SQL session object (a temporary one will be
990 generated if not supplied). If not passed, a commit will be performed at
991 the end of the function, otherwise the caller is responsible for committing.
994 @return: the database id for the given filename
997 q = session.query(ContentFilename).filter_by(filename=filename)
1000 ret = q.one().cafilename_id
1001 except NoResultFound:
1002 cf = ContentFilename()
1003 cf.filename = filename
1005 session.commit_or_flush()
1006 ret = cf.cafilename_id
1010 __all__.append('get_or_set_contents_file_id')
# Stream the Contents entries for a suite/overridetype, optionally limited
# to one section. Parameters are properly bound (no SQL injection).
1013 def get_contents(suite, overridetype, section=None, session=None):
1015 Returns contents for a suite / overridetype combination, limiting
1016 to a section if not None.
1019 @param suite: Suite object
1021 @type overridetype: OverrideType
1022 @param overridetype: OverrideType object
1024 @type section: Section
1025 @param section: Optional section object to limit results to
1027 @type session: SQLAlchemy
1028 @param session: Optional SQL session object (a temporary one will be
1029 generated if not supplied)
1031 @rtype: ResultsProxy
1032 @return: ResultsProxy object set up to return tuples of (filename, section,
1036 # find me all of the contents for a given suite
1037 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
1041 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
1042 JOIN content_file_names n ON (c.filename=n.id)
1043 JOIN binaries b ON (b.id=c.binary_pkg)
1044 JOIN override o ON (o.package=b.package)
1045 JOIN section s ON (s.id=o.section)
1046 WHERE o.suite = :suiteid AND o.type = :overridetypeid
1047 AND b.type=:overridetypename"""
1049 vals = {'suiteid': suite.suite_id,
1050 'overridetypeid': overridetype.overridetype_id,
1051 'overridetypename': overridetype.overridetype}
1053 if section is not None:
1054 contents_q += " AND s.id = :sectionid"
1055 vals['sectionid'] = section.section_id
1057 contents_q += " ORDER BY fn"
1059 return session.execute(contents_q, vals)
1061 __all__.append('get_contents')
1063 ################################################################################
# ORM class for the content_file_paths table.
# NOTE(review): __init__ body and 'def __repr__' line elided in this extract.
1065 class ContentFilepath(object):
1066 def __init__(self, *args, **kwargs):
1070 return '<ContentFilepath %s>' % self.filepath
1072 __all__.append('ContentFilepath')
# Find-or-create a content_file_paths row; returns its id either way.
# NOTE(review): the try:/session.add(cf) lines around the insert appear
# elided in this extract.
1075 def get_or_set_contents_path_id(filepath, session=None):
1077 Returns database id for given path.
1079 If no matching file is found, a row is inserted.
1081 @type filepath: string
1082 @param filepath: The filepath
1084 @type session: SQLAlchemy
1085 @param session: Optional SQL session object (a temporary one will be
1086 generated if not supplied). If not passed, a commit will be performed at
1087 the end of the function, otherwise the caller is responsible for committing.
1090 @return: the database id for the given path
1093 q = session.query(ContentFilepath).filter_by(filepath=filepath)
1096 ret = q.one().cafilepath_id
1097 except NoResultFound:
1098 cf = ContentFilepath()
1099 cf.filepath = filepath
1101 session.commit_or_flush()
1102 ret = cf.cafilepath_id
1106 __all__.append('get_or_set_contents_path_id')
1108 ################################################################################
# ORM class for the content_associations table.
# NOTE(review): __init__ body and 'def __repr__' line elided in this extract.
1110 class ContentAssociation(object):
1111 def __init__(self, *args, **kwargs):
1115 return '<ContentAssociation %s>' % self.ca_id
1117 __all__.append('ContentAssociation')
# Bulk-insert bin_contents rows for one binary; uses bound parameters.
# NOTE(review): the surrounding try/except/commit/rollback scaffolding is
# elided in this extract.
1119 def insert_content_paths(binary_id, fullpaths, session=None):
1121 Make sure given path is associated with given binary id
1123 @type binary_id: int
1124 @param binary_id: the id of the binary
1125 @type fullpaths: list
1126 @param fullpaths: the list of paths of the file being associated with the binary
1127 @type session: SQLAlchemy session
1128 @param session: Optional SQLAlchemy session. If this is passed, the caller
1129 is responsible for ensuring a transaction has begun and committing the
1130 results or rolling back based on the result code. If not passed, a commit
1131 will be performed at the end of the function, otherwise the caller is
1132 responsible for committing.
1134 @return: True upon success
1137 privatetrans = False
1139 session = DBConn().session()
# Normalise './'-prefixed paths as produced by the contents scanner.
1144 def generate_path_dicts():
1145 for fullpath in fullpaths:
1146 if fullpath.startswith( './' ):
1147 fullpath = fullpath[2:]
1149 yield {'filename':fullpath, 'id': binary_id }
1151 for d in generate_path_dicts():
1152 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
1161 traceback.print_exc()
1163 # Only rollback if we set up the session ourself
1170 __all__.append('insert_content_paths')
1172 ################################################################################
# ORM class for the dsc_files table.
# NOTE(review): __init__ body and 'def __repr__' line elided in this extract.
1174 class DSCFile(object):
1175 def __init__(self, *args, **kwargs):
1179 return '<DSCFile %s>' % self.dscfile_id
1181 __all__.append('DSCFile')
# Query DSCFile rows by any combination of the three id filters.
# NOTE(review): the final 'return q.all()' line appears elided here.
1184 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
1186 Returns a list of DSCFiles which may be empty
1188 @type dscfile_id: int (optional)
1189 @param dscfile_id: the dscfile_id of the DSCFiles to find
1191 @type source_id: int (optional)
1192 @param source_id: the source id related to the DSCFiles to find
1194 @type poolfile_id: int (optional)
1195 @param poolfile_id: the poolfile id related to the DSCFiles to find
1198 @return: Possibly empty list of DSCFiles
1201 q = session.query(DSCFile)
1203 if dscfile_id is not None:
1204 q = q.filter_by(dscfile_id=dscfile_id)
1206 if source_id is not None:
1207 q = q.filter_by(source_id=source_id)
1209 if poolfile_id is not None:
1210 q = q.filter_by(poolfile_id=poolfile_id)
1214 __all__.append('get_dscfiles')
1216 ################################################################################
# ORM class for the files table (the archive pool).
# NOTE(review): this extract elides lines -- the tail of the __init__
# signature, the 'def fullpath' (property) line, and is_valid() carries a
# stray trailing backslash from the extraction.
1218 class PoolFile(ORMObject):
1219 def __init__(self, filename = None, location = None, filesize = -1, \
1221 self.filename = filename
1222 self.location = location
1223 self.filesize = filesize
1224 self.md5sum = md5sum
1228 return os.path.join(self.location.path, self.filename)
# is_valid(): True iff both recorded size and md5sum match the arguments.
1230 def is_valid(self, filesize = -1, md5sum = None):\
1231 return self.filesize == filesize and self.md5sum == md5sum
1233 def properties(self):
1234 return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
1235 'sha256sum', 'location', 'source', 'binary', 'last_used']
1237 def not_null_constraints(self):
1238 return ['filename', 'md5sum', 'location']
1240 __all__.append('PoolFile')
# Check whether a pool file matching name/size/md5sum exists at a location.
# NOTE(review): the 'valid = ...' assignment lines are elided here.
1243 def check_poolfile(filename, filesize, md5sum, location_id, session=None):
1246 (ValidFileFound [boolean], PoolFile object or None)
1248 @type filename: string
1249 @param filename: the filename of the file to check against the DB
1252 @param filesize: the size of the file to check against the DB
1254 @type md5sum: string
1255 @param md5sum: the md5sum of the file to check against the DB
1257 @type location_id: int
1258 @param location_id: the id of the location to look in
1261 @return: Tuple of length 2.
1262 - If valid pool file found: (C{True}, C{PoolFile object})
1263 - If valid pool file not found:
1264 - (C{False}, C{None}) if no file found
1265 - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
1268 poolfile = session.query(Location).get(location_id). \
1269 files.filter_by(filename=filename).first()
1271 if poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum):
1274 return (valid, poolfile)
1276 __all__.append('check_poolfile')
# TODO: the implementation can trivially be inlined at the place where the
# function is called
def get_poolfile_by_id(file_id, session=None):
    """
    Returns a PoolFile object or None for the given id

    @type file_id: int
    @param file_id: the id of the file to look for

    @rtype: PoolFile or None
    @return: either the PoolFile object or None
    """

    return session.query(PoolFile).get(file_id)

__all__.append('get_poolfile_by_id')
def get_poolfile_like_name(filename, session=None):
    """
    Returns an array of PoolFile objects which are like the given name

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @rtype: array
    @return: array of PoolFile objects
    """

    # TODO: There must be a way of properly using bind parameters with %FOO%
    q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))

    # Restored: the final return was lost in the excerpt.
    return q.all()

__all__.append('get_poolfile_like_name')
def add_poolfile(filename, datadict, location_id, session=None):
    """
    Add a new file to the pool

    @type filename: string
    @param filename: filename

    @type datadict: dict
    @param datadict: dict with needed data (size, md5sum, sha1sum, sha256sum)

    @type location_id: int
    @param location_id: database id of the location

    @rtype: PoolFile
    @return: the PoolFile object created
    """
    poolfile = PoolFile()
    poolfile.filename = filename
    poolfile.filesize = datadict["size"]
    poolfile.md5sum = datadict["md5sum"]
    poolfile.sha1sum = datadict["sha1sum"]
    poolfile.sha256sum = datadict["sha256sum"]
    poolfile.location_id = location_id

    session.add(poolfile)
    # Flush to get a file id (NB: This is not a commit)
    session.flush()

    return poolfile

__all__.append('add_poolfile')
1348 ################################################################################
class Fingerprint(ORMObject):
    # One OpenPGP fingerprint row (fingerprints table).
    def __init__(self, fingerprint = None):
        self.fingerprint = fingerprint

    def properties(self):
        # Attribute names exposed by ORMObject introspection.
        # NOTE(review): the continuation line listing the remaining property
        # names is missing from this excerpt — the trailing backslash below
        # is dangling; restore from upstream before running this code.
        return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \

    def not_null_constraints(self):
        # Columns that must be set before the row is written.
        return ['fingerprint']

__all__.append('Fingerprint')
def get_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr or None
    """

    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    # Standard pattern in this module: one() either returns the single row
    # or raises NoResultFound, which we map to None.
    try:
        ret = q.one()
    except NoResultFound:
        ret = None

    return ret

__all__.append('get_fingerprint')
def get_or_set_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    If no matching fpr is found, a row is inserted.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.
    A flush will be performed either way.

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr
    """

    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    try:
        ret = q.one()
    except NoResultFound:
        # Not present yet: insert a new row and make it visible
        # (commit_or_flush commits for private sessions, flushes otherwise).
        fingerprint = Fingerprint()
        fingerprint.fingerprint = fpr
        session.add(fingerprint)
        session.commit_or_flush()
        ret = fingerprint

    return ret

__all__.append('get_or_set_fingerprint')
1425 ################################################################################
# Helper routine for Keyring class
def get_ldap_name(entry):
    """
    Build a display name from an LDAP entry by joining its cn, mn and sn
    attributes, skipping values that are empty or the "-" placeholder.

    @type entry: dict
    @param entry: LDAP entry mapping attribute names to lists of values

    @rtype: string
    @return: space-joined name components (may be empty)
    """
    name = []
    for k in ["cn", "mn", "sn"]:
        # Each LDAP attribute is a (possibly absent) list of values; only
        # the first value is considered.
        ret = entry.get(k)
        if ret and ret[0] != "" and ret[0] != "-":
            name.append(ret[0])
    return " ".join(name)
1436 ################################################################################
class Keyring(object):
    # Command template producing machine-readable key listings; the doubled
    # --fingerprint also prints subkey fingerprints.
    gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
                     " --with-colons --fingerprint --fingerprint"
    # NOTE(review): class-level initialisations (keys / fpr_lookup caches,
    # keyring_id) are missing from this excerpt.

    def __init__(self, *args, **kwargs):
        # NOTE(review): the __init__ body and the __repr__ def header are
        # missing from this excerpt; the return below belongs to __repr__.
        return '<Keyring %s>' % self.keyring_name

    def de_escape_gpg_str(self, txt):
        # Decode the \xNN escape sequences gpg emits in uid strings back
        # into characters. re.split with a capturing group leaves escapes
        # at the odd indices.
        esclist = re.split(r'(\\x..)', txt)
        for x in range(1,len(esclist),2):
            esclist[x] = "%c" % (int(esclist[x][2:],16))
        return "".join(esclist)

    def parse_address(self, uid):
        """parses uid and returns a tuple of real name and email address"""
        (name, address) = email.Utils.parseaddr(uid)
        # Strip any parenthesised comment from the real-name part.
        name = re.sub(r"\s*[(].*[)]", "", name)
        name = self.de_escape_gpg_str(name)
        # NOTE(review): a fallback for an empty name appears to be missing
        # from this excerpt.
        return (name, address)

    def load_keys(self, keyring):
        # Populate self.keys / self.fpr_lookup by parsing the --with-colons
        # output of gpg for the given keyring file.
        if not self.keyring_id:
            raise Exception('Must be initialized with database information')

        k = os.popen(self.gpg_invocation % keyring, "r")
        # NOTE(review): initialisation of the loop state (current key id,
        # signingkey flag) is missing from this excerpt.

        for line in k.xreadlines():
            field = line.split(":")
            if field[0] == "pub":
                # New public key record.
                # NOTE(review): the lines selecting the key id from the
                # record and creating self.keys[key] are missing here.
                (name, addr) = self.parse_address(field[9])
                self.keys[key]["email"] = addr
                self.keys[key]["name"] = name
                self.keys[key]["fingerprints"] = []
            elif key and field[0] == "sub" and len(field) >= 12:
                # Subkey: capability field tells us if it can sign.
                signingkey = ("s" in field[11])
            elif key and field[0] == "uid":
                (name, addr) = self.parse_address(field[9])
                # Prefer the first uid carrying an email address.
                if "email" not in self.keys[key] and "@" in addr:
                    self.keys[key]["email"] = addr
                    self.keys[key]["name"] = name
            elif signingkey and field[0] == "fpr":
                # Fingerprint record for a signing-capable key: index it.
                self.keys[key]["fingerprints"].append(field[9])
                self.fpr_lookup[field[9]] = key

    def import_users_from_ldap(self, session):
        # Cross-reference keyring fingerprints against the LDAP directory
        # and record the uid owning each key.
        # NOTE(review): the ldap import and Config access preceding these
        # lines are missing from this excerpt.
        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]

        l = ldap.open(LDAPServer)
        # Anonymous bind is sufficient for this read-only search.
        l.simple_bind_s("","")
        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
               ["uid", "keyfingerprint", "cn", "mn", "sn"])

        ldap_fin_uid_id = {}
        # NOTE(review): the byuid/byname initialisation and the loop header
        # over the LDAP results are missing from this excerpt; the lines
        # below are the loop body.
        uid = entry["uid"][0]
        name = get_ldap_name(entry)
        fingerprints = entry["keyFingerPrint"]

        for f in fingerprints:
            key = self.fpr_lookup.get(f, None)
            if key not in self.keys:
                # Fingerprint not part of this keyring.
                # NOTE(review): the 'continue' for this guard is missing
                # from this excerpt.
            self.keys[key]["uid"] = uid

            # NOTE(review): intermediate bookkeeping lines are missing here.
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, name)
            byname[uid] = (keyid, name)

        return (byname, byuid)

    def generate_users_from_keyring(self, format, session):
        # Create/locate uid entries for every key, deriving the uid from
        # each key's email address via the given format string.
        # NOTE(review): byuid/byname initialisation and the
        # any_invalid-style flag are missing from this excerpt.
        for x in self.keys.keys():
            if "email" not in self.keys[x]:
                # Key without a usable email address: flag as invalid.
                self.keys[x]["uid"] = format % "invalid-uid"
            # NOTE(review): the else-branch header is missing from this
            # excerpt; the following lines handle keys WITH an email.
                uid = format % self.keys[x]["email"]
                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, self.keys[x]["name"])
                byname[uid] = (keyid, self.keys[x]["name"])
                self.keys[x]["uid"] = uid

        # NOTE(review): the guard creating the shared invalid-uid entry is
        # missing from this excerpt.
            uid = format % "invalid-uid"
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, "ungeneratable user id")
            byname[uid] = (keyid, "ungeneratable user id")

        return (byname, byuid)

__all__.append('Keyring')
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @rtype: Keyring
    @return: the Keyring object for this keyring
    """

    q = session.query(Keyring).filter_by(keyring_name=keyring)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_keyring')
1582 ################################################################################
1584 class KeyringACLMap(object):
1585 def __init__(self, *args, **kwargs):
1589 return '<KeyringACLMap %s>' % self.keyring_acl_map_id
1591 __all__.append('KeyringACLMap')
1593 ################################################################################
class DBChange(object):
    # A .changes upload recorded in the database.
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __init__ body and the __repr__ def header are
        # missing from this excerpt; the return below belongs to __repr__.
        return '<DBChange %s>' % self.changesname

    def clean_from_queue(self):
        # Detach this upload from its policy queue and drop its file
        # references.
        session = DBConn().session().object_session(self)

        # Remove changes_pool_files entries
        # NOTE(review): the statements performing the actual removals for
        # this and the next step are missing from this excerpt.

        # Remove changes_pending_files references

        # Clear out of queue
        self.in_queue = None
        self.approved_for_id = None

__all__.append('DBChange')
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return: DBChange object for the given filename (C{None} if not present)
    """
    q = session.query(DBChange).filter_by(changesname=filename)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_dbchange')
1642 ################################################################################
class Location(ORMObject):
    """An archive location (e.g. a pool directory) on disk."""

    def __init__(self, path = None):
        # Restored: assignment of the path argument was lost in the excerpt
        # (the constructor otherwise silently discarded it).
        self.path = path
        # the column 'type' should go away, see comment at mapper
        self.archive_type = 'pool'

    def properties(self):
        # Attribute names exposed by ORMObject introspection.
        return ['path', 'archive_type', 'component', 'files_count']

    def not_null_constraints(self):
        # Columns that must be set before the row is written.
        return ['path', 'archive_type']

__all__.append('Location')
def get_location(location, component=None, archive=None, session=None):
    """
    Returns Location object for the given combination of location, component
    and archive

    @type location: string
    @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}

    @type component: string
    @param component: the component name (if None, no restriction applied)

    @type archive: string
    @param archive: the archive name (if None, no restriction applied)

    @rtype: Location / None
    @return: Either a Location object or None if one can't be found
    """

    q = session.query(Location).filter_by(path=location)

    if archive is not None:
        q = q.join(Archive).filter_by(archive_name=archive)

    if component is not None:
        q = q.join(Component).filter_by(component_name=component)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_location')
1692 ################################################################################
class Maintainer(ORMObject):
    """A maintainer (or changed-by) entry: an RFC822 "Name <email>" string."""

    def __init__(self, name = None):
        # Restored: assignment of the name argument was lost in the excerpt.
        self.name = name

    def properties(self):
        # Attribute names exposed by ORMObject introspection.
        return ['name', 'maintainer_id']

    def not_null_constraints(self):
        # Restored: the return value was lost in the excerpt; 'name' is the
        # only non-id column of this object.
        return ['name']

    def get_split_maintainer(self):
        # Split "Name <email>" into its components; empty tuple when the
        # name is unset.
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())

__all__.append('Maintainer')
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.
    A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """

    q = session.query(Maintainer).filter_by(name=name)

    try:
        ret = q.one()
    except NoResultFound:
        # Not present yet: insert a new row.
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()
        ret = maintainer

    return ret

__all__.append('get_or_set_maintainer')
def get_maintainer(maintainer_id, session=None):
    """
    Return the name of the maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer
    @return: the Maintainer with this C{maintainer_id}
    """

    return session.query(Maintainer).get(maintainer_id)

__all__.append('get_maintainer')
1763 ################################################################################
1765 class NewComment(object):
1766 def __init__(self, *args, **kwargs):
1770 return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
1772 __all__.append('NewComment')
def has_new_comment(package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """

    q = session.query(NewComment)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    return bool(q.count() > 0)

__all__.append('has_new_comment')
def get_new_comments(package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """

    # Each filter is only applied when the corresponding argument is given.
    q = session.query(NewComment)
    if package is not None: q = q.filter_by(package=package)
    if version is not None: q = q.filter_by(version=version)
    if comment_id is not None: q = q.filter_by(comment_id=comment_id)

    # Restored: the final return was lost in the excerpt.
    return q.all()

__all__.append('get_new_comments')
1833 ################################################################################
1835 class Override(object):
1836 def __init__(self, *args, **kwargs):
1840 return '<Override %s (%s)>' % (self.package, self.suite_id)
1842 __all__.append('Override')
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to. If
                  None, don't limit. Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
                      limit to. If None, don't limit. Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
                         limit to. If None, don't limit. Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """

    q = session.query(Override)
    q = q.filter_by(package=package)

    # Each optional restriction accepts a scalar or a list; normalise to a
    # list and join against the relevant table.
    if suite is not None:
        if not isinstance(suite, list): suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list): component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list): overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

    # Restored: the final return was lost in the excerpt.
    return q.all()

__all__.append('get_override')
1892 ################################################################################
1894 class OverrideType(object):
1895 def __init__(self, *args, **kwargs):
1899 return '<OverrideType %s>' % self.overridetype
1901 __all__.append('OverrideType')
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: OverrideType
    @return: the OverrideType object for the given override type
        (C{None} if not present)
    """

    q = session.query(OverrideType).filter_by(overridetype=override_type)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_override_type')
1928 ################################################################################
1930 class DebContents(object):
1931 def __init__(self, *args, **kwargs):
1935 return '<DebConetnts %s: %s>' % (self.package.package,self.file)
1937 __all__.append('DebContents')
1940 class UdebContents(object):
1941 def __init__(self, *args, **kwargs):
1945 return '<UdebConetnts %s: %s>' % (self.package.package,self.file)
1947 __all__.append('UdebContents')
1949 class PendingBinContents(object):
1950 def __init__(self, *args, **kwargs):
1954 return '<PendingBinContents %s>' % self.contents_id
1956 __all__.append('PendingBinContents')
def insert_pending_content_paths(package,
    # NOTE(review): the remaining parameters (is_udeb, fullpaths, session)
    # are missing from this excerpt.
    """
    Make sure given paths are temporarily associated with given
    package

    @param package: the package to associate with should have been read in from the binary control file
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session. If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code. If not passed, a commit
    will be performed at the end of the function

    @return: True upon success, False if there is a problem
    """

    privatetrans = False
    # NOTE(review): the "if session is None:" guard setting privatetrans and
    # creating the session is partially missing from this excerpt.
    session = DBConn().session()

    arch = get_architecture(package['Architecture'], session)
    arch_id = arch.arch_id

    # Remove any already existing recorded files for this package
    q = session.query(PendingBinContents)
    q = q.filter_by(package=package['Package'])
    q = q.filter_by(version=package['Version'])
    q = q.filter_by(architecture=arch_id)
    # NOTE(review): the q.delete() executing the removal is missing from
    # this excerpt.

    for fullpath in fullpaths:

        # Normalise "./path" entries produced by tar listings.
        if fullpath.startswith( "./" ):
            fullpath = fullpath[2:]

        pca = PendingBinContents()
        pca.package = package['Package']
        pca.version = package['Version']
        # NOTE(review): the line recording the file path itself is missing
        # from this excerpt.
        pca.architecture = arch_id

        # NOTE(review): the deb/udeb conditional that selects between the
        # two type assignments, and the session.add(pca), are missing from
        # this excerpt.
        pca.type = 8 # gross
        pca.type = 7 # also gross

    # Only commit if we set up the session ourself
    # NOTE(review): the try/commit/close block is missing from this excerpt.
    except Exception, e:
        traceback.print_exc()

        # Only rollback if we set up the session ourself
        # NOTE(review): the rollback/close/return lines are missing from
        # this excerpt.

__all__.append('insert_pending_content_paths')
2033 ################################################################################
2035 class PolicyQueue(object):
2036 def __init__(self, *args, **kwargs):
2040 return '<PolicyQueue %s>' % self.queue_name
2042 __all__.append('PolicyQueue')
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """

    q = session.query(PolicyQueue).filter_by(queue_name=queuename)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_policy_queue')
def get_policy_queue_from_path(pathname, session=None):
    """
    Returns PolicyQueue object for given C{path name}

    @type pathname: string
    @param pathname: The path

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    # (Docstring fixed: it documented a "queuename" parameter but the
    # function takes "pathname".)

    q = session.query(PolicyQueue).filter_by(path=pathname)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_policy_queue_from_path')
2094 ################################################################################
2096 class Priority(object):
2097 def __init__(self, *args, **kwargs):
2100 def __eq__(self, val):
2101 if isinstance(val, str):
2102 return (self.priority == val)
2103 # This signals to use the normal comparison operator
2104 return NotImplemented
2106 def __ne__(self, val):
2107 if isinstance(val, str):
2108 return (self.priority != val)
2109 # This signals to use the normal comparison operator
2110 return NotImplemented
2113 return '<Priority %s (%s)>' % (self.priority, self.priority_id)
2115 __all__.append('Priority')
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority (C{None} if not present)
    """

    q = session.query(Priority).filter_by(priority=priority)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_priority')
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """

    ret = {}
    q = session.query(Priority)
    for x in q.all():
        ret[x.priority] = x.priority_id

    return ret

__all__.append('get_priorities')
2164 ################################################################################
2166 class Section(object):
2167 def __init__(self, *args, **kwargs):
2170 def __eq__(self, val):
2171 if isinstance(val, str):
2172 return (self.section == val)
2173 # This signals to use the normal comparison operator
2174 return NotImplemented
2176 def __ne__(self, val):
2177 if isinstance(val, str):
2178 return (self.section != val)
2179 # This signals to use the normal comparison operator
2180 return NotImplemented
2183 return '<Section %s>' % self.section
2185 __all__.append('Section')
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name (C{None} if not present)
    """

    q = session.query(Section).filter_by(section=section)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_section')
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """

    ret = {}
    q = session.query(Section)
    for x in q.all():
        ret[x.section] = x.section_id

    return ret

__all__.append('get_sections')
2234 ################################################################################
class DBSource(ORMObject):
    """A source package version installed in the archive."""

    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date

    def properties(self):
        # Attribute names exposed by ORMObject introspection.
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \
            'install_date', 'binaries_count']

    def not_null_constraints(self):
        # Columns that must be set before the row is written.
        # (Fixed: 'install_date' was listed twice.)
        return ['source', 'version', 'install_date', 'maintainer', \
            'changedby', 'poolfile']

__all__.append('DBSource')
# NOTE(review): suites = ["any"] is a mutable default argument; it appears
# to be read-only here but the excerpt is incomplete — confirm and consider
# switching to a tuple upstream.
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
      1. exact match     => 1.0-3
      2. bin-only NMU    => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: list
    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: returns 1 if a source with expected version is found, otherwise 0
    """
    # NOTE(review): initialisation lines (Config access, result flag) are
    # missing from this excerpt.

    # Strip a +bN bin-NMU suffix to recover the matching source version.
    from daklib.regexes import re_bin_only_nmu
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))

        # source must exist in suite X, or in some other suite that's
        # mapped to X, recursively... silent-maps are counted too,
        # unreleased-maps aren't.
        maps = cnf.ValueList("SuiteMappings")[:]
        # NOTE(review): the maps.reverse() call is missing from this excerpt.
        maps = [ m.split() for m in maps ]
        maps = [ (x[1], x[2]) for x in maps
                        if x[0] == "map" or x[0] == "silent-map" ]
        # NOTE(review): initialisation of the suite set and the loop header
        # over maps are missing from this excerpt.
            if x[1] in s and x[0] not in s:
        # NOTE(review): the line adding the mapped suite to the set is
        # missing from this excerpt.

        q = q.filter(DBSource.suites.any(Suite.suite_name.in_(s)))
        # NOTE(review): the count check / early continue are missing from
        # this excerpt.

        # No source found so return not ok
        # NOTE(review): the failure bookkeeping and the final return are
        # missing from this excerpt.

__all__.append('source_exists')
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """

    return session.query(Suite).filter(Suite.sources.any(source=source)).all()

__all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @type source: str
    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect. If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBSource objects for the given name (may be empty)
    """

    q = session.query(DBSource).filter_by(source=source)

    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)

    # Restored: the final return was lost in the excerpt.
    return q.all()

__all__.append('get_sources_from_name')
# FIXME: This function fails badly if it finds more than 1 source package and
# its implementation is trivial enough to be inlined.
def get_source_in_suite(source, suite, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite: string
    @param suite: the suite name

    @rtype: DBSource or None
    @return: the DBSource object for I{source} in I{suite} (C{None} if not
    found)
    """
    # (Docstring fixed: it claimed to return "the version", but the function
    # returns a DBSource object.)

    q = get_suite(suite, session).get_sources(source)
    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_source_in_suite')
2398 ################################################################################
def add_dsc_to_db(u, filename, session=None):
    # Record a source upload (.dsc plus its constituent files) in the
    # database: the DBSource row, its poolfiles, dsc_files and uploaders.
    # NOTE(review): several lines are missing from this excerpt (the
    # DBSource()/DSCFile() constructions, session.add/flush calls, etc.);
    # markers below flag each apparent gap.
    entry = u.pkg.files[filename]
    source.source = u.pkg.dsc["source"]
    source.version = u.pkg.dsc["version"]      # NB: not files[file]["version"], that has no epoch
    source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
    source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    source.install_date = datetime.now().date()

    dsc_component = entry["component"]
    dsc_location_id = entry["location id"]

    source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")

    # Set up a new poolfile if necessary
    if not entry.has_key("files id") or not entry["files id"]:
        filename = entry["pool name"] + filename
        poolfile = add_poolfile(filename, entry, dsc_location_id, session)
        pfs.append(poolfile)
        entry["files id"] = poolfile.file_id

    source.poolfile_id = entry["files id"]
    # NOTE(review): session.add(source) / session.flush() appear to be
    # missing from this excerpt.

    suite_names = u.pkg.changes["distribution"].keys()
    source.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Add the source files to the DB (files and dsc_files)
    # NOTE(review): the DSCFile construction for the .dsc itself is missing
    # from this excerpt.
    dscfile.source_id = source.source_id
    dscfile.poolfile_id = entry["files id"]
    session.add(dscfile)

    for dsc_file, dentry in u.pkg.dsc_files.items():
        # NOTE(review): the per-file DSCFile construction ("df = ...") is
        # missing from this excerpt.
        df.source_id = source.source_id

        # If the .orig tarball is already in the pool, it's
        # files id is stored in dsc_files by check_dsc().
        files_id = dentry.get("files id", None)

        # Find the entry in the files hash
        # TODO: Bail out here properly
        # NOTE(review): the body of this loop (matching f == dsc_file and
        # capturing dfentry) is missing from this excerpt.
        for f, e in u.pkg.files.items():

        if files_id is None:
            filename = dfentry["pool name"] + dsc_file

            (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
            # FIXME: needs to check for -1/-2 and or handle exception
            if found and obj is not None:
                files_id = obj.file_id
            # NOTE(review): the pfs.append(obj) for this branch is missing
            # from this excerpt.

            # If still not found, add it
            if files_id is None:
                # HACK: Force sha1sum etc into dentry
                dentry["sha1sum"] = dfentry["sha1sum"]
                dentry["sha256sum"] = dfentry["sha256sum"]
                poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
                pfs.append(poolfile)
                files_id = poolfile.file_id
        # NOTE(review): the else branch header for the known-files-id case
        # is missing from this excerpt.
            poolfile = get_poolfile_by_id(files_id, session)
            if poolfile is None:
                utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
            pfs.append(poolfile)

        df.poolfile_id = files_id
        # NOTE(review): session.add(df) is missing from this excerpt.

    # Add the src_uploaders to the DB
    uploader_ids = [source.maintainer_id]
    if u.pkg.dsc.has_key("uploaders"):
        # Uploaders are comma-separated; ">\t" re-splitting avoids breaking
        # on commas inside the name part.
        for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
            uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)

    # NOTE(review): the added_ids dict initialisation is missing from this
    # excerpt.
    for up_id in uploader_ids:
        if added_ids.has_key(up_id):
            utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))
        # NOTE(review): the continue / added_ids bookkeeping and the
        # SrcUploader construction are missing from this excerpt.
        su.maintainer_id = up_id
        su.source_id = source.source_id

    return source, dsc_component, dsc_location_id, pfs

__all__.append('add_dsc_to_db')
def add_deb_to_db(u, filename, session=None):
    """
    Contrary to what you might expect, this routine deals with both
    debs and udebs.  That info is in 'dbtype', whilst 'type' is
    'deb' for both of them
    """
    entry = u.pkg.files[filename]
    # NOTE(review): the DBBinary construction ("bin = ...") is missing from
    # this excerpt.
    bin.package = entry["package"]
    bin.version = entry["version"]
    bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
    bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    bin.arch_id = get_architecture(entry["architecture"], session).arch_id
    bin.binarytype = entry["dbtype"]

    # Find the location of the file in the pool.
    filename = entry["pool name"] + filename
    fullpath = os.path.join(cnf["Dir::Pool"], filename)
    if not entry.get("location id", None):
        entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id

    if entry.get("files id", None):
        # NOTE(review): poolfile is looked up from bin.poolfile_id BEFORE
        # that attribute is assigned on the next line — looks suspect;
        # confirm against upstream.
        poolfile = get_poolfile_by_id(bin.poolfile_id)
        bin.poolfile_id = entry["files id"]
    # NOTE(review): the else branch header is missing from this excerpt.
        poolfile = add_poolfile(filename, entry, entry["location id"], session)
        bin.poolfile_id = entry["files id"] = poolfile.file_id

    # A binary must map to exactly one source package.
    bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
    if len(bin_sources) != 1:
        raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
                                  (bin.package, bin.version, entry["architecture"],
                                   filename, bin.binarytype, u.pkg.changes["fingerprint"])

    bin.source_id = bin_sources[0].source_id

    # Add and flush object so it has an ID
    # NOTE(review): the session.add(bin)/session.flush() lines are missing
    # from this excerpt.

    suite_names = u.pkg.changes["distribution"].keys()
    bin.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Deal with contents - disabled for now
    #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
    #    print "REJECT\nCould not determine contents of package %s" % bin.package
    #    session.rollback()
    #    raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)

__all__.append('add_deb_to_db')
2568 ################################################################################
class SourceACL(object):
    """ORM class mapped to the source_acl table (see DBConn.__setupmappers)."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SourceACL %s>' % self.source_acl_id
# Public API export.
__all__.append('SourceACL')
2579 ################################################################################
class SrcFormat(object):
    """ORM class mapped to the src_format table (a source package format name)."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SrcFormat %s>' % (self.format_name)
# Public API export.
__all__.append('SrcFormat')
2590 ################################################################################
class SrcUploader(object):
    """ORM class mapped to the src_uploaders table (an uploader of a source package)."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SrcUploader %s>' % self.uploader_id
# Public API export.
__all__.append('SrcUploader')
2601 ################################################################################
# (display key, Suite attribute name) pairs used to render a Suite object
# as "Key: value" text (see the rendering loop in the Suite class below).
# NOTE(review): the extraction appears to omit at least one entry between
# 'Origin' and 'Description' (possibly 'Label') -- confirm against the
# original file.
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
2619 # Why the heck don't we have any UNIQUE constraints in table suite?
2620 # TODO: Add UNIQUE constraints for appropriate columns.
# Why the heck don't we have any UNIQUE constraints in table suite?
# TODO: Add UNIQUE constraints for appropriate columns.
class Suite(ORMObject):
    """A distribution suite; mapped to table suite in DBConn.__setupmappers."""

    def __init__(self, suite_name = None, version = None):
        self.suite_name = suite_name
        self.version = version

    def properties(self):
        # Attributes exposed by ORMObject's generic machinery for this class.
        return ['suite_name', 'version', 'sources_count', 'binaries_count']

    def not_null_constraints(self):
        # Columns that must be set before this object may be stored.
        return ['suite_name', 'version']

    def __eq__(self, val):
        # Allow comparing a Suite directly against its name string.
        if isinstance(val, str):
            return (self.suite_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.suite_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    # NOTE(review): the loop below renders "Key: value" lines from
    # SUITE_FIELDS, but its enclosing method header (presumably
    # "def details(self):") and the "ret = []" initialiser are missing from
    # this listing -- confirm against the original file.
        for disp, field in SUITE_FIELDS:
            val = getattr(self, field, None)
            # NOTE(review): the original presumably guards against None
            # values before appending -- a guard line appears to be missing.
            ret.append("%s: %s" % (disp, val))

        return "\n".join(ret)

    def get_architectures(self, skipsrc=False, skipall=False):
        """
        Returns list of Architecture objects

        @type skipsrc: boolean
        @param skipsrc: Whether to skip returning the 'source' architecture entry

        @type skipall: boolean
        @param skipall: Whether to skip returning the 'all' architecture entry

        @rtype: list
        @return: list of Architecture objects for the given name (may be empty)
        """

        q = object_session(self).query(Architecture).with_parent(self)
        # NOTE(review): the "if skipsrc:" / "if skipall:" guards appear to be
        # missing from this listing; as shown, 'source' and 'all' would be
        # filtered out unconditionally -- confirm against the original file.
        q = q.filter(Architecture.arch_string != 'source')
        q = q.filter(Architecture.arch_string != 'all')
        return q.order_by(Architecture.arch_string).all()

    def get_sources(self, source):
        """
        Returns a query object representing DBSource that is part of C{suite}.

          - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}

        @type source: string
        @param source: source package name

        @rtype: sqlalchemy.orm.query.Query
        @return: a query of DBSource
        """

        session = object_session(self)
        # NOTE(review): the continuation of this query (presumably a filter
        # restricting results to this suite) is missing from this listing --
        # confirm against the original file.
        return session.query(DBSource).filter_by(source = source). \

# Public API export.
__all__.append('Suite')
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Suite
    @return: Suite object for the requested suite name (None if not present)
    """

    q = session.query(Suite).filter_by(suite_name=suite)

    # NOTE(review): the "try:" header, the "return q.one()" success path and
    # the except-branch "return None" are missing from this listing --
    # confirm against the original file.
    except NoResultFound:

# Public API export.
__all__.append('get_suite')
2721 ################################################################################
2723 # TODO: should be removed because the implementation is too trivial
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name

    @type suite: str
    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Architecture objects for the given name (may be empty)
    """

    # Thin convenience wrapper: look the suite up, then delegate to it.
    target_suite = get_suite(suite, session)
    return target_suite.get_architectures(skipsrc, skipall)
# Public API export.
__all__.append('get_suite_architectures')
2752 ################################################################################
class SuiteSrcFormat(object):
    """ORM class mapped to the suite_src_formats association table."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
# Public API export.
__all__.append('SuiteSrcFormat')
def get_suite_src_formats(suite, session=None):
    """
    Returns list of allowed SrcFormat for C{suite}.

    @type suite: str
    @param suite: Suite name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: the list of allowed source formats for I{suite}
    """

    # Join src_format -> suite_src_formats -> suite to restrict formats to
    # those enabled for the named suite.
    q = session.query(SrcFormat)
    q = q.join(SuiteSrcFormat)
    q = q.join(Suite).filter_by(suite_name=suite)
    q = q.order_by('format_name')

    # NOTE(review): the final "return q.all()" appears to be missing from
    # this listing -- confirm against the original file.

# Public API export.
__all__.append('get_suite_src_formats')
2788 ################################################################################
class Uid(ORMObject):
    """A key owner identity (uid); mapped to table uid in DBConn.__setupmappers."""

    def __init__(self, uid = None, name = None):
        # NOTE(review): the attribute assignments (presumably
        # "self.uid = uid" and "self.name = name") are missing from this
        # listing -- confirm against the original file.

    def __eq__(self, val):
        # Allow comparing a Uid directly against its uid string.
        if isinstance(val, str):
            return (self.uid == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.uid != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        # Attributes exposed by ORMObject's generic machinery for this class.
        return ['uid', 'name', 'fingerprint']

    def not_null_constraints(self):
        # NOTE(review): the return statement (presumably "return ['uid']")
        # is missing from this listing -- confirm against the original file.

# Public API export.
__all__.append('Uid')
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: Uid
    @return: the uid object for the given uidname
    """

    q = session.query(Uid).filter_by(uid=uidname)

    # NOTE(review): the "try:" lookup (presumably "ret = q.one()") and the
    # except-branch body that constructs and adds a new Uid row are missing
    # from this listing -- confirm against the original file.
    except NoResultFound:
        # Make the newly inserted row visible: commit when this function
        # owns the session, otherwise just flush.
        session.commit_or_flush()

# Public API export.
__all__.append('get_or_set_uid')
def get_uid_from_fingerprint(fpr, session=None):
    # Look up the Uid record attached to the fingerprint string C{fpr} by
    # joining uid -> fingerprint.
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)

    # NOTE(review): the "try:" header, the "return q.one()" success path and
    # the except-branch return (presumably None) are missing from this
    # listing -- confirm against the original file.
    except NoResultFound:

# Public API export.
__all__.append('get_uid_from_fingerprint')
2861 ################################################################################
class UploadBlock(object):
    """ORM class mapped to the upload_blocks table (see DBConn.__setupmappers)."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
# Public API export.
__all__.append('UploadBlock')
2872 ################################################################################
class DBConn(object):
    """
    database module init.
    """
    # NOTE(review): DBConn shares state across all instances via
    # self.__shared_state (Borg pattern); the class-level
    # "__shared_state = {}" definition is missing from this listing --
    # confirm against the original file.

    def __init__(self, *args, **kwargs):
        # All instances share one attribute dict, so engine/metadata/mappers
        # are set up exactly once no matter how many DBConn()s are created.
        self.__dict__ = self.__shared_state

        if not getattr(self, 'initialised', False):
            self.initialised = True
            # Pass debug=... to echo SQL statements via the engine.
            self.debug = kwargs.has_key('debug')
            # NOTE(review): the call that performs the actual setup
            # (presumably "self.__createconn()") is missing from this
            # listing -- confirm against the original file.
    def __setuptables(self):
        # Reflect the archive database schema into SQLAlchemy Table objects,
        # stored on self as tbl_<name> (tables) and view_<name> (views).
        # NOTE(review): many entries in the tuples below appear to be missing
        # from this listing -- confirm against the original file.
        tables_with_primary = (
            'changes_pending_binaries',
            'changes_pending_files',
            'changes_pending_source',
            'pending_bin_contents',
        # The following tables have primary keys but sqlalchemy
        # version 0.5 fails to reflect them correctly with database
        # versions before upgrade #41.
        #'build_queue_files',

        tables_no_primary = (
            'changes_pending_files_map',
            'changes_pending_source_files',
            'changes_pool_files',
            'suite_architectures',
            'suite_src_formats',
            'suite_build_queue_copy',
            # see the comment above
            'build_queue_files',

        # NOTE(review): the header for the tuple of database view names
        # (presumably "views = (") is missing from this listing -- the names
        # below are views, iterated by the "for view_name in views" loop.
            'almost_obsolete_all_associations',
            'almost_obsolete_src_associations',
            'any_associations_source',
            'bin_assoc_by_arch',
            'bin_associations_binaries',
            'binaries_suite_arch',
            'binfiles_suite_component_arch',
            'newest_all_associations',
            'newest_any_associations',
            'newest_src_association',
            'obsolete_all_associations',
            'obsolete_any_associations',
            'obsolete_any_by_all_associations',
            'obsolete_src_associations',
            'src_associations_bin',
            'src_associations_src',
            'suite_arch_by_name',

        # Sqlalchemy version 0.5 fails to reflect the SERIAL type
        # correctly and that is why we have to use a workaround. It can
        # be removed as soon as we switch to version 0.6.
        for table_name in tables_with_primary:
            table = Table(table_name, self.db_meta, \
                Column('id', Integer, primary_key = True), \
                autoload=True, useexisting=True)
            setattr(self, 'tbl_%s' % table_name, table)

        for table_name in tables_no_primary:
            table = Table(table_name, self.db_meta, autoload=True)
            setattr(self, 'tbl_%s' % table_name, table)

        for view_name in views:
            view = Table(view_name, self.db_meta, autoload=True)
            setattr(self, 'view_%s' % view_name, view)
    def __setupmappers(self):
        # Classical (non-declarative) SQLAlchemy mapping: bind every ORM
        # class in this module to its reflected table and declare the
        # relations between them.  'extension = validator' attaches the
        # module's MapperExtension to classes derived from ORMObject.
        # NOTE(review): some mapper calls / property lines appear to be
        # omitted from this listing -- confirm against the original file.
        mapper(Architecture, self.tbl_architecture,
            properties = dict(arch_id = self.tbl_architecture.c.id,
                suites = relation(Suite, secondary=self.tbl_suite_architectures,
                    order_by='suite_name',
                    backref=backref('architectures', order_by='arch_string'))),
            extension = validator)

        mapper(Archive, self.tbl_archive,
            properties = dict(archive_id = self.tbl_archive.c.id,
                archive_name = self.tbl_archive.c.name))

        mapper(BinAssociation, self.tbl_bin_associations,
            properties = dict(ba_id = self.tbl_bin_associations.c.id,
                suite_id = self.tbl_bin_associations.c.suite,
                suite = relation(Suite),
                binary_id = self.tbl_bin_associations.c.bin,
                binary = relation(DBBinary)))

        mapper(PendingBinContents, self.tbl_pending_bin_contents,
            properties = dict(contents_id =self.tbl_pending_bin_contents.c.id,
                filename = self.tbl_pending_bin_contents.c.filename,
                package = self.tbl_pending_bin_contents.c.package,
                version = self.tbl_pending_bin_contents.c.version,
                arch = self.tbl_pending_bin_contents.c.arch,
                otype = self.tbl_pending_bin_contents.c.type))

        mapper(DebContents, self.tbl_deb_contents,
            properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
                package=self.tbl_deb_contents.c.package,
                suite=self.tbl_deb_contents.c.suite,
                arch=self.tbl_deb_contents.c.arch,
                section=self.tbl_deb_contents.c.section,
                filename=self.tbl_deb_contents.c.filename))

        mapper(UdebContents, self.tbl_udeb_contents,
            properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
                package=self.tbl_udeb_contents.c.package,
                suite=self.tbl_udeb_contents.c.suite,
                arch=self.tbl_udeb_contents.c.arch,
                section=self.tbl_udeb_contents.c.section,
                filename=self.tbl_udeb_contents.c.filename))

        mapper(BuildQueue, self.tbl_build_queue,
            properties = dict(queue_id = self.tbl_build_queue.c.id))

        mapper(BuildQueueFile, self.tbl_build_queue_files,
            properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
                poolfile = relation(PoolFile, backref='buildqueueinstances')))

        mapper(DBBinary, self.tbl_binaries,
            properties = dict(binary_id = self.tbl_binaries.c.id,
                package = self.tbl_binaries.c.package,
                version = self.tbl_binaries.c.version,
                maintainer_id = self.tbl_binaries.c.maintainer,
                maintainer = relation(Maintainer),
                source_id = self.tbl_binaries.c.source,
                source = relation(DBSource, backref='binaries'),
                arch_id = self.tbl_binaries.c.architecture,
                architecture = relation(Architecture),
                poolfile_id = self.tbl_binaries.c.file,
                poolfile = relation(PoolFile, backref=backref('binary', uselist = False)),
                binarytype = self.tbl_binaries.c.type,
                fingerprint_id = self.tbl_binaries.c.sig_fpr,
                fingerprint = relation(Fingerprint),
                install_date = self.tbl_binaries.c.install_date,
                suites = relation(Suite, secondary=self.tbl_bin_associations,
                    backref=backref('binaries', lazy='dynamic')),
                binassociations = relation(BinAssociation,
                    primaryjoin=(self.tbl_binaries.c.id==self.tbl_bin_associations.c.bin))),
            extension = validator)

        mapper(BinaryACL, self.tbl_binary_acl,
            properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))

        mapper(BinaryACLMap, self.tbl_binary_acl_map,
            properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
                fingerprint = relation(Fingerprint, backref="binary_acl_map"),
                architecture = relation(Architecture)))

        mapper(Component, self.tbl_component,
            properties = dict(component_id = self.tbl_component.c.id,
                component_name = self.tbl_component.c.name))

        mapper(DBConfig, self.tbl_config,
            properties = dict(config_id = self.tbl_config.c.id))

        mapper(DSCFile, self.tbl_dsc_files,
            properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
                source_id = self.tbl_dsc_files.c.source,
                source = relation(DBSource),
                poolfile_id = self.tbl_dsc_files.c.file,
                poolfile = relation(PoolFile)))

        mapper(PoolFile, self.tbl_files,
            properties = dict(file_id = self.tbl_files.c.id,
                filesize = self.tbl_files.c.size,
                location_id = self.tbl_files.c.location,
                location = relation(Location,
                    # using lazy='dynamic' in the back
                    # reference because we have A LOT of
                    # files in one location
                    backref=backref('files', lazy='dynamic'))),
            extension = validator)

        mapper(Fingerprint, self.tbl_fingerprint,
            properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
                uid_id = self.tbl_fingerprint.c.uid,
                uid = relation(Uid),
                keyring_id = self.tbl_fingerprint.c.keyring,
                keyring = relation(Keyring),
                source_acl = relation(SourceACL),
                binary_acl = relation(BinaryACL)),
            extension = validator)

        mapper(Keyring, self.tbl_keyrings,
            properties = dict(keyring_name = self.tbl_keyrings.c.name,
                keyring_id = self.tbl_keyrings.c.id))

        mapper(DBChange, self.tbl_changes,
            properties = dict(change_id = self.tbl_changes.c.id,
                poolfiles = relation(PoolFile,
                    secondary=self.tbl_changes_pool_files,
                    backref="changeslinks"),
                seen = self.tbl_changes.c.seen,
                source = self.tbl_changes.c.source,
                binaries = self.tbl_changes.c.binaries,
                architecture = self.tbl_changes.c.architecture,
                distribution = self.tbl_changes.c.distribution,
                urgency = self.tbl_changes.c.urgency,
                maintainer = self.tbl_changes.c.maintainer,
                changedby = self.tbl_changes.c.changedby,
                date = self.tbl_changes.c.date,
                version = self.tbl_changes.c.version,
                files = relation(ChangePendingFile,
                    secondary=self.tbl_changes_pending_files_map,
                    backref="changesfile"),
                in_queue_id = self.tbl_changes.c.in_queue,
                # a changes file may sit in a policy queue awaiting review;
                # explicit primaryjoin because in_queue is not a FK-named column
                in_queue = relation(PolicyQueue,
                    primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
                approved_for_id = self.tbl_changes.c.approved_for))

        mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
            properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))

        mapper(ChangePendingFile, self.tbl_changes_pending_files,
            properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
                filename = self.tbl_changes_pending_files.c.filename,
                size = self.tbl_changes_pending_files.c.size,
                md5sum = self.tbl_changes_pending_files.c.md5sum,
                sha1sum = self.tbl_changes_pending_files.c.sha1sum,
                sha256sum = self.tbl_changes_pending_files.c.sha256sum))

        mapper(ChangePendingSource, self.tbl_changes_pending_source,
            properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
                change = relation(DBChange),
                # maintainer and changedby both reference the maintainer
                # table, hence the explicit primaryjoins
                maintainer = relation(Maintainer,
                    primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
                changedby = relation(Maintainer,
                    primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
                fingerprint = relation(Fingerprint),
                source_files = relation(ChangePendingFile,
                    secondary=self.tbl_changes_pending_source_files,
                    backref="pending_sources")))

        mapper(KeyringACLMap, self.tbl_keyring_acl_map,
            properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
                keyring = relation(Keyring, backref="keyring_acl_map"),
                architecture = relation(Architecture)))

        mapper(Location, self.tbl_location,
            properties = dict(location_id = self.tbl_location.c.id,
                component_id = self.tbl_location.c.component,
                component = relation(Component),
                archive_id = self.tbl_location.c.archive,
                archive = relation(Archive),
                # FIXME: the 'type' column is old cruft and
                # should be removed in the future.
                archive_type = self.tbl_location.c.type),
            extension = validator)

        mapper(Maintainer, self.tbl_maintainer,
            properties = dict(maintainer_id = self.tbl_maintainer.c.id,
                # a maintainer may both maintain and have changed sources,
                # hence two relations against the same target table
                maintains_sources = relation(DBSource, backref='maintainer',
                    primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
                changed_sources = relation(DBSource, backref='changedby',
                    primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
            extension = validator)

        mapper(NewComment, self.tbl_new_comments,
            properties = dict(comment_id = self.tbl_new_comments.c.id))

        mapper(Override, self.tbl_override,
            properties = dict(suite_id = self.tbl_override.c.suite,
                suite = relation(Suite),
                package = self.tbl_override.c.package,
                component_id = self.tbl_override.c.component,
                component = relation(Component),
                priority_id = self.tbl_override.c.priority,
                priority = relation(Priority),
                section_id = self.tbl_override.c.section,
                section = relation(Section),
                overridetype_id = self.tbl_override.c.type,
                overridetype = relation(OverrideType)))

        mapper(OverrideType, self.tbl_override_type,
            properties = dict(overridetype = self.tbl_override_type.c.type,
                overridetype_id = self.tbl_override_type.c.id))

        mapper(PolicyQueue, self.tbl_policy_queue,
            properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))

        mapper(Priority, self.tbl_priority,
            properties = dict(priority_id = self.tbl_priority.c.id))

        mapper(Section, self.tbl_section,
            properties = dict(section_id = self.tbl_section.c.id,
                section=self.tbl_section.c.section))

        mapper(DBSource, self.tbl_source,
            properties = dict(source_id = self.tbl_source.c.id,
                version = self.tbl_source.c.version,
                maintainer_id = self.tbl_source.c.maintainer,
                poolfile_id = self.tbl_source.c.file,
                poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
                fingerprint_id = self.tbl_source.c.sig_fpr,
                fingerprint = relation(Fingerprint),
                changedby_id = self.tbl_source.c.changedby,
                srcfiles = relation(DSCFile,
                    primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
                suites = relation(Suite, secondary=self.tbl_src_associations,
                    backref=backref('sources', lazy='dynamic')),
                srcuploaders = relation(SrcUploader)),
            extension = validator)

        mapper(SourceACL, self.tbl_source_acl,
            properties = dict(source_acl_id = self.tbl_source_acl.c.id))

        mapper(SrcFormat, self.tbl_src_format,
            properties = dict(src_format_id = self.tbl_src_format.c.id,
                format_name = self.tbl_src_format.c.format_name))

        mapper(SrcUploader, self.tbl_src_uploaders,
            properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
                source_id = self.tbl_src_uploaders.c.source,
                source = relation(DBSource,
                    primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
                maintainer_id = self.tbl_src_uploaders.c.maintainer,
                maintainer = relation(Maintainer,
                    primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))

        mapper(Suite, self.tbl_suite,
            properties = dict(suite_id = self.tbl_suite.c.id,
                policy_queue = relation(PolicyQueue),
                copy_queues = relation(BuildQueue,
                    secondary=self.tbl_suite_build_queue_copy)),
            extension = validator)

        mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
            properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
                suite = relation(Suite, backref='suitesrcformats'),
                src_format_id = self.tbl_suite_src_formats.c.src_format,
                src_format = relation(SrcFormat)))

        mapper(Uid, self.tbl_uid,
            properties = dict(uid_id = self.tbl_uid.c.id,
                fingerprint = relation(Fingerprint)),
            extension = validator)

        mapper(UploadBlock, self.tbl_upload_blocks,
            properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
                fingerprint = relation(Fingerprint, backref="uploadblocks"),
                uid = relation(Uid, backref="uploadblocks")))
    ## Connection functions
    def __createconn(self):
        # Build the postgres connection string from dak configuration, then
        # create the engine, metadata, session factory, tables and mappers.
        from config import Config
        # NOTE(review): the "cnf = Config()" line and the "if cnf["DB::Host"]:"
        # guard (choosing TCP vs local-socket connection strings) are missing
        # from this listing -- confirm against the original file.
            # TCP connection to a remote host
            connstr = "postgres://%s" % cnf["DB::Host"]
            if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
                connstr += ":%s" % cnf["DB::Port"]
            connstr += "/%s" % cnf["DB::Name"]
        # NOTE(review): the "else:" branch header is missing from this
        # listing -- the lines below build the local-socket connection string.
            connstr = "postgres:///%s" % cnf["DB::Name"]
            if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
                connstr += "?port=%s" % cnf["DB::Port"]

        self.db_pg = create_engine(connstr, echo=self.debug)
        self.db_meta = MetaData()
        self.db_meta.bind = self.db_pg
        self.db_smaker = sessionmaker(bind=self.db_pg,
        # NOTE(review): the remaining sessionmaker keyword arguments
        # (presumably autoflush/autocommit) are missing from this listing.

        self.__setuptables()
        self.__setupmappers()

    # NOTE(review): the "def session(self):" method header is missing from
    # this listing; this return hands out a new session from the factory.
        return self.db_smaker()
# Public API export.
__all__.append('DBConn')