5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
47 import simplejson as json
49 from datetime import datetime, timedelta
50 from errno import ENOENT
51 from tempfile import mkstemp, mkdtemp
53 from inspect import getargspec
56 from sqlalchemy import create_engine, Table, MetaData, Column, Integer
57 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
58 backref, MapperExtension, EXT_CONTINUE
59 from sqlalchemy import types as sqltypes
61 # Don't remove this, we re-export the exceptions to scripts which import us
62 from sqlalchemy.exc import *
63 from sqlalchemy.orm.exc import NoResultFound
65 # Only import Config until Queue stuff is changed to store its config
67 from config import Config
68 from textutils import fix_maintainer
69 from dak_exceptions import DBUpdateError, NoSourceFieldError
71 # suppress some deprecation warnings in squeeze related to sqlalchemy
73 warnings.filterwarnings('ignore', \
74 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
76 # TODO: sqlalchemy needs some extra configuration to correctly reflect
77 # the ind_deb_contents_* indexes - we ignore the warnings at the moment
78 warnings.filterwarnings("ignore", 'Predicate of partial index', SAWarning)
################################################################################

# Patch in support for the debversion field type so that it works during
# reflection.
# NOTE(review): this excerpt appears truncated -- the two assignments below
# were presumably guarded by a try/except choosing the right base class for
# the installed SQLAlchemy version; confirm against VCS.

# that is for sqlalchemy 0.6
UserDefinedType = sqltypes.UserDefinedType
# this one for sqlalchemy 0.5
UserDefinedType = sqltypes.TypeEngine

class DebVersion(UserDefinedType):
    # Maps the PostgreSQL 'debversion' column type into SQLAlchemy.
    def get_col_spec(self):
        # NOTE(review): body missing from this excerpt -- presumably returns
        # the SQL type name; confirm against VCS.

    def bind_processor(self, dialect):
        # NOTE(review): body missing from this excerpt.

    # ' = None' is needed for sqlalchemy 0.5:
    def result_processor(self, dialect, coltype = None):
        # NOTE(review): body missing from this excerpt.

# Register the type with the postgres dialect so reflected tables pick up
# DebVersion columns; only SQLA 0.5/0.6 are supported.
sa_major_version = sqlalchemy.__version__[0:3]
if sa_major_version in ["0.5", "0.6"]:
    from sqlalchemy.databases import postgres
    postgres.ischema_names['debversion'] = DebVersion
# NOTE(review): the raise below presumably sits in the missing 'else:' branch.
raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")

################################################################################

# Re-exported SQLAlchemy exception names plus our own public names; extended
# via __all__.append() throughout this module.
__all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']

################################################################################
def session_wrapper(fn):
    """
    Wrapper around common ".., session=None):" handling. If the wrapped
    function is called without passing 'session', we create a local one
    and destroy it when the function ends.

    Also attaches a commit_or_flush method to the session; if we created a
    local session, this is a synonym for session.commit(), otherwise it is a
    synonym for session.flush().
    """

    def wrapped(*args, **kwargs):
        private_transaction = False

        # Find the session object
        session = kwargs.get('session')

        # NOTE(review): several lines of this function are missing from this
        # excerpt (the 'session is None' guard, the else branch, the
        # try/finally that closes a private session, and 'return wrapped');
        # the comments below annotate only what is visible.
        if len(args) <= len(getargspec(fn)[0]) - 1:
            # No session specified as last argument or in kwargs
            private_transaction = True
            session = kwargs['session'] = DBConn().session()
        # Session is last argument in args
            session = args[-1] = DBConn().session()
            private_transaction = True

        if private_transaction:
            # We own the session, so commit_or_flush really commits.
            session.commit_or_flush = session.commit
            # Caller-supplied session: only flush, the caller commits.
            session.commit_or_flush = session.flush

            return fn(*args, **kwargs)

        if private_transaction:
            # We created a session; close it.

    # Preserve the wrapped function's metadata for introspection/docs
    # (pre-functools.wraps style; func_name is the Python 2 spelling).
    wrapped.__doc__ = fn.__doc__
    wrapped.func_name = fn.func_name

__all__.append('session_wrapper')

################################################################################
class ORMObject(object):
    """
    ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
    derived classes must implement the properties() method.
    """
    # NOTE(review): the 'def' headers for json(), classname(), __repr__(),
    # __str__() and validate() are missing from this excerpt; the method
    # bodies below are annotated where their headers are absent.

    def properties(self):
        """
        This method should be implemented by all derived classes and returns a
        list of the important properties. The properties 'created' and
        'modified' will be added automatically. A suffix '_count' should be
        added to properties that are lists or query objects. The most important
        property name should be returned as the first element in the list
        because it is used by repr().
        """

    # NOTE(review): body of json(self) -- its 'def' line is not visible here.
        """
        Returns a JSON representation of the object based on the properties
        returned from the properties() method.
        """
        # add created and modified
        all_properties = self.properties() + ['created', 'modified']
        for property in all_properties:
            # check for list or query
            if property[-6:] == '_count':
                # '_count' properties serialize the size of the underlying
                # collection rather than its contents.
                real_property = property[:-6]
                if not hasattr(self, real_property):
                value = getattr(self, real_property)
                if hasattr(value, '__len__'):
                elif hasattr(value, 'count'):
                    # query object: ask the database for the count
                    value = value.count()
                raise KeyError('Do not understand property %s.' % property)
            if not hasattr(self, property):
            value = getattr(self, property)
            elif isinstance(value, ORMObject):
                # use repr() for ORMObject types
            # we want a string for all other types because json cannot
            # serialize arbitrary objects
            data[property] = value
        return json.dumps(data)

    # NOTE(review): body of classname(self) -- 'def' line not visible here.
        """
        Returns the name of the class.
        """
        return type(self).__name__

    # NOTE(review): body of __repr__(self) -- 'def' line not visible here.
        """
        Returns a short string representation of the object using the first
        element from the properties() method.
        """
        primary_property = self.properties()[0]
        value = getattr(self, primary_property)
        return '<%s %s>' % (self.classname(), str(value))

    # NOTE(review): body of __str__(self) -- 'def' line not visible here.
        """
        Returns a human readable form of the object using the properties()
        method.
        """
        return '<%s %s>' % (self.classname(), self.json())

    # NOTE(review): body of validate(self) -- 'def' line not visible here.
        """
        This function should be implemented by derived classes to validate self.
        It may raise the DBUpdateError exception if needed.
        """

__all__.append('ORMObject')

################################################################################
class Validator(MapperExtension):
    """
    This class calls the validate() method for each instance for the
    'before_update' and 'before_insert' events. A global object validator is
    used for configuring the individual mappers.
    """

    def before_update(self, mapper, connection, instance):
        # NOTE(review): body missing from this excerpt -- presumably calls
        # instance.validate() and returns EXT_CONTINUE; confirm against VCS.

    def before_insert(self, mapper, connection, instance):
        # NOTE(review): body missing from this excerpt -- presumably calls
        # instance.validate() and returns EXT_CONTINUE; confirm against VCS.

# Shared MapperExtension instance handed to the individual mapper() calls.
validator = Validator()

################################################################################
class Architecture(ORMObject):
    """An architecture row; compares equal to its plain name string."""

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Allow direct comparison against a plain architecture name string.
        if isinstance(val, str):
            return (self.arch_string == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.arch_string != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        # 'arch_string' first: it is used by ORMObject.__repr__().
        return ['arch_string', 'arch_id', 'suites_count']

    def validate(self):
        # Invoked via the Validator mapper extension before INSERT/UPDATE.
        if self.arch_string is None or len(self.arch_string) == 0:
            raise DBUpdateError( \
                "Validation failed because 'arch_string' must not be empty in object\n%s" % str(self))

__all__.append('Architecture')
@session_wrapper
def get_architecture(architecture, session=None):
    """
    Returns database id for given C{architecture}.

    @type architecture: string
    @param architecture: The name of the architecture

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Architecture
    @return: Architecture object for the given arch (None if not present)
    """

    q = session.query(Architecture).filter_by(arch_string=architecture)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_architecture')

# TODO: should be removed because the implementation is too trivial

@session_wrapper
def get_architecture_suites(architecture, session=None):
    """
    Returns list of Suite objects for given C{architecture} name

    @type architecture: str
    @param architecture: Architecture name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Suite objects for the given name (may be empty)
    """

    return get_architecture(architecture, session).suites

__all__.append('get_architecture_suites')
352 ################################################################################
354 class Archive(object):
355 def __init__(self, *args, **kwargs):
359 return '<Archive %s>' % self.archive_name
__all__.append('Archive')

@session_wrapper
def get_archive(archive, session=None):
    """
    returns database id for given C{archive}.

    @type archive: string
    @param archive: the name of the archive

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Archive
    @return: Archive object for the given name (None if not present)
    """

    # Archive names are matched case-insensitively by lower-casing the input.
    archive = archive.lower()

    q = session.query(Archive).filter_by(archive_name=archive)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_archive')
390 ################################################################################
class BinAssociation(object):
    # NOTE(review): attributes presumably come from the SQLAlchemy mapping
    # (not visible in this excerpt).
    def __init__(self, *args, **kwargs):
        # NOTE(review): __init__ body and the __repr__ 'def' line are
        # missing from this excerpt.
        return '<BinAssociation %s (%s, %s)>' % (self.ba_id, self.binary, self.suite)

__all__.append('BinAssociation')

################################################################################

class BinContents(object):
    def __init__(self, *args, **kwargs):
        # NOTE(review): __init__ body and the __repr__ 'def' line are
        # missing from this excerpt.
        return '<BinContents (%s, %s)>' % (self.binary, self.filename)

__all__.append('BinContents')

################################################################################

class DBBinary(object):
    def __init__(self, *args, **kwargs):
        # NOTE(review): __init__ body and the __repr__ 'def' line are
        # missing from this excerpt.
        return '<DBBinary %s (%s, %s)>' % (self.package, self.version, self.architecture)

__all__.append('DBBinary')

# NOTE(review): the @session_wrapper decorator and parts of the docstring
# appear to be missing from this excerpt.
def get_suites_binary_in(package, session=None):
    """
    Returns list of Suite objects which given C{package} name is in

    @type package: str
    @param package: DBBinary package name to search for

    @rtype: list
    @return: list of Suite objects for the given package
    """

    return session.query(Suite).join(BinAssociation).join(DBBinary).filter_by(package=package).all()

__all__.append('get_suites_binary_in')
@session_wrapper
def get_binary_from_id(binary_id, session=None):
    """
    Returns DBBinary object for given C{id}

    @type binary_id: int
    @param binary_id: Id of the required binary

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBBinary
    @return: DBBinary object for the given binary (None if not present)
    """

    q = session.query(DBBinary).filter_by(binary_id=binary_id)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_binary_from_id')

@session_wrapper
def get_binaries_from_name(package, version=None, architecture=None, session=None):
    """
    Returns list of DBBinary objects for given C{package} name

    @type package: str
    @param package: DBBinary package name to search for

    @type version: str or None
    @param version: Version to search for (or None)

    @type architecture: str, list or None
    @param architecture: Architectures to limit to (or None if no limit)

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBBinary objects for the given name (may be empty)
    """

    q = session.query(DBBinary).filter_by(package=package)

    if version is not None:
        q = q.filter_by(version=version)

    if architecture is not None:
        # Accept either a single architecture name or a list of names.
        if not isinstance(architecture, list):
            architecture = [architecture]
        q = q.join(Architecture).filter(Architecture.arch_string.in_(architecture))

    return q.all()

__all__.append('get_binaries_from_name')

@session_wrapper
def get_binaries_from_source_id(source_id, session=None):
    """
    Returns list of DBBinary objects for given C{source_id}

    @type source_id: int
    @param source_id: source_id to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBBinary objects for the given source id (may be empty)
    """

    return session.query(DBBinary).filter_by(source_id=source_id).all()

__all__.append('get_binaries_from_source_id')
def get_binary_from_name_suite(package, suitename, session=None):
    ### For dak examine-package
    ### XXX: Doesn't use object API yet
    # NOTE(review): the @session_wrapper decorator and some of the WHERE
    # clauses (the joins between b/fi/ba/su) appear to be missing from this
    # excerpt; the query below is incomplete as shown.
    # SECURITY NOTE: the SQL is built with %-interpolation rather than bind
    # parameters -- callers must not pass untrusted input here.

    sql = """SELECT DISTINCT(b.package), b.version, c.name, su.suite_name
             FROM binaries b, files fi, location l, component c, bin_associations ba, suite su
             WHERE b.package='%(package)s'
               AND fi.location = l.id
               AND l.component = c.id
               AND su.suite_name %(suitename)s
             ORDER BY b.version DESC"""

    return session.execute(sql % {'package': package, 'suitename': suitename})

__all__.append('get_binary_from_name_suite')

def get_binary_components(package, suitename, arch, session=None):
    # Check for packages that have moved from one component to another
    # NOTE(review): the tail of this query (remaining join clause and the
    # closing triple-quote) and the @session_wrapper decorator appear to be
    # missing from this excerpt.
    query = """SELECT c.name FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f
    WHERE b.package=:package AND s.suite_name=:suitename
      AND (a.arch_string = :arch OR a.arch_string = 'all')
      AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
      AND f.location = l.id
      AND l.component = c.id

    # Bind parameters (safe against SQL injection, unlike the query above).
    vals = {'package': package, 'suitename': suitename, 'arch': arch}

    return session.execute(query, vals)

__all__.append('get_binary_components')
559 ################################################################################
class BinaryACL(object):
    def __init__(self, *args, **kwargs):
        # NOTE(review): __init__ body and the __repr__ 'def' line are
        # missing from this excerpt.
        return '<BinaryACL %s>' % self.binary_acl_id

__all__.append('BinaryACL')

################################################################################

class BinaryACLMap(object):
    def __init__(self, *args, **kwargs):
        # NOTE(review): __init__ body and the __repr__ 'def' line are
        # missing from this excerpt.
        return '<BinaryACLMap %s>' % self.binary_acl_map_id

__all__.append('BinaryACLMap')
581 ################################################################################
586 ArchiveDir "%(archivepath)s";
587 OverrideDir "%(overridedir)s";
588 CacheDir "%(cachedir)s";
593 Packages::Compress ". bzip2 gzip";
594 Sources::Compress ". bzip2 gzip";
599 bindirectory "incoming"
604 BinOverride "override.sid.all3";
605 BinCacheDB "packages-accepted.db";
607 FileList "%(filelist)s";
610 Packages::Extensions ".deb .udeb";
613 bindirectory "incoming/"
616 BinOverride "override.sid.all3";
617 SrcOverride "override.sid.all3.src";
618 FileList "%(filelist)s";
class BuildQueue(object):
    """A build queue plus the routines that maintain its on-disk
    apt-ftparchive metadata (Packages/Sources/Release files).

    NOTE(review): many interior lines of this class (loop headers, try/except
    blocks, returns and cleanup code) are missing from this excerpt; the
    comments below annotate only what is visible.
    """
    def __init__(self, *args, **kwargs):
        # NOTE(review): __init__ body and the __repr__ 'def' line are
        # missing from this excerpt.
        return '<BuildQueue %s>' % self.queue_name

    def write_metadata(self, starttime, force=False):
        # Do we write out metafiles?
        if not (force or self.generate_metadata):
        session = DBConn().session().object_session(self)

        fl_fd = fl_name = ac_fd = ac_name = None

        # 'source' is not a real build architecture.
        arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
        startdir = os.getcwd()

        # Grab files we want to include
        newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
        # Write file list with newer files
        (fl_fd, fl_name) = mkstemp()
        os.write(fl_fd, '%s\n' % n.fullpath)

        # Write minimal apt.conf
        # TODO: Remove hardcoding from template
        (ac_fd, ac_name) = mkstemp()
        os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
                                            'cachedir': cnf["Dir::Cache"],
                                            'overridedir': cnf["Dir::Override"],

        # Run apt-ftparchive generate
        os.chdir(os.path.dirname(ac_name))
        os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))

        # Run apt-ftparchive release
        # TODO: Eww - fix this
        bname = os.path.basename(self.path)

        # We have to remove the Release file otherwise it'll be included in the
        # new one
        os.unlink(os.path.join(bname, 'Release'))

        os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))

        # Crude hack with open and append, but this whole section is and should be redone.
        if self.notautomatic:
            release=open("Release", "a")
            release.write("NotAutomatic: yes")

        keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
        if cnf.has_key("Dinstall::SigningPubKeyring"):
            keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]

        os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))

        # Move the files if we got this far
        os.rename('Release', os.path.join(bname, 'Release'))
        os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))

        # Clean up any left behind files

    def clean_and_update(self, starttime, Logger, dryrun=False):
        """WARNING: This routine commits for you"""
        session = DBConn().session().object_session(self)

        # Refresh the on-disk metadata before expiring old files.
        if self.generate_metadata and not dryrun:
            self.write_metadata(starttime)

        # Grab files older than our execution time
        older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()

        Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
        Logger.log(["I: Removing %s from the queue" % o.fullpath])
        os.unlink(o.fullpath)
        # If it wasn't there, don't worry
        if e.errno == ENOENT:
        # TODO: Replace with proper logging call
        Logger.log(["E: Could not remove %s" % o.fullpath])

        # Drop metadata/advisory files that no longer have a DB entry.
        for f in os.listdir(self.path):
            if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):

            r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
            except NoResultFound:
                fp = os.path.join(self.path, f)
                Logger.log(["I: Would remove unused link %s" % fp])
                Logger.log(["I: Removing unused link %s" % fp])
                Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])

    def add_file_from_pool(self, poolfile):
        """Copies a file into the pool. Assumes that the PoolFile object is
        attached to the same SQLAlchemy session as the Queue object is.

        The caller is responsible for committing after calling this function."""
        poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]

        # Check if we have a file of this name or this ID already
        for f in self.queuefiles:
            if f.fileid is not None and f.fileid == poolfile.file_id or \
               f.poolfile.filename == poolfile_basename:
                # In this case, update the BuildQueueFile entry so we
                # don't remove it too early
                f.lastused = datetime.now()
                DBConn().session().object_session(poolfile).add(f)

        # Prepare BuildQueueFile object
        qf = BuildQueueFile()
        qf.build_queue_id = self.queue_id
        qf.lastused = datetime.now()
        qf.filename = poolfile_basename

        targetpath = poolfile.fullpath
        queuepath = os.path.join(self.path, poolfile_basename)

        # We need to copy instead of symlink
        utils.copy(targetpath, queuepath)
        # NULL in the fileid field implies a copy
        os.symlink(targetpath, queuepath)
        qf.fileid = poolfile.file_id

        # Get the same session as the PoolFile is using and add the qf to it
        DBConn().session().object_session(poolfile).add(qf)

__all__.append('BuildQueue')
@session_wrapper
def get_build_queue(queuename, session=None):
    """
    Returns BuildQueue object for given C{queue name}.

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: BuildQueue
    @return: BuildQueue object for the given queue (None if not present)
    """

    q = session.query(BuildQueue).filter_by(queue_name=queuename)

    try:
        return q.one()
    except NoResultFound:
        # NOTE(review): the original docstring claimed the queue is created
        # when missing, but the visible NoResultFound handler indicates a
        # plain lookup -- confirm against VCS.
        return None

__all__.append('get_build_queue')
849 ################################################################################
class BuildQueueFile(object):
    """A file currently present in a build queue.

    Attributes (filename, build_queue_id, buildqueue, ...) are provided by
    the SQLAlchemy mapping, so __init__ accepts and ignores any arguments.
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)

    @property
    def fullpath(self):
        # Absolute on-disk location: queue directory + file name.
        return os.path.join(self.buildqueue.path, self.filename)
# Export the class via the module's public API list.
__all__.append('BuildQueueFile')
################################################################################

class ChangePendingBinary(object):
    def __init__(self, *args, **kwargs):
        # NOTE(review): __init__ body and the __repr__ 'def' line are
        # missing from this excerpt.
        return '<ChangePendingBinary %s>' % self.change_pending_binary_id

__all__.append('ChangePendingBinary')

################################################################################

class ChangePendingFile(object):
    def __init__(self, *args, **kwargs):
        # NOTE(review): __init__ body and the __repr__ 'def' line are
        # missing from this excerpt.
        return '<ChangePendingFile %s>' % self.change_pending_file_id

__all__.append('ChangePendingFile')

################################################################################

class ChangePendingSource(object):
    def __init__(self, *args, **kwargs):
        # NOTE(review): __init__ body and the __repr__ 'def' line are
        # missing from this excerpt.
        return '<ChangePendingSource %s>' % self.change_pending_source_id

__all__.append('ChangePendingSource')

################################################################################
class Component(object):
    """An archive component; compares equal to its plain name string."""

    def __init__(self, *args, **kwargs):
        pass

    def __eq__(self, val):
        # Allow direct comparison against a plain component name string.
        if isinstance(val, str):
            return (self.component_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.component_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __repr__(self):
        return '<Component %s>' % self.component_name
# Export the class via the module's public API list.
__all__.append('Component')
@session_wrapper
def get_component(component, session=None):
    """
    Returns Component object for given C{component} name.

    @type component: string
    @param component: The name of the component

    @rtype: Component
    @return: Component object for the given name (None if not present)
    """

    # Component names are matched case-insensitively by lower-casing the
    # input.
    component = component.lower()

    q = session.query(Component).filter_by(component_name=component)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_component')
945 ################################################################################
class DBConfig(object):
    def __init__(self, *args, **kwargs):
        # NOTE(review): __init__ body and the __repr__ 'def' line are
        # missing from this excerpt.
        return '<DBConfig %s>' % self.name

__all__.append('DBConfig')
956 ################################################################################
@session_wrapper
def get_or_set_contents_file_id(filename, session=None):
    """
    Returns database id for given filename.

    If no matching file is found, a row is inserted.

    @type filename: string
    @param filename: The filename
    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for
    committing.

    @rtype: int
    @return: the database id for the given filename
    """

    q = session.query(ContentFilename).filter_by(filename=filename)

    try:
        ret = q.one().cafilename_id
    except NoResultFound:
        # Not present yet: insert a row and flush/commit so the id gets
        # assigned.
        cf = ContentFilename()
        cf.filename = filename
        session.add(cf)
        session.commit_or_flush()
        ret = cf.cafilename_id

    return ret

__all__.append('get_or_set_contents_file_id')
def get_contents(suite, overridetype, section=None, session=None):
    """
    Returns contents for a suite / overridetype combination, limiting
    to a section if not None.

    @type suite: Suite
    @param suite: Suite object

    @type overridetype: OverrideType
    @param overridetype: OverrideType object

    @type section: Section
    @param section: Optional section object to limit results to

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: ResultsProxy
    @return: ResultsProxy object set up to return tuples of (filename, section,
    ...)
    """

    # find me all of the contents for a given suite
    # NOTE(review): part of the SELECT column list and the @session_wrapper
    # decorator appear to be missing from this excerpt.
    contents_q = """SELECT (p.path||'/'||n.file) AS fn,
                    FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
                    JOIN content_file_names n ON (c.filename=n.id)
                    JOIN binaries b ON (b.id=c.binary_pkg)
                    JOIN override o ON (o.package=b.package)
                    JOIN section s ON (s.id=o.section)
                    WHERE o.suite = :suiteid AND o.type = :overridetypeid
                    AND b.type=:overridetypename"""

    # Bind parameters for the query above.
    vals = {'suiteid': suite.suite_id,
            'overridetypeid': overridetype.overridetype_id,
            'overridetypename': overridetype.overridetype}

    if section is not None:
        # Restrict to a single section when requested.
        contents_q += " AND s.id = :sectionid"
        vals['sectionid'] = section.section_id

    contents_q += " ORDER BY fn"

    return session.execute(contents_q, vals)

__all__.append('get_contents')
1042 ################################################################################
class ContentFilepath(object):
    def __init__(self, *args, **kwargs):
        # NOTE(review): __init__ body and the __repr__ 'def' line are
        # missing from this excerpt.
        return '<ContentFilepath %s>' % self.filepath

__all__.append('ContentFilepath')
@session_wrapper
def get_or_set_contents_path_id(filepath, session=None):
    """
    Returns database id for given path.

    If no matching file is found, a row is inserted.

    @type filepath: string
    @param filepath: The filepath
    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for
    committing.

    @rtype: int
    @return: the database id for the given path
    """

    q = session.query(ContentFilepath).filter_by(filepath=filepath)

    try:
        ret = q.one().cafilepath_id
    except NoResultFound:
        # Not present yet: insert a row and flush/commit so the id gets
        # assigned.
        cf = ContentFilepath()
        cf.filepath = filepath
        session.add(cf)
        session.commit_or_flush()
        ret = cf.cafilepath_id

    return ret

__all__.append('get_or_set_contents_path_id')
1087 ################################################################################
class ContentAssociation(object):
    def __init__(self, *args, **kwargs):
        # NOTE(review): __init__ body and the __repr__ 'def' line are
        # missing from this excerpt.
        return '<ContentAssociation %s>' % self.ca_id

__all__.append('ContentAssociation')

def insert_content_paths(binary_id, fullpaths, session=None):
    """
    Make sure given path is associated with given binary id

    @type binary_id: int
    @param binary_id: the id of the binary
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session. If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code. If not passed, a commit
    will be performed at the end of the function, otherwise the caller is
    responsible for commiting.

    @return: True upon success
    """

    # NOTE(review): the 'session is None' guard, the try/except around the
    # inserts, the commit and the return statements are missing from this
    # excerpt.
    privatetrans = False
    session = DBConn().session()

    def generate_path_dicts():
        # Normalise './'-prefixed paths before handing them to the INSERT.
        for fullpath in fullpaths:
            if fullpath.startswith( './' ):
                fullpath = fullpath[2:]

            yield {'filename':fullpath, 'id': binary_id }

    for d in generate_path_dicts():
        session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",

    traceback.print_exc()

    # Only rollback if we set up the session ourself

__all__.append('insert_content_paths')
1151 ################################################################################
class DSCFile(object):
    def __init__(self, *args, **kwargs):
        # NOTE(review): __init__ body and the __repr__ 'def' line are
        # missing from this excerpt.
        return '<DSCFile %s>' % self.dscfile_id

__all__.append('DSCFile')
@session_wrapper
def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
    """
    Returns a list of DSCFiles which may be empty

    @type dscfile_id: int (optional)
    @param dscfile_id: the dscfile_id of the DSCFiles to find

    @type source_id: int (optional)
    @param source_id: the source id related to the DSCFiles to find

    @type poolfile_id: int (optional)
    @param poolfile_id: the poolfile id related to the DSCFiles to find

    @rtype: list
    @return: Possibly empty list of DSCFiles
    """

    q = session.query(DSCFile)

    # Each filter is optional; with no arguments this returns all DSCFiles.
    if dscfile_id is not None:
        q = q.filter_by(dscfile_id=dscfile_id)

    if source_id is not None:
        q = q.filter_by(source_id=source_id)

    if poolfile_id is not None:
        q = q.filter_by(poolfile_id=poolfile_id)

    return q.all()

__all__.append('get_dscfiles')
1195 ################################################################################
class PoolFile(ORMObject):
    """A file stored in the pool, identified by location + relative name."""

    def __init__(self, filename = None, location = None, filesize = -1, \
        md5sum = None):
        self.filename = filename
        self.location = location
        self.filesize = filesize
        self.md5sum = md5sum

    @property
    def fullpath(self):
        # Absolute on-disk path: location path + relative pool filename.
        return os.path.join(self.location.path, self.filename)

    def is_valid(self, filesize = -1, md5sum = None):
        # A pool file is considered valid iff both size and checksum match.
        return self.filesize == filesize and self.md5sum == md5sum

    def properties(self):
        # 'filename' first: it is used by ORMObject.__repr__().
        return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
            'sha256sum', 'location', 'source', 'last_used']

__all__.append('PoolFile')
1219 def check_poolfile(filename, filesize, md5sum, location_id, session=None):
1222 (ValidFileFound [boolean], PoolFile object or None)
1224 @type filename: string
1225 @param filename: the filename of the file to check against the DB
1228 @param filesize: the size of the file to check against the DB
1230 @type md5sum: string
1231 @param md5sum: the md5sum of the file to check against the DB
1233 @type location_id: int
1234 @param location_id: the id of the location to look in
1237 @return: Tuple of length 2.
1238 - If valid pool file found: (C{True}, C{PoolFile object})
1239 - If valid pool file not found:
1240 - (C{False}, C{None}) if no file found
1241 - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
1244 poolfile = session.query(Location).get(location_id). \
1245 files.filter_by(filename=filename).first()
1247 if poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum):
1250 return (valid, poolfile)
1252 __all__.append('check_poolfile')
1254 # TODO: the implementation can trivially be inlined at the place where the
1255 # function is called
1257 def get_poolfile_by_id(file_id, session=None):
1259 Returns a PoolFile objects or None for the given id
1262 @param file_id: the id of the file to look for
1264 @rtype: PoolFile or None
1265 @return: either the PoolFile object or None
1268 return session.query(PoolFile).get(file_id)
1270 __all__.append('get_poolfile_by_id')
# Return all PoolFiles whose path ends with '/<filename>' (suffix match on
# the basename via SQL LIKE).
1273 def get_poolfile_like_name(filename, session=None):
1275 Returns an array of PoolFile objects which are like the given name
1277 @type filename: string
1278 @param filename: the filename of the file to check against the DB
1281 @return: array of PoolFile objects
1284 # TODO: There must be a way of properly using bind parameters with %FOO%
# NOTE(review): `filename` is interpolated into the LIKE pattern; special
# LIKE characters (%/_) in the name are not escaped here.
1285 q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1289 __all__.append('get_poolfile_like_name')
# Create a PoolFile row for `filename` in the given location.  `datadict`
# must contain the keys "size", "md5sum", "sha1sum" and "sha256sum".
1292 def add_poolfile(filename, datadict, location_id, session=None):
1294 Add a new file to the pool
1296 @type filename: string
1297 @param filename: filename
1299 @type datadict: dict
1300 @param datadict: dict with needed data
1302 @type location_id: int
1303 @param location_id: database id of the location
1306 @return: the PoolFile object created
1308 poolfile = PoolFile()
1309 poolfile.filename = filename
1310 poolfile.filesize = datadict["size"]
1311 poolfile.md5sum = datadict["md5sum"]
1312 poolfile.sha1sum = datadict["sha1sum"]
1313 poolfile.sha256sum = datadict["sha256sum"]
1314 poolfile.location_id = location_id
1316 session.add(poolfile)
1317 # Flush to get a file id (NB: This is not a commit)
1322 __all__.append('add_poolfile')
1324 ################################################################################
# ORM-mapped object for a row of the fingerprint table (a GPG key
# fingerprint known to the archive).
1326 class Fingerprint(object):
1327 def __init__(self, fingerprint = None):
1328 self.fingerprint = fingerprint
1331 return '<Fingerprint %s>' % self.fingerprint
1333 __all__.append('Fingerprint')
# Read-only lookup of a Fingerprint row; unlike get_or_set_fingerprint()
# below, this never inserts.
1336 def get_fingerprint(fpr, session=None):
1338 Returns Fingerprint object for given fpr.
1341 @param fpr: The fpr to find / add
1343 @type session: SQLAlchemy
1344 @param session: Optional SQL session object (a temporary one will be
1345 generated if not supplied).
1348 @return: the Fingerprint object for the given fpr or None
1351 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1355 except NoResultFound:
1360 __all__.append('get_fingerprint')
# Get-or-create: fetch the Fingerprint row for `fpr`, inserting a new row
# if none exists yet.
1363 def get_or_set_fingerprint(fpr, session=None):
1365 Returns Fingerprint object for given fpr.
1367 If no matching fpr is found, a row is inserted.
1370 @param fpr: The fpr to find / add
1372 @type session: SQLAlchemy
1373 @param session: Optional SQL session object (a temporary one will be
1374 generated if not supplied). If not passed, a commit will be performed at
1375 the end of the function, otherwise the caller is responsible for committing.
1376 A flush will be performed either way.
1379 @return: the Fingerprint object for the given fpr
1382 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1386 except NoResultFound:
1387 fingerprint = Fingerprint()
1388 fingerprint.fingerprint = fpr
1389 session.add(fingerprint)
# commit_or_flush: commits for a private session, flushes otherwise.
1390 session.commit_or_flush()
1395 __all__.append('get_or_set_fingerprint')
1397 ################################################################################
1399 # Helper routine for Keyring class
# Build a display name from an LDAP entry's cn/mn/sn attributes, skipping
# empty values and "-" placeholders.
1400 def get_ldap_name(entry):
1402 for k in ["cn", "mn", "sn"]:
1404 if ret and ret[0] != "" and ret[0] != "-":
1406 return " ".join(name)
1408 ################################################################################
# ORM-mapped object for a keyring row, plus helpers to parse the keyring's
# GPG contents and to sync its uids with LDAP or generate them locally.
1410 class Keyring(object):
# Invoked via os.popen in load_keys(); "%s" is the keyring path.
1411 gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
1412 " --with-colons --fingerprint --fingerprint"
1417 def __init__(self, *args, **kwargs):
1421 return '<Keyring %s>' % self.keyring_name
# Undo GPG's \xNN escaping in colon-format output: every odd element of
# the split is a '\xNN' token which is replaced by its character.
1423 def de_escape_gpg_str(self, txt):
1424 esclist = re.split(r'(\\x..)', txt)
1425 for x in range(1,len(esclist),2):
1426 esclist[x] = "%c" % (int(esclist[x][2:],16))
1427 return "".join(esclist)
1429 def parse_address(self, uid):
1430 """parses uid and returns a tuple of real name and email address"""
1432 (name, address) = email.Utils.parseaddr(uid)
# Strip any parenthesised comment from the real-name part.
1433 name = re.sub(r"\s*[(].*[)]", "", name)
1434 name = self.de_escape_gpg_str(name)
1437 return (name, address)
# Parse `gpg --with-colons` output for the given keyring file, filling
# self.keys (per-key name/email/fingerprints) and self.fpr_lookup
# (fingerprint -> key).  Requires keyring_id to be set from the DB first.
1439 def load_keys(self, keyring):
1440 if not self.keyring_id:
1441 raise Exception('Must be initialized with database information')
1443 k = os.popen(self.gpg_invocation % keyring, "r")
1447 for line in k.xreadlines():
1448 field = line.split(":")
1449 if field[0] == "pub":
1452 (name, addr) = self.parse_address(field[9])
1454 self.keys[key]["email"] = addr
1455 self.keys[key]["name"] = name
1456 self.keys[key]["fingerprints"] = []
# Field 11 of a "sub" record holds key capabilities; "s" = signing.
1458 elif key and field[0] == "sub" and len(field) >= 12:
1459 signingkey = ("s" in field[11])
1460 elif key and field[0] == "uid":
1461 (name, addr) = self.parse_address(field[9])
# Prefer the first uid that carries a usable email address.
1462 if "email" not in self.keys[key] and "@" in addr:
1463 self.keys[key]["email"] = addr
1464 self.keys[key]["name"] = name
1465 elif signingkey and field[0] == "fpr":
1466 self.keys[key]["fingerprints"].append(field[9])
1467 self.fpr_lookup[field[9]] = key
# Map keyring entries to Debian uids via LDAP (matched on fingerprint).
# Returns (byname, byuid) dictionaries; also records each key's uid.
1469 def import_users_from_ldap(self, session):
1473 LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
1474 LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
# Anonymous bind is sufficient for this read-only search.
1476 l = ldap.open(LDAPServer)
1477 l.simple_bind_s("","")
1478 Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
1479 "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
1480 ["uid", "keyfingerprint", "cn", "mn", "sn"])
1482 ldap_fin_uid_id = {}
1489 uid = entry["uid"][0]
1490 name = get_ldap_name(entry)
1491 fingerprints = entry["keyFingerPrint"]
1493 for f in fingerprints:
1494 key = self.fpr_lookup.get(f, None)
1495 if key not in self.keys:
1497 self.keys[key]["uid"] = uid
1501 keyid = get_or_set_uid(uid, session).uid_id
1502 byuid[keyid] = (uid, name)
1503 byname[uid] = (keyid, name)
1505 return (byname, byuid)
# Derive uids directly from key email addresses using `format` (a
# %-template, e.g. "%s"); keys without an email get "invalid-uid".
1507 def generate_users_from_keyring(self, format, session):
1511 for x in self.keys.keys():
1512 if "email" not in self.keys[x]:
1514 self.keys[x]["uid"] = format % "invalid-uid"
1516 uid = format % self.keys[x]["email"]
1517 keyid = get_or_set_uid(uid, session).uid_id
1518 byuid[keyid] = (uid, self.keys[x]["name"])
1519 byname[uid] = (keyid, self.keys[x]["name"])
1520 self.keys[x]["uid"] = uid
1523 uid = format % "invalid-uid"
1524 keyid = get_or_set_uid(uid, session).uid_id
1525 byuid[keyid] = (uid, "ungeneratable user id")
1526 byname[uid] = (keyid, "ungeneratable user id")
1528 return (byname, byuid)
1530 __all__.append('Keyring')
# Look up a Keyring row by name; returns None when no such keyring exists.
1533 def get_keyring(keyring, session=None):
1535 If C{keyring} does not have an entry in the C{keyrings} table yet, return None
1536 If C{keyring} already has an entry, simply return the existing Keyring
1538 @type keyring: string
1539 @param keyring: the keyring name
1542 @return: the Keyring object for this keyring
1545 q = session.query(Keyring).filter_by(keyring_name=keyring)
1549 except NoResultFound:
1552 __all__.append('get_keyring')
1554 ################################################################################
# ORM-mapped object for a keyring -> ACL mapping row.
1556 class KeyringACLMap(object):
1557 def __init__(self, *args, **kwargs):
1561 return '<KeyringACLMap %s>' % self.keyring_acl_map_id
1563 __all__.append('KeyringACLMap')
1565 ################################################################################
# ORM-mapped object for a row of the changes table (an uploaded .changes
# file known to the archive).
1567 class DBChange(object):
1568 def __init__(self, *args, **kwargs):
1572 return '<DBChange %s>' % self.changesname
# Detach this upload from its policy queue: drop the pool/pending file
# associations and clear the queue/approval references.
1574 def clean_from_queue(self):
1575 session = DBConn().session().object_session(self)
1577 # Remove changes_pool_files entries
1580 # Remove changes_pending_files references
1583 # Clear out of queue
1584 self.in_queue = None
1585 self.approved_for_id = None
1587 __all__.append('DBChange')
# Look up a DBChange row by its .changes filename.
1590 def get_dbchange(filename, session=None):
1592 returns DBChange object for given C{filename}.
1594 @type filename: string
1595 @param filename: the name of the file
1597 @type session: Session
1598 @param session: Optional SQLA session object (a temporary one will be
1599 generated if not supplied)
1602 @return: DBChange object for the given filename (C{None} if not present)
1605 q = session.query(DBChange).filter_by(changesname=filename)
1609 except NoResultFound:
1612 __all__.append('get_dbchange')
1614 ################################################################################
# ORM-mapped object for a row of the location table (a directory in which
# pool files are stored).
1616 class Location(object):
1617 def __init__(self, path = None):
1619 # the column 'type' should go away, see comment at mapper
1620 self.archive_type = 'pool'
1623 return '<Location %s (%s)>' % (self.path, self.location_id)
1625 __all__.append('Location')
# Look up a Location by path, optionally restricted to a component and/or
# archive; returns None if no unique match is found.
1628 def get_location(location, component=None, archive=None, session=None):
1630 Returns Location object for the given combination of location, component
1633 @type location: string
1634 @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}
1636 @type component: string
1637 @param component: the component name (if None, no restriction applied)
1639 @type archive: string
1640 @param archive: the archive name (if None, no restriction applied)
1642 @rtype: Location / None
1643 @return: Either a Location object or None if one can't be found
1646 q = session.query(Location).filter_by(path=location)
1648 if archive is not None:
1649 q = q.join(Archive).filter_by(archive_name=archive)
1651 if component is not None:
1652 q = q.join(Component).filter_by(component_name=component)
1656 except NoResultFound:
1659 __all__.append('get_location')
1661 ################################################################################
# ORM-mapped object for a row of the maintainer table.
1663 class Maintainer(object):
1664 def __init__(self, name = None):
1668 return '''<Maintainer '%s' (%s)>''' % (self.name, self.maintainer_id)
# Split "Name <email>" into its parts via utils.fix_maintainer; returns a
# tuple of four empty strings when no name is set.
1670 def get_split_maintainer(self):
1671 if not hasattr(self, 'name') or self.name is None:
1672 return ('', '', '', '')
1674 return fix_maintainer(self.name.strip())
1676 __all__.append('Maintainer')
# Get-or-create: fetch the Maintainer row for `name`, inserting a new row
# if none exists yet (same pattern as get_or_set_fingerprint).
1679 def get_or_set_maintainer(name, session=None):
1681 Returns Maintainer object for given maintainer name.
1683 If no matching maintainer name is found, a row is inserted.
1686 @param name: The maintainer name to add
1688 @type session: SQLAlchemy
1689 @param session: Optional SQL session object (a temporary one will be
1690 generated if not supplied). If not passed, a commit will be performed at
1691 the end of the function, otherwise the caller is responsible for committing.
1692 A flush will be performed either way.
1695 @return: the Maintainer object for the given maintainer
1698 q = session.query(Maintainer).filter_by(name=name)
1701 except NoResultFound:
1702 maintainer = Maintainer()
1703 maintainer.name = name
1704 session.add(maintainer)
1705 session.commit_or_flush()
1710 __all__.append('get_or_set_maintainer')
# Primary-key lookup of a Maintainer row.
1713 def get_maintainer(maintainer_id, session=None):
1715 Return the name of the maintainer behind C{maintainer_id} or None if that
1716 maintainer_id is invalid.
1718 @type maintainer_id: int
1719 @param maintainer_id: the id of the maintainer
1722 @return: the Maintainer with this C{maintainer_id}
1725 return session.query(Maintainer).get(maintainer_id)
1727 __all__.append('get_maintainer')
1729 ################################################################################
# ORM-mapped object for a comment on a package sitting in the NEW queue.
1731 class NewComment(object):
1732 def __init__(self, *args, **kwargs):
1736 return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
1738 __all__.append('NewComment')
# Existence check: is there at least one NEW-queue comment for this
# package/version pair?
1741 def has_new_comment(package, version, session=None):
1743 Returns true if the given combination of C{package}, C{version} has a comment.
1745 @type package: string
1746 @param package: name of the package
1748 @type version: string
1749 @param version: package version
1751 @type session: Session
1752 @param session: Optional SQLA session object (a temporary one will be
1753 generated if not supplied)
1759 q = session.query(NewComment)
1760 q = q.filter_by(package=package)
1761 q = q.filter_by(version=version)
# NOTE(review): the bool() wrapper is redundant ('>' already yields bool).
1763 return bool(q.count() > 0)
1765 __all__.append('has_new_comment')
# Fetch NEW-queue comments, with each argument acting as an optional
# equality filter.
1768 def get_new_comments(package=None, version=None, comment_id=None, session=None):
1770 Returns (possibly empty) list of NewComment objects for the given
1773 @type package: string (optional)
1774 @param package: name of the package
1776 @type version: string (optional)
1777 @param version: package version
1779 @type comment_id: int (optional)
1780 @param comment_id: An id of a comment
1782 @type session: Session
1783 @param session: Optional SQLA session object (a temporary one will be
1784 generated if not supplied)
1787 @return: A (possibly empty) list of NewComment objects will be returned
1790 q = session.query(NewComment)
1791 if package is not None: q = q.filter_by(package=package)
1792 if version is not None: q = q.filter_by(version=version)
1793 if comment_id is not None: q = q.filter_by(comment_id=comment_id)
1797 __all__.append('get_new_comments')
1799 ################################################################################
# ORM-mapped object for a row of the override table (per-suite section /
# priority override data for a package).
1801 class Override(object):
1802 def __init__(self, *args, **kwargs):
1806 return '<Override %s (%s)>' % (self.package, self.suite_id)
1808 __all__.append('Override')
# Fetch Override rows for a package; suite / component / overridetype each
# accept a single name or a list of names and restrict the query via joins.
1811 def get_override(package, suite=None, component=None, overridetype=None, session=None):
1813 Returns Override object for the given parameters
1815 @type package: string
1816 @param package: The name of the package
1818 @type suite: string, list or None
1819 @param suite: The name of the suite (or suites if a list) to limit to. If
1820 None, don't limit. Defaults to None.
1822 @type component: string, list or None
1823 @param component: The name of the component (or components if a list) to
1824 limit to. If None, don't limit. Defaults to None.
1826 @type overridetype: string, list or None
1827 @param overridetype: The name of the overridetype (or overridetypes if a list) to
1828 limit to. If None, don't limit. Defaults to None.
1830 @type session: Session
1831 @param session: Optional SQLA session object (a temporary one will be
1832 generated if not supplied)
1835 @return: A (possibly empty) list of Override objects will be returned
1838 q = session.query(Override)
1839 q = q.filter_by(package=package)
# Normalise scalar arguments to one-element lists so in_() can be used.
1841 if suite is not None:
1842 if not isinstance(suite, list): suite = [suite]
1843 q = q.join(Suite).filter(Suite.suite_name.in_(suite))
1845 if component is not None:
1846 if not isinstance(component, list): component = [component]
1847 q = q.join(Component).filter(Component.component_name.in_(component))
1849 if overridetype is not None:
1850 if not isinstance(overridetype, list): overridetype = [overridetype]
1851 q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))
1855 __all__.append('get_override')
1858 ################################################################################
# ORM-mapped object for a row of the override_type table (e.g. deb, udeb,
# dsc — exact values live in the database, not here).
1860 class OverrideType(object):
1861 def __init__(self, *args, **kwargs):
1865 return '<OverrideType %s>' % self.overridetype
1867 __all__.append('OverrideType')
# Look up an OverrideType row by its name.
1870 def get_override_type(override_type, session=None):
1872 Returns OverrideType object for given C{override type}.
1874 @type override_type: string
1875 @param override_type: The name of the override type
1877 @type session: Session
1878 @param session: Optional SQLA session object (a temporary one will be
1879 generated if not supplied)
1882 @return: the database id for the given override type
1885 q = session.query(OverrideType).filter_by(overridetype=override_type)
1889 except NoResultFound:
1892 __all__.append('get_override_type')
1894 ################################################################################
class DebContents(object):
    """ORM-mapped object for a contents entry of a .deb package.

    Attributes (mapped elsewhere): C{package} (the owning binary package
    object) and C{file} (the path recorded for this contents entry).
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Fixed typo in the class name ('DebConetnts' -> 'DebContents').
        return '<DebContents %s: %s>' % (self.package.package, self.file)
1903 __all__.append('DebContents')
class UdebContents(object):
    """ORM-mapped object for a contents entry of a .udeb package.

    Attributes (mapped elsewhere): C{package} (the owning binary package
    object) and C{file} (the path recorded for this contents entry).
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Fixed typo in the class name ('UdebConetnts' -> 'UdebContents').
        return '<UdebContents %s: %s>' % (self.package.package, self.file)
1913 __all__.append('UdebContents')
# ORM-mapped object for a temporarily recorded contents entry of a binary
# package still being processed (see insert_pending_content_paths below).
1915 class PendingBinContents(object):
1916 def __init__(self, *args, **kwargs):
1920 return '<PendingBinContents %s>' % self.contents_id
1922 __all__.append('PendingBinContents')
# Replace any existing pending-contents rows for this package/version/arch
# with one PendingBinContents row per path in `fullpaths`.
1924 def insert_pending_content_paths(package,
1929 Make sure given paths are temporarily associated with given
1933 @param package: the package to associate with should have been read in from the binary control file
1934 @type fullpaths: list
1935 @param fullpaths: the list of paths of the file being associated with the binary
1936 @type session: SQLAlchemy session
1937 @param session: Optional SQLAlchemy session. If this is passed, the caller
1938 is responsible for ensuring a transaction has begun and committing the
1939 results or rolling back based on the result code. If not passed, a commit
1940 will be performed at the end of the function
1942 @return: True upon success, False if there is a problem
# privatetrans tracks whether this function owns the session/transaction.
1945 privatetrans = False
1948 session = DBConn().session()
1952 arch = get_architecture(package['Architecture'], session)
1953 arch_id = arch.arch_id
1955 # Remove any already existing recorded files for this package
1956 q = session.query(PendingBinContents)
1957 q = q.filter_by(package=package['Package'])
1958 q = q.filter_by(version=package['Version'])
1959 q = q.filter_by(architecture=arch_id)
1962 for fullpath in fullpaths:
# Normalise paths: contents entries are stored without a leading "./".
1964 if fullpath.startswith( "./" ):
1965 fullpath = fullpath[2:]
1967 pca = PendingBinContents()
1968 pca.package = package['Package']
1969 pca.version = package['Version']
1971 pca.architecture = arch_id
# Magic type discriminators; meaning defined by the contents schema.
1974 pca.type = 8 # gross
1976 pca.type = 7 # also gross
1979 # Only commit if we set up the session ourself
# On any failure: log the traceback and return False (best-effort API).
1987 except Exception, e:
1988 traceback.print_exc()
1990 # Only rollback if we set up the session ourself
1997 __all__.append('insert_pending_content_paths')
1999 ################################################################################
# ORM-mapped object for a row of the policy_queue table (e.g. NEW).
2001 class PolicyQueue(object):
2002 def __init__(self, *args, **kwargs):
2006 return '<PolicyQueue %s>' % self.queue_name
2008 __all__.append('PolicyQueue')
# Look up a PolicyQueue row by queue name.
2011 def get_policy_queue(queuename, session=None):
2013 Returns PolicyQueue object for given C{queue name}
2015 @type queuename: string
2016 @param queuename: The name of the queue
2018 @type session: Session
2019 @param session: Optional SQLA session object (a temporary one will be
2020 generated if not supplied)
2023 @return: PolicyQueue object for the given queue
2026 q = session.query(PolicyQueue).filter_by(queue_name=queuename)
2030 except NoResultFound:
2033 __all__.append('get_policy_queue')
# Look up a PolicyQueue row by its filesystem path.
2036 def get_policy_queue_from_path(pathname, session=None):
2038 Returns PolicyQueue object for given C{path name}
2040 @type pathname: string
2041 @param pathname: The path
2043 @type session: Session
2044 @param session: Optional SQLA session object (a temporary one will be
2045 generated if not supplied)
2048 @return: PolicyQueue object for the given queue
2051 q = session.query(PolicyQueue).filter_by(path=pathname)
2055 except NoResultFound:
2058 __all__.append('get_policy_queue_from_path')
2060 ################################################################################
# ORM-mapped object for a row of the priority table.  Compares equal to a
# plain string holding the priority name for caller convenience.
2062 class Priority(object):
2063 def __init__(self, *args, **kwargs):
2066 def __eq__(self, val):
2067 if isinstance(val, str):
2068 return (self.priority == val)
2069 # This signals to use the normal comparison operator
2070 return NotImplemented
2072 def __ne__(self, val):
2073 if isinstance(val, str):
2074 return (self.priority != val)
2075 # This signals to use the normal comparison operator
2076 return NotImplemented
2079 return '<Priority %s (%s)>' % (self.priority, self.priority_id)
2081 __all__.append('Priority')
# Look up a Priority row by name.
2084 def get_priority(priority, session=None):
2086 Returns Priority object for given C{priority name}.
2088 @type priority: string
2089 @param priority: The name of the priority
2091 @type session: Session
2092 @param session: Optional SQLA session object (a temporary one will be
2093 generated if not supplied)
2096 @return: Priority object for the given priority
2099 q = session.query(Priority).filter_by(priority=priority)
2103 except NoResultFound:
2106 __all__.append('get_priority')
# Build a {priority name: priority id} mapping over all Priority rows.
2109 def get_priorities(session=None):
2111 Returns dictionary of priority names -> id mappings
2113 @type session: Session
2114 @param session: Optional SQL session object (a temporary one will be
2115 generated if not supplied)
2118 @return: dictionary of priority names -> id mappings
2122 q = session.query(Priority)
2124 ret[x.priority] = x.priority_id
2128 __all__.append('get_priorities')
2130 ################################################################################
# ORM-mapped object for a row of the section table.  Compares equal to a
# plain string holding the section name (mirrors Priority above).
2132 class Section(object):
2133 def __init__(self, *args, **kwargs):
2136 def __eq__(self, val):
2137 if isinstance(val, str):
2138 return (self.section == val)
2139 # This signals to use the normal comparison operator
2140 return NotImplemented
2142 def __ne__(self, val):
2143 if isinstance(val, str):
2144 return (self.section != val)
2145 # This signals to use the normal comparison operator
2146 return NotImplemented
2149 return '<Section %s>' % self.section
2151 __all__.append('Section')
# Look up a Section row by name.
2154 def get_section(section, session=None):
2156 Returns Section object for given C{section name}.
2158 @type section: string
2159 @param section: The name of the section
2161 @type session: Session
2162 @param session: Optional SQLA session object (a temporary one will be
2163 generated if not supplied)
2166 @return: Section object for the given section name
2169 q = session.query(Section).filter_by(section=section)
2173 except NoResultFound:
2176 __all__.append('get_section')
# Build a {section name: section id} mapping over all Section rows.
2179 def get_sections(session=None):
2181 Returns dictionary of section names -> id mappings
2183 @type session: Session
2184 @param session: Optional SQL session object (a temporary one will be
2185 generated if not supplied)
2188 @return: dictionary of section names -> id mappings
2192 q = session.query(Section)
2194 ret[x.section] = x.section_id
2198 __all__.append('get_sections')
2200 ################################################################################
# ORM-mapped object for a row of the source table (a source package
# version known to the archive).
2202 class DBSource(object):
2203 def __init__(self, source = None, version = None, maintainer = None, \
2204 changedby = None, poolfile = None, install_date = None):
2205 self.source = source
2206 self.version = version
2207 self.maintainer = maintainer
2208 self.changedby = changedby
2209 self.poolfile = poolfile
2210 self.install_date = install_date
2213 return '<DBSource %s (%s)>' % (self.source, self.version)
2215 __all__.append('DBSource')
# Check that the source package for a binary upload exists in one of the
# given suites (or a suite mapped to one of them), either at the exact
# version or at the version with any binNMU suffix stripped.
2218 def source_exists(source, source_version, suites = ["any"], session=None):
2220 Ensure that source exists somewhere in the archive for the binary
2221 upload being processed.
2222 1. exact match => 1.0-3
2223 2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
2225 @type source: string
2226 @param source: source name
2228 @type source_version: string
2229 @param source_version: expected source version
2232 @param suites: list of suites to check in, default I{any}
2234 @type session: Session
2235 @param session: Optional SQLA session object (a temporary one will be
2236 generated if not supplied)
2239 @return: returns 1 if a source with expected version is found, otherwise 0
# Strip a "+bN" binNMU suffix to get the underlying source version.
2246 from daklib.regexes import re_bin_only_nmu
2247 orig_source_version = re_bin_only_nmu.sub('', source_version)
2249 for suite in suites:
2250 q = session.query(DBSource).filter_by(source=source). \
2251 filter(DBSource.version.in_([source_version, orig_source_version]))
2253 # source must exist in suite X, or in some other suite that's
2254 # mapped to X, recursively... silent-maps are counted too,
2255 # unreleased-maps aren't.
2256 maps = cnf.ValueList("SuiteMappings")[:]
2258 maps = [ m.split() for m in maps ]
2259 maps = [ (x[1], x[2]) for x in maps
2260 if x[0] == "map" or x[0] == "silent-map" ]
# Expand the suite set transitively along the mapping edges.
2263 if x[1] in s and x[0] not in s:
2266 q = q.filter(DBSource.suites.any(Suite.suite_name.in_(s)))
2271 # No source found so return not ok
2276 __all__.append('source_exists')
# Return all Suites that contain any version of the given source package.
2279 def get_suites_source_in(source, session=None):
2281 Returns list of Suite objects which given C{source} name is in
2284 @param source: DBSource package name to search for
2287 @return: list of Suite objects for the given source
2290 return session.query(Suite).filter(Suite.sources.any(source=source)).all()
2292 __all__.append('get_suites_source_in')
# Fetch DBSource rows by name, optionally narrowed by exact version and/or
# the dm_upload_allowed flag.
2295 def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
2297 Returns list of DBSource objects for given C{source} name and other parameters
2300 @param source: DBSource package name to search for
2302 @type version: str or None
2303 @param version: DBSource version name to search for or None if not applicable
2305 @type dm_upload_allowed: bool
2306 @param dm_upload_allowed: If None, no effect. If True or False, only
2307 return packages with that dm_upload_allowed setting
2309 @type session: Session
2310 @param session: Optional SQL session object (a temporary one will be
2311 generated if not supplied)
2314 @return: list of DBSource objects for the given name (may be empty)
2317 q = session.query(DBSource).filter_by(source=source)
2319 if version is not None:
2320 q = q.filter_by(version=version)
2322 if dm_upload_allowed is not None:
2323 q = q.filter_by(dm_upload_allowed=dm_upload_allowed)
2327 __all__.append('get_sources_from_name')
2329 # FIXME: This function fails badly if it finds more than 1 source package and
2330 # its implementation is trivial enough to be inlined.
# Return the DBSource for `source` in `suite`, or None when absent
# (delegates to Suite.get_sources; see the FIXME above about duplicates).
2332 def get_source_in_suite(source, suite, session=None):
2334 Returns a DBSource object for a combination of C{source} and C{suite}.
2336 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2337 - B{suite} - a suite name, eg. I{unstable}
2339 @type source: string
2340 @param source: source package name
2343 @param suite: the suite name
2346 @return: the version for I{source} in I{suite}
2350 q = get_suite(suite, session).get_sources(source)
2353 except NoResultFound:
2356 __all__.append('get_source_in_suite')
2358 ################################################################################
# Record an uploaded .dsc in the database: create the DBSource row, its
# pool files, dsc_files entries and src_uploaders rows.  `u` is the upload
# object; returns (source, dsc_component, dsc_location_id, pfs) where pfs
# is the list of PoolFile objects created/referenced.
2361 def add_dsc_to_db(u, filename, session=None):
2362 entry = u.pkg.files[filename]
2366 source.source = u.pkg.dsc["source"]
2367 source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
2368 source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
2369 source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
2370 source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
2371 source.install_date = datetime.now().date()
2373 dsc_component = entry["component"]
2374 dsc_location_id = entry["location id"]
2376 source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")
2378 # Set up a new poolfile if necessary
2379 if not entry.has_key("files id") or not entry["files id"]:
2380 filename = entry["pool name"] + filename
2381 poolfile = add_poolfile(filename, entry, dsc_location_id, session)
2383 pfs.append(poolfile)
2384 entry["files id"] = poolfile.file_id
2386 source.poolfile_id = entry["files id"]
# Associate the source with every suite listed in the .changes file.
2389 suite_names = u.pkg.changes["distribution"].keys()
2390 source.suites = session.query(Suite). \
2391 filter(Suite.suite_name.in_(suite_names)).all()
2393 # Add the source files to the DB (files and dsc_files)
2395 dscfile.source_id = source.source_id
2396 dscfile.poolfile_id = entry["files id"]
2397 session.add(dscfile)
2399 for dsc_file, dentry in u.pkg.dsc_files.items():
2401 df.source_id = source.source_id
2403 # If the .orig tarball is already in the pool, it's
2404 # files id is stored in dsc_files by check_dsc().
2405 files_id = dentry.get("files id", None)
2407 # Find the entry in the files hash
2408 # TODO: Bail out here properly
2410 for f, e in u.pkg.files.items():
2415 if files_id is None:
2416 filename = dfentry["pool name"] + dsc_file
# Check whether the file already exists in the pool at this location.
2418 (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
2419 # FIXME: needs to check for -1/-2 and or handle exception
2420 if found and obj is not None:
2421 files_id = obj.file_id
2424 # If still not found, add it
2425 if files_id is None:
2426 # HACK: Force sha1sum etc into dentry
2427 dentry["sha1sum"] = dfentry["sha1sum"]
2428 dentry["sha256sum"] = dfentry["sha256sum"]
2429 poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
2430 pfs.append(poolfile)
2431 files_id = poolfile.file_id
2433 poolfile = get_poolfile_by_id(files_id, session)
2434 if poolfile is None:
2435 utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
2436 pfs.append(poolfile)
2438 df.poolfile_id = files_id
2441 # Add the src_uploaders to the DB
2442 uploader_ids = [source.maintainer_id]
2443 if u.pkg.dsc.has_key("uploaders"):
# Uploaders are comma-separated "Name <email>" entries; split on the
# '>, ' boundary to avoid breaking on commas inside names.
2444 for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
2446 uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)
# De-duplicate uploader ids before inserting src_uploaders rows.
2449 for up_id in uploader_ids:
2450 if added_ids.has_key(up_id):
2452 utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))
2458 su.maintainer_id = up_id
2459 su.source_id = source.source_id
2464 return source, dsc_component, dsc_location_id, pfs
2466 __all__.append('add_dsc_to_db')
# Record an uploaded binary package (deb or udeb) in the database: create
# the binaries row, its pool file and per-suite BinAssociations.
2469 def add_deb_to_db(u, filename, session=None):
2471 Contrary to what you might expect, this routine deals with both
2472 debs and udebs. That info is in 'dbtype', whilst 'type' is
2473 'deb' for both of them
2476 entry = u.pkg.files[filename]
2479 bin.package = entry["package"]
2480 bin.version = entry["version"]
2481 bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
2482 bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
2483 bin.arch_id = get_architecture(entry["architecture"], session).arch_id
2484 bin.binarytype = entry["dbtype"]
# Resolve the pool location, creating the PoolFile row if needed.
2487 filename = entry["pool name"] + filename
2488 fullpath = os.path.join(cnf["Dir::Pool"], filename)
2489 if not entry.get("location id", None):
2490 entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id
2492 if entry.get("files id", None):
2493 poolfile = get_poolfile_by_id(bin.poolfile_id)
2494 bin.poolfile_id = entry["files id"]
2496 poolfile = add_poolfile(filename, entry, entry["location id"], session)
2497 bin.poolfile_id = entry["files id"] = poolfile.file_id
# A binary must map to exactly one source package version.
2500 bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
2501 if len(bin_sources) != 1:
2502 raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
2503 (bin.package, bin.version, entry["architecture"],
2504 filename, bin.binarytype, u.pkg.changes["fingerprint"])
2506 bin.source_id = bin_sources[0].source_id
2508 # Add and flush object so it has an ID
2512 # Add BinAssociations
2513 for suite_name in u.pkg.changes["distribution"].keys():
2514 ba = BinAssociation()
2515 ba.binary_id = bin.binary_id
2516 ba.suite_id = get_suite(suite_name).suite_id
2521 # Deal with contents - disabled for now
2522 #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
2524 # print "REJECT\nCould not determine contents of package %s" % bin.package
2525 # session.rollback()
2526 # raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
2530 __all__.append('add_deb_to_db')
2532 ################################################################################
# ORM-mapped object for a row of the source_acl table.
2534 class SourceACL(object):
2535 def __init__(self, *args, **kwargs):
2539 return '<SourceACL %s>' % self.source_acl_id
2541 __all__.append('SourceACL')
2543 ################################################################################
# ORM-mapped object for a row of the src_format table (source package
# format names, e.g. "3.0 (quilt)" — actual values live in the DB).
2545 class SrcFormat(object):
2546 def __init__(self, *args, **kwargs):
2550 return '<SrcFormat %s>' % (self.format_name)
2552 __all__.append('SrcFormat')
2554 ################################################################################
# ORM-mapped object for a row of the src_uploaders table (one uploader of
# a source package; rows are created by add_dsc_to_db above).
2556 class SrcUploader(object):
2557 def __init__(self, *args, **kwargs):
2561 return '<SrcUploader %s>' % self.uploader_id
2563 __all__.append('SrcUploader')
2565 ################################################################################
# (display label, Suite attribute) pairs used by Suite.details() below to
# render a human-readable summary of a suite row.
2567 SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
2568 ('SuiteID', 'suite_id'),
2569 ('Version', 'version'),
2570 ('Origin', 'origin'),
2572 ('Description', 'description'),
2573 ('Untouchable', 'untouchable'),
2574 ('Announce', 'announce'),
2575 ('Codename', 'codename'),
2576 ('OverrideCodename', 'overridecodename'),
2577 ('ValidTime', 'validtime'),
2578 ('Priority', 'priority'),
2579 ('NotAutomatic', 'notautomatic'),
2580 ('CopyChanges', 'copychanges'),
2581 ('OverrideSuite', 'overridesuite')]
2583 # Why the heck don't we have any UNIQUE constraints in table suite?
2584 # TODO: Add UNIQUE constraints for appropriate columns.
2585 class Suite(object):
# ORM class for an archive suite (e.g. unstable, testing); mapped onto
# table suite in DBConn.__setupmappers.
2586 def __init__(self, suite_name = None, version = None):
2587 self.suite_name = suite_name
2588 self.version = version
# NOTE(review): numbering jumps 2588 -> 2591; the `def __repr__` header
# (line ~2590) is elided, line 2591 is presumably its body.
2591 return '<Suite %s>' % self.suite_name
# Equality against a plain string compares the suite name; anything else
# falls through to the default comparison via NotImplemented.
2593 def __eq__(self, val):
2594 if isinstance(val, str):
2595 return (self.suite_name == val)
2596 # This signals to use the normal comparison operator
2597 return NotImplemented
2599 def __ne__(self, val):
2600 if isinstance(val, str):
2601 return (self.suite_name != val)
2602 # This signals to use the normal comparison operator
2603 return NotImplemented
# NOTE(review): the `def details(self):` header and the `ret = []`
# initialisation (lines ~2605-2606, 2609?) are elided from this dump; the
# loop below renders one "DisplayName: value" line per SUITE_FIELDS entry.
2607 for disp, field in SUITE_FIELDS:
2608 val = getattr(self, field, None)
2610 ret.append("%s: %s" % (disp, val))
2612 return "\n".join(ret)
2614 def get_architectures(self, skipsrc=False, skipall=False):
2616 Returns list of Architecture objects
2618 @type skipsrc: boolean
2619 @param skipsrc: Whether to skip returning the 'source' architecture entry
2622 @type skipall: boolean
2623 @param skipall: Whether to skip returning the 'all' architecture entry
2627 @return: list of Architecture objects for the given name (may be empty)
# Query architectures via the suite_architectures relation; the skipsrc /
# skipall filters are applied conditionally (their `if` guard lines 2631 /
# 2633 are elided from this dump).
2630 q = object_session(self).query(Architecture).with_parent(self)
2632 q = q.filter(Architecture.arch_string != 'source')
2634 q = q.filter(Architecture.arch_string != 'all')
2635 return q.order_by(Architecture.arch_string).all()
2637 def get_sources(self, source):
2639 Returns a query object representing DBSource that is part of C{suite}.
2641 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2643 @type source: string
2644 @param source: source package name
2646 @rtype: sqlalchemy.orm.query.Query
2647 @return: a query of DBSource
# Returns a lazy Query (not a list); line 2653 (the chained with_parent /
# filter continuation) is elided from this dump.
2651 session = object_session(self)
2652 return session.query(DBSource).filter_by(source = source). \
2655 __all__.append('Suite')
# NOTE(review): line 2657 (presumably a @session_wrapper decorator, as used
# throughout dbconn.py) is elided from this dump -- confirm upstream.
2658 def get_suite(suite, session=None):
2660 Returns Suite object for given C{suite name}.
2663 @param suite: The name of the suite
2665 @type session: Session
2666 @param session: Optional SQLA session object (a temporary one will be
2667 generated if not supplied)
2670 @return: Suite object for the requested suite name (None if not present)
# NOTE(review): lines 2674-2676 and 2678-2679 are elided; presumably
# `try: return q.one()` with the except branch returning None -- confirm.
2673 q = session.query(Suite).filter_by(suite_name=suite)
2677 except NoResultFound:
2680 __all__.append('get_suite')
2682 ################################################################################
2684 # TODO: should be removed because the implementation is too trivial
# Thin convenience wrapper: look the suite up by name and delegate to
# Suite.get_architectures().  Raises AttributeError (NoneType) if the
# suite does not exist, since get_suite() then returns None.
2686 def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
2688 Returns list of Architecture objects for given C{suite} name
2691 @param suite: Suite name to search for
2693 @type skipsrc: boolean
2694 @param skipsrc: Whether to skip returning the 'source' architecture entry
2697 @type skipall: boolean
2698 @param skipall: Whether to skip returning the 'all' architecture entry
2701 @type session: Session
2702 @param session: Optional SQL session object (a temporary one will be
2703 generated if not supplied)
2706 @return: list of Architecture objects for the given name (may be empty)
2709 return get_suite(suite, session).get_architectures(skipsrc, skipall)
2711 __all__.append('get_suite_architectures')
2713 ################################################################################
2715 class SuiteSrcFormat(object):
# ORM class for the suite <-> source-format join table suite_src_formats
# (exposes suite_id, src_format_id; see mapper in DBConn.__setupmappers).
# NOTE(review): numbering jumps 2716 -> 2720; the `def __repr__` header is
# elided here, line 2720 is presumably its body -- confirm upstream.
2716 def __init__(self, *args, **kwargs):
2720 return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
2722 __all__.append('SuiteSrcFormat')
# NOTE(review): line 2724 (presumably a @session_wrapper decorator) is
# elided from this dump -- confirm upstream.
2725 def get_suite_src_formats(suite, session=None):
2727 Returns list of allowed SrcFormat for C{suite}.
2730 @param suite: Suite name to search for
2732 @type session: Session
2733 @param session: Optional SQL session object (a temporary one will be
2734 generated if not supplied)
2737 @return: the list of allowed source formats for I{suite}
# Join SrcFormat -> SuiteSrcFormat -> Suite and filter by suite name;
# line 2745 (presumably `return q.all()`) is elided from this dump.
2740 q = session.query(SrcFormat)
2741 q = q.join(SuiteSrcFormat)
2742 q = q.join(Suite).filter_by(suite_name=suite)
2743 q = q.order_by('format_name')
2747 __all__.append('get_suite_src_formats')
2749 ################################################################################
# NOTE(review): the `class Uid(object):` header (line ~2751) and the
# attribute assignments in __init__ (lines 2753-2754) are elided from this
# dump; the methods below belong to that class (mapped onto table uid).
2752 def __init__(self, uid = None, name = None):
# Equality against a plain string compares the uid value; anything else
# falls through to the default comparison via NotImplemented.
2756 def __eq__(self, val):
2757 if isinstance(val, str):
2758 return (self.uid == val)
2759 # This signals to use the normal comparison operator
2760 return NotImplemented
2762 def __ne__(self, val):
2763 if isinstance(val, str):
2764 return (self.uid != val)
2765 # This signals to use the normal comparison operator
2766 return NotImplemented
# NOTE(review): the `def __repr__` header (line ~2768) is elided; line
# 2769 is presumably its body -- confirm upstream.
2769 return '<Uid %s (%s)>' % (self.uid, self.name)
2771 __all__.append('Uid')
# NOTE(review): line 2773 (presumably a @session_wrapper decorator) is
# elided from this dump -- confirm upstream.
2774 def get_or_set_uid(uidname, session=None):
2776 Returns uid object for given uidname.
2778 If no matching uidname is found, a row is inserted.
2780 @type uidname: string
2781 @param uidname: The uid to add
2783 @type session: SQLAlchemy
2784 @param session: Optional SQL session object (a temporary one will be
2785 generated if not supplied). If not passed, a commit will be performed at
2786 the end of the function, otherwise the caller is responsible for commiting.
2789 @return: the uid object for the given uidname
# NOTE(review): lines 2793-2795 and 2797-2804 are elided; presumably a
# `try: ret = q.one()` with the NoResultFound branch constructing a new
# Uid, adding it to the session and calling commit_or_flush() -- confirm.
2792 q = session.query(Uid).filter_by(uid=uidname)
2796 except NoResultFound:
2800 session.commit_or_flush()
2805 __all__.append('get_or_set_uid')
# Return the Uid owning GPG fingerprint C{fpr} (via the fingerprint
# table's uid relation).
# NOTE(review): line 2807 (presumably a @session_wrapper decorator) and
# lines 2811-2813 / 2815-2816 (presumably `try: return q.one()` with the
# except branch returning None) are elided from this dump -- confirm.
2808 def get_uid_from_fingerprint(fpr, session=None):
2809 q = session.query(Uid)
2810 q = q.join(Fingerprint).filter_by(fingerprint=fpr)
2814 except NoResultFound:
2817 __all__.append('get_uid_from_fingerprint')
2819 ################################################################################
2821 class UploadBlock(object):
# ORM class for an entry in table upload_blocks (a block on uploads of a
# given source); the mapper in DBConn.__setupmappers wires fingerprint and
# uid relations onto it.
# NOTE(review): numbering jumps 2822 -> 2826; the `def __repr__` header is
# elided here, line 2826 is presumably its body -- confirm upstream.
2822 def __init__(self, *args, **kwargs):
2826 return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
2828 __all__.append('UploadBlock')
2830 ################################################################################
2832 class DBConn(object):
# Borg-style shared-state singleton (every instance shares one __dict__
# via __shared_state below), so engine, metadata, reflected tables and
# mappers are set up exactly once per process.
# NOTE(review): the docstring delimiters and the `__shared_state = {}`
# declaration (lines 2833, 2835-2837) are elided from this dump.
2834 database module init.
2838 def __init__(self, *args, **kwargs):
2839 self.__dict__ = self.__shared_state
# Only the first instantiation performs setup; later ones reuse state.
2841 if not getattr(self, 'initialised', False):
2842 self.initialised = True
# A `debug` keyword turns on SQLAlchemy engine echo (see __createconn).
2843 self.debug = kwargs.has_key('debug')
# Reflect archive tables and views from the live database and attach them
# as self.tbl_* / self.view_* attributes.  The name tuples below are
# partially elided in this dump (numbering gaps), as is the `views` tuple
# header itself.
2846 def __setuptables(self):
2847 tables_with_primary = (
2858 'changes_pending_binaries',
2859 'changes_pending_files',
2860 'changes_pending_source',
2870 'pending_bin_contents',
2882 # The following tables have primary keys but sqlalchemy
2883 # version 0.5 fails to reflect them correctly with database
2884 # versions before upgrade #41.
2886 #'build_queue_files',
2889 tables_no_primary = (
2891 'changes_pending_files_map',
2892 'changes_pending_source_files',
2893 'changes_pool_files',
2896 'suite_architectures',
2897 'suite_src_formats',
2898 'suite_build_queue_copy',
2900 # see the comment above
2902 'build_queue_files',
2906 'almost_obsolete_all_associations',
2907 'almost_obsolete_src_associations',
2908 'any_associations_source',
2909 'bin_assoc_by_arch',
2910 'bin_associations_binaries',
2911 'binaries_suite_arch',
2912 'binfiles_suite_component_arch',
2915 'newest_all_associations',
2916 'newest_any_associations',
2918 'newest_src_association',
2919 'obsolete_all_associations',
2920 'obsolete_any_associations',
2921 'obsolete_any_by_all_associations',
2922 'obsolete_src_associations',
2924 'src_associations_bin',
2925 'src_associations_src',
2926 'suite_arch_by_name',
2929 # Sqlalchemy version 0.5 fails to reflect the SERIAL type
2930 # correctly and that is why we have to use a workaround. It can
2931 # be removed as soon as we switch to version 0.6.
2932 for table_name in tables_with_primary:
2933 table = Table(table_name, self.db_meta, \
2934 Column('id', Integer, primary_key = True), \
2935 autoload=True, useexisting=True)
2936 setattr(self, 'tbl_%s' % table_name, table)
2938 for table_name in tables_no_primary:
2939 table = Table(table_name, self.db_meta, autoload=True)
2940 setattr(self, 'tbl_%s' % table_name, table)
2942 for view_name in views:
2943 view = Table(view_name, self.db_meta, autoload=True)
2944 setattr(self, 'view_%s' % view_name, view)
# Map every ORM class in this module onto its reflected table, renaming
# raw columns (e.g. id -> arch_id) and wiring relations/backrefs between
# the classes.  Must run after __setuptables.
2946 def __setupmappers(self):
2947 mapper(Architecture, self.tbl_architecture,
2948 properties = dict(arch_id = self.tbl_architecture.c.id,
2949 suites = relation(Suite, secondary=self.tbl_suite_architectures,
2950 order_by='suite_name',
2951 backref=backref('architectures', order_by='arch_string'))),
2952 extension = validator)
2954 mapper(Archive, self.tbl_archive,
2955 properties = dict(archive_id = self.tbl_archive.c.id,
2956 archive_name = self.tbl_archive.c.name))
2958 mapper(BinAssociation, self.tbl_bin_associations,
2959 properties = dict(ba_id = self.tbl_bin_associations.c.id,
2960 suite_id = self.tbl_bin_associations.c.suite,
2961 suite = relation(Suite),
2962 binary_id = self.tbl_bin_associations.c.bin,
2963 binary = relation(DBBinary)))
2965 mapper(PendingBinContents, self.tbl_pending_bin_contents,
2966 properties = dict(contents_id =self.tbl_pending_bin_contents.c.id,
2967 filename = self.tbl_pending_bin_contents.c.filename,
2968 package = self.tbl_pending_bin_contents.c.package,
2969 version = self.tbl_pending_bin_contents.c.version,
2970 arch = self.tbl_pending_bin_contents.c.arch,
2971 otype = self.tbl_pending_bin_contents.c.type))
2973 mapper(DebContents, self.tbl_deb_contents,
2974 properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
2975 package=self.tbl_deb_contents.c.package,
2976 suite=self.tbl_deb_contents.c.suite,
2977 arch=self.tbl_deb_contents.c.arch,
2978 section=self.tbl_deb_contents.c.section,
2979 filename=self.tbl_deb_contents.c.filename))
2981 mapper(UdebContents, self.tbl_udeb_contents,
2982 properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
2983 package=self.tbl_udeb_contents.c.package,
2984 suite=self.tbl_udeb_contents.c.suite,
2985 arch=self.tbl_udeb_contents.c.arch,
2986 section=self.tbl_udeb_contents.c.section,
2987 filename=self.tbl_udeb_contents.c.filename))
2989 mapper(BuildQueue, self.tbl_build_queue,
2990 properties = dict(queue_id = self.tbl_build_queue.c.id))
2992 mapper(BuildQueueFile, self.tbl_build_queue_files,
2993 properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
2994 poolfile = relation(PoolFile, backref='buildqueueinstances')))
2996 mapper(DBBinary, self.tbl_binaries,
2997 properties = dict(binary_id = self.tbl_binaries.c.id,
2998 package = self.tbl_binaries.c.package,
2999 version = self.tbl_binaries.c.version,
3000 maintainer_id = self.tbl_binaries.c.maintainer,
3001 maintainer = relation(Maintainer),
3002 source_id = self.tbl_binaries.c.source,
3003 source = relation(DBSource),
3004 arch_id = self.tbl_binaries.c.architecture,
3005 architecture = relation(Architecture),
3006 poolfile_id = self.tbl_binaries.c.file,
3007 poolfile = relation(PoolFile),
3008 binarytype = self.tbl_binaries.c.type,
3009 fingerprint_id = self.tbl_binaries.c.sig_fpr,
3010 fingerprint = relation(Fingerprint),
3011 install_date = self.tbl_binaries.c.install_date,
3012 binassociations = relation(BinAssociation,
3013 primaryjoin=(self.tbl_binaries.c.id==self.tbl_bin_associations.c.bin))))
3015 mapper(BinaryACL, self.tbl_binary_acl,
3016 properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))
3018 mapper(BinaryACLMap, self.tbl_binary_acl_map,
3019 properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
3020 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
3021 architecture = relation(Architecture)))
3023 mapper(Component, self.tbl_component,
3024 properties = dict(component_id = self.tbl_component.c.id,
3025 component_name = self.tbl_component.c.name))
3027 mapper(DBConfig, self.tbl_config,
3028 properties = dict(config_id = self.tbl_config.c.id))
3030 mapper(DSCFile, self.tbl_dsc_files,
3031 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
3032 source_id = self.tbl_dsc_files.c.source,
3033 source = relation(DBSource),
3034 poolfile_id = self.tbl_dsc_files.c.file,
3035 poolfile = relation(PoolFile)))
3037 mapper(PoolFile, self.tbl_files,
3038 properties = dict(file_id = self.tbl_files.c.id,
3039 filesize = self.tbl_files.c.size,
3040 location_id = self.tbl_files.c.location,
3041 location = relation(Location,
3042 # using lazy='dynamic' in the back
3043 # reference because we have A LOT of
3044 # files in one location
3045 backref=backref('files', lazy='dynamic'))))
3047 mapper(Fingerprint, self.tbl_fingerprint,
3048 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
3049 uid_id = self.tbl_fingerprint.c.uid,
3050 uid = relation(Uid),
3051 keyring_id = self.tbl_fingerprint.c.keyring,
3052 keyring = relation(Keyring),
3053 source_acl = relation(SourceACL),
3054 binary_acl = relation(BinaryACL)))
3056 mapper(Keyring, self.tbl_keyrings,
3057 properties = dict(keyring_name = self.tbl_keyrings.c.name,
3058 keyring_id = self.tbl_keyrings.c.id))
3060 mapper(DBChange, self.tbl_changes,
3061 properties = dict(change_id = self.tbl_changes.c.id,
3062 poolfiles = relation(PoolFile,
3063 secondary=self.tbl_changes_pool_files,
3064 backref="changeslinks"),
3065 seen = self.tbl_changes.c.seen,
3066 source = self.tbl_changes.c.source,
3067 binaries = self.tbl_changes.c.binaries,
3068 architecture = self.tbl_changes.c.architecture,
3069 distribution = self.tbl_changes.c.distribution,
3070 urgency = self.tbl_changes.c.urgency,
3071 maintainer = self.tbl_changes.c.maintainer,
3072 changedby = self.tbl_changes.c.changedby,
3073 date = self.tbl_changes.c.date,
3074 version = self.tbl_changes.c.version,
3075 files = relation(ChangePendingFile,
3076 secondary=self.tbl_changes_pending_files_map,
3077 backref="changesfile"),
3078 in_queue_id = self.tbl_changes.c.in_queue,
3079 in_queue = relation(PolicyQueue,
3080 primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
3081 approved_for_id = self.tbl_changes.c.approved_for))
3083 mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
3084 properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))
3086 mapper(ChangePendingFile, self.tbl_changes_pending_files,
3087 properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
3088 filename = self.tbl_changes_pending_files.c.filename,
3089 size = self.tbl_changes_pending_files.c.size,
3090 md5sum = self.tbl_changes_pending_files.c.md5sum,
3091 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
3092 sha256sum = self.tbl_changes_pending_files.c.sha256sum))
3094 mapper(ChangePendingSource, self.tbl_changes_pending_source,
3095 properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
3096 change = relation(DBChange),
3097 maintainer = relation(Maintainer,
3098 primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
3099 changedby = relation(Maintainer,
3100 primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
3101 fingerprint = relation(Fingerprint),
3102 source_files = relation(ChangePendingFile,
3103 secondary=self.tbl_changes_pending_source_files,
3104 backref="pending_sources")))
3107 mapper(KeyringACLMap, self.tbl_keyring_acl_map,
3108 properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
3109 keyring = relation(Keyring, backref="keyring_acl_map"),
3110 architecture = relation(Architecture)))
3112 mapper(Location, self.tbl_location,
3113 properties = dict(location_id = self.tbl_location.c.id,
3114 component_id = self.tbl_location.c.component,
3115 component = relation(Component),
3116 archive_id = self.tbl_location.c.archive,
3117 archive = relation(Archive),
3118 # FIXME: the 'type' column is old cruft and
3119 # should be removed in the future.
3120 archive_type = self.tbl_location.c.type))
3122 mapper(Maintainer, self.tbl_maintainer,
3123 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
3124 maintains_sources = relation(DBSource, backref='maintainer',
3125 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
3126 changed_sources = relation(DBSource, backref='changedby',
3127 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))))
3129 mapper(NewComment, self.tbl_new_comments,
3130 properties = dict(comment_id = self.tbl_new_comments.c.id))
3132 mapper(Override, self.tbl_override,
3133 properties = dict(suite_id = self.tbl_override.c.suite,
3134 suite = relation(Suite),
3135 package = self.tbl_override.c.package,
3136 component_id = self.tbl_override.c.component,
3137 component = relation(Component),
3138 priority_id = self.tbl_override.c.priority,
3139 priority = relation(Priority),
3140 section_id = self.tbl_override.c.section,
3141 section = relation(Section),
3142 overridetype_id = self.tbl_override.c.type,
3143 overridetype = relation(OverrideType)))
3145 mapper(OverrideType, self.tbl_override_type,
3146 properties = dict(overridetype = self.tbl_override_type.c.type,
3147 overridetype_id = self.tbl_override_type.c.id))
3149 mapper(PolicyQueue, self.tbl_policy_queue,
3150 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))
3152 mapper(Priority, self.tbl_priority,
3153 properties = dict(priority_id = self.tbl_priority.c.id))
3155 mapper(Section, self.tbl_section,
3156 properties = dict(section_id = self.tbl_section.c.id,
3157 section=self.tbl_section.c.section))
3159 mapper(DBSource, self.tbl_source,
3160 properties = dict(source_id = self.tbl_source.c.id,
3161 version = self.tbl_source.c.version,
3162 maintainer_id = self.tbl_source.c.maintainer,
3163 poolfile_id = self.tbl_source.c.file,
3164 poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
3165 fingerprint_id = self.tbl_source.c.sig_fpr,
3166 fingerprint = relation(Fingerprint),
3167 changedby_id = self.tbl_source.c.changedby,
3168 srcfiles = relation(DSCFile,
3169 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
3170 suites = relation(Suite, secondary=self.tbl_src_associations,
3172 srcuploaders = relation(SrcUploader)))
3174 mapper(SourceACL, self.tbl_source_acl,
3175 properties = dict(source_acl_id = self.tbl_source_acl.c.id))
3177 mapper(SrcFormat, self.tbl_src_format,
3178 properties = dict(src_format_id = self.tbl_src_format.c.id,
3179 format_name = self.tbl_src_format.c.format_name))
3181 mapper(SrcUploader, self.tbl_src_uploaders,
3182 properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
3183 source_id = self.tbl_src_uploaders.c.source,
3184 source = relation(DBSource,
3185 primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
3186 maintainer_id = self.tbl_src_uploaders.c.maintainer,
3187 maintainer = relation(Maintainer,
3188 primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))
3190 mapper(Suite, self.tbl_suite,
3191 properties = dict(suite_id = self.tbl_suite.c.id,
3192 policy_queue = relation(PolicyQueue),
3193 copy_queues = relation(BuildQueue, secondary=self.tbl_suite_build_queue_copy)))
3195 mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
3196 properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
3197 suite = relation(Suite, backref='suitesrcformats'),
3198 src_format_id = self.tbl_suite_src_formats.c.src_format,
3199 src_format = relation(SrcFormat)))
3201 mapper(Uid, self.tbl_uid,
3202 properties = dict(uid_id = self.tbl_uid.c.id,
3203 fingerprint = relation(Fingerprint)))
3205 mapper(UploadBlock, self.tbl_upload_blocks,
3206 properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
3207 fingerprint = relation(Fingerprint, backref="uploadblocks"),
3208 uid = relation(Uid, backref="uploadblocks")))
3210 ## Connection functions
# Build a postgres connection string from the DB::* configuration (TCP
# host:port when DB::Host is set, otherwise a local socket with optional
# ?port=), create the engine/metadata, then reflect tables and set up
# mappers.  NOTE(review): the Config() lookup and the host/no-host branch
# test lines (3213-3215, 3220-3221) are elided from this dump.
3211 def __createconn(self):
3212 from config import Config
3216 connstr = "postgres://%s" % cnf["DB::Host"]
3217 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3218 connstr += ":%s" % cnf["DB::Port"]
3219 connstr += "/%s" % cnf["DB::Name"]
3222 connstr = "postgres:///%s" % cnf["DB::Name"]
3223 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3224 connstr += "?port=%s" % cnf["DB::Port"]
3226 self.db_pg = create_engine(connstr, echo=self.debug)
3227 self.db_meta = MetaData()
3228 self.db_meta.bind = self.db_pg
3229 self.db_smaker = sessionmaker(bind=self.db_pg,
3233 self.__setuptables()
3234 self.__setupmappers()
# NOTE(review): the `def session(self):` header (line ~3236) is elided
# from this dump; line 3237 is its body, returning a new session from the
# sessionmaker created in __createconn.
3237 return self.db_smaker()
3239 __all__.append('DBConn')