5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
47 import simplejson as json
49 from datetime import datetime, timedelta
50 from errno import ENOENT
51 from tempfile import mkstemp, mkdtemp
53 from inspect import getargspec
56 from sqlalchemy import create_engine, Table, MetaData, Column, Integer
57 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
58 backref, MapperExtension, EXT_CONTINUE
59 from sqlalchemy import types as sqltypes
61 # Don't remove this, we re-export the exceptions to scripts which import us
62 from sqlalchemy.exc import *
63 from sqlalchemy.orm.exc import NoResultFound
65 # Only import Config until Queue stuff is changed to store its config
67 from config import Config
68 from textutils import fix_maintainer
69 from dak_exceptions import DBUpdateError, NoSourceFieldError
71 # suppress some deprecation warnings in squeeze related to sqlalchemy
73 warnings.filterwarnings('ignore', \
74 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
76 # TODO: sqlalchemy needs some extra configuration to correctly reflect
77 # the ind_deb_contents_* indexes - we ignore the warnings at the moment
78 warnings.filterwarnings("ignore", 'Predicate of partial index', SAWarning)
81 ################################################################################
83 # Patch in support for the debversion field type so that it works during
87 # that is for sqlalchemy 0.6
88 UserDefinedType = sqltypes.UserDefinedType
90 # this one for sqlalchemy 0.5
91 UserDefinedType = sqltypes.TypeEngine
93 class DebVersion(UserDefinedType):
94 def get_col_spec(self):
97 def bind_processor(self, dialect):
100 # ' = None' is needed for sqlalchemy 0.5:
101 def result_processor(self, dialect, coltype = None):
104 sa_major_version = sqlalchemy.__version__[0:3]
105 if sa_major_version in ["0.5", "0.6"]:
106 from sqlalchemy.databases import postgres
107 postgres.ischema_names['debversion'] = DebVersion
109 raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
111 ################################################################################
113 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
115 ################################################################################
117 def session_wrapper(fn):
119 Wrapper around common ".., session=None):" handling. If the wrapped
120 function is called without passing 'session', we create a local one
121 and destroy it when the function ends.
123 Also attaches a commit_or_flush method to the session; if we created a
124 local session, this is a synonym for session.commit(), otherwise it is a
125 synonym for session.flush().
128 def wrapped(*args, **kwargs):
129 private_transaction = False
131 # Find the session object
132 session = kwargs.get('session')
135 if len(args) <= len(getargspec(fn)[0]) - 1:
136 # No session specified as last argument or in kwargs
137 private_transaction = True
138 session = kwargs['session'] = DBConn().session()
140 # Session is last argument in args
144 session = args[-1] = DBConn().session()
145 private_transaction = True
147 if private_transaction:
148 session.commit_or_flush = session.commit
150 session.commit_or_flush = session.flush
153 return fn(*args, **kwargs)
155 if private_transaction:
156 # We created a session; close it.
159 wrapped.__doc__ = fn.__doc__
160 wrapped.func_name = fn.func_name
164 __all__.append('session_wrapper')
166 ################################################################################
168 class ORMObject(object):
170 ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
171 derived classes must implement the properties() method.
174 def properties(self):
176 This method should be implemented by all derived classes and returns a
177 list of the important properties. The properties 'created' and
178 'modified' will be added automatically. A suffix '_count' should be
179 added to properties that are lists or query objects. The most important
180 property name should be returned as the first element in the list
181 because it is used by repr().
187 Returns a JSON representation of the object based on the properties
188 returned from the properties() method.
191 # add created and modified
192 all_properties = self.properties() + ['created', 'modified']
193 for property in all_properties:
194 # check for list or query
195 if property[-6:] == '_count':
196 real_property = property[:-6]
197 if not hasattr(self, real_property):
199 value = getattr(self, real_property)
200 if hasattr(value, '__len__'):
203 elif hasattr(value, 'count'):
205 value = value.count()
207 raise KeyError('Do not understand property %s.' % property)
209 if not hasattr(self, property):
212 value = getattr(self, property)
216 elif isinstance(value, ORMObject):
217 # use repr() for ORMObject types
220 # we want a string for all other types because json cannot
223 data[property] = value
224 return json.dumps(data)
228 Returns the name of the class.
230 return type(self).__name__
234 Returns a short string representation of the object using the first
235 element from the properties() method.
237 primary_property = self.properties()[0]
238 value = getattr(self, primary_property)
239 return '<%s %s>' % (self.classname(), str(value))
243 Returns a human readable form of the object using the properties()
246 return '<%s %s>' % (self.classname(), self.json())
248 def not_null_constraints(self):
250 Returns a list of properties that must be not NULL. Derived classes
251 should override this method if needed.
255 validation_message = \
256 "Validation failed because property '%s' must not be empty in object\n%s"
260 This function validates the not NULL constraints as returned by
261 not_null_constraints(). It raises the DBUpdateError exception if
264 for property in self.not_null_constraints():
265 # TODO: It is a bit awkward that the mapper configuration allow
266 # directly setting the numeric _id columns. We should get rid of it
268 if hasattr(self, property + '_id') and \
269 getattr(self, property + '_id') is not None:
271 if not hasattr(self, property) or getattr(self, property) is None:
272 raise DBUpdateError(self.validation_message % \
273 (property, str(self)))
275 __all__.append('ORMObject')
277 ################################################################################
279 class Validator(MapperExtension):
281 This class calls the validate() method for each instance for the
282 'before_update' and 'before_insert' events. A global object validator is
283 used for configuring the individual mappers.
286 def before_update(self, mapper, connection, instance):
290 def before_insert(self, mapper, connection, instance):
294 validator = Validator()
296 ################################################################################
class Architecture(ORMObject):
    """ORM class for a row of the architecture table."""

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Permit comparing an Architecture directly against a plain
        # architecture-name string.
        if not isinstance(val, str):
            # Anything else: hand back to the default comparison machinery.
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        # Mirror of __eq__ for inequality against a plain string.
        if not isinstance(val, str):
            # Anything else: hand back to the default comparison machinery.
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # 'arch_string' comes first because repr() displays the first entry.
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']
321 __all__.append('Architecture')
324 def get_architecture(architecture, session=None):
326 Returns database id for given C{architecture}.
328 @type architecture: string
329 @param architecture: The name of the architecture
331 @type session: Session
332 @param session: Optional SQLA session object (a temporary one will be
333 generated if not supplied)
336 @return: Architecture object for the given arch (None if not present)
339 q = session.query(Architecture).filter_by(arch_string=architecture)
343 except NoResultFound:
346 __all__.append('get_architecture')
348 # TODO: should be removed because the implementation is too trivial
350 def get_architecture_suites(architecture, session=None):
352 Returns list of Suite objects for given C{architecture} name
354 @type architecture: str
355 @param architecture: Architecture name to search for
357 @type session: Session
358 @param session: Optional SQL session object (a temporary one will be
359 generated if not supplied)
362 @return: list of Suite objects for the given name (may be empty)
365 return get_architecture(architecture, session).suites
367 __all__.append('get_architecture_suites')
369 ################################################################################
371 class Archive(object):
372 def __init__(self, *args, **kwargs):
376 return '<Archive %s>' % self.archive_name
378 __all__.append('Archive')
381 def get_archive(archive, session=None):
383 returns database id for given C{archive}.
385 @type archive: string
@param archive: the name of the archive
388 @type session: Session
389 @param session: Optional SQLA session object (a temporary one will be
390 generated if not supplied)
393 @return: Archive object for the given name (None if not present)
396 archive = archive.lower()
398 q = session.query(Archive).filter_by(archive_name=archive)
402 except NoResultFound:
405 __all__.append('get_archive')
407 ################################################################################
409 class BinAssociation(object):
410 def __init__(self, *args, **kwargs):
414 return '<BinAssociation %s (%s, %s)>' % (self.ba_id, self.binary, self.suite)
416 __all__.append('BinAssociation')
418 ################################################################################
420 class BinContents(object):
421 def __init__(self, *args, **kwargs):
425 return '<BinContents (%s, %s)>' % (self.binary, self.filename)
427 __all__.append('BinContents')
429 ################################################################################
431 class DBBinary(object):
432 def __init__(self, *args, **kwargs):
436 return '<DBBinary %s (%s, %s)>' % (self.package, self.version, self.architecture)
438 __all__.append('DBBinary')
441 def get_suites_binary_in(package, session=None):
443 Returns list of Suite objects which given C{package} name is in
446 @param package: DBBinary package name to search for
449 @return: list of Suite objects for the given package
452 return session.query(Suite).join(BinAssociation).join(DBBinary).filter_by(package=package).all()
454 __all__.append('get_suites_binary_in')
457 def get_binary_from_id(binary_id, session=None):
459 Returns DBBinary object for given C{id}
462 @param binary_id: Id of the required binary
464 @type session: Session
465 @param session: Optional SQLA session object (a temporary one will be
466 generated if not supplied)
469 @return: DBBinary object for the given binary (None if not present)
472 q = session.query(DBBinary).filter_by(binary_id=binary_id)
476 except NoResultFound:
479 __all__.append('get_binary_from_id')
482 def get_binaries_from_name(package, version=None, architecture=None, session=None):
484 Returns list of DBBinary objects for given C{package} name
487 @param package: DBBinary package name to search for
489 @type version: str or None
490 @param version: Version to search for (or None)
492 @type architecture: str, list or None
493 @param architecture: Architectures to limit to (or None if no limit)
495 @type session: Session
496 @param session: Optional SQL session object (a temporary one will be
497 generated if not supplied)
500 @return: list of DBBinary objects for the given name (may be empty)
503 q = session.query(DBBinary).filter_by(package=package)
505 if version is not None:
506 q = q.filter_by(version=version)
508 if architecture is not None:
509 if not isinstance(architecture, list):
510 architecture = [architecture]
511 q = q.join(Architecture).filter(Architecture.arch_string.in_(architecture))
517 __all__.append('get_binaries_from_name')
520 def get_binaries_from_source_id(source_id, session=None):
522 Returns list of DBBinary objects for given C{source_id}
525 @param source_id: source_id to search for
527 @type session: Session
528 @param session: Optional SQL session object (a temporary one will be
529 generated if not supplied)
532 @return: list of DBBinary objects for the given name (may be empty)
535 return session.query(DBBinary).filter_by(source_id=source_id).all()
537 __all__.append('get_binaries_from_source_id')
def get_binary_from_name_suite(package, suitename, session=None):
    """
    Fetch (package, version, component name, suite name) rows for the given
    binary C{package}, restricted by C{suitename}, newest version first.

    @type package: string
    @param package: binary package name to look up

    @type suitename: string
    @param suitename: SQL operator expression applied verbatim to
        su.suite_name (e.g. an equality test) -- it is interpolated, not
        bound, so it must come from trusted code, never from user input

    @type session: Session
    @param session: SQLA session object; presumably a temporary one is
        created when omitted, as elsewhere in this module -- TODO confirm
        the wrapping decorator

    @return: result rows of the executed query
    """
    ### For dak examine-package
    ### XXX: Doesn't use object API yet

    # NOTE(review): raw SQL assembled with %-interpolation rather than bound
    # parameters; the suitename fragment deliberately carries its own
    # comparison operator, which is why it cannot be a simple bind value.
    sql = """SELECT DISTINCT(b.package), b.version, c.name, su.suite_name
             FROM binaries b, files fi, location l, component c, bin_associations ba, suite su
             WHERE b.package='%(package)s'
               AND fi.location = l.id
               AND l.component = c.id
               AND su.suite_name %(suitename)s
             ORDER BY b.version DESC"""

    return session.execute(sql % {'package': package, 'suitename': suitename})
560 def get_binary_components(package, suitename, arch, session=None):
561 # Check for packages that have moved from one component to another
562 query = """SELECT c.name FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f
563 WHERE b.package=:package AND s.suite_name=:suitename
564 AND (a.arch_string = :arch OR a.arch_string = 'all')
565 AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
566 AND f.location = l.id
567 AND l.component = c.id
570 vals = {'package': package, 'suitename': suitename, 'arch': arch}
572 return session.execute(query, vals)
574 __all__.append('get_binary_components')
576 ################################################################################
578 class BinaryACL(object):
579 def __init__(self, *args, **kwargs):
583 return '<BinaryACL %s>' % self.binary_acl_id
585 __all__.append('BinaryACL')
587 ################################################################################
589 class BinaryACLMap(object):
590 def __init__(self, *args, **kwargs):
594 return '<BinaryACLMap %s>' % self.binary_acl_map_id
596 __all__.append('BinaryACLMap')
598 ################################################################################
603 ArchiveDir "%(archivepath)s";
604 OverrideDir "%(overridedir)s";
605 CacheDir "%(cachedir)s";
610 Packages::Compress ". bzip2 gzip";
611 Sources::Compress ". bzip2 gzip";
616 bindirectory "incoming"
621 BinOverride "override.sid.all3";
622 BinCacheDB "packages-accepted.db";
624 FileList "%(filelist)s";
627 Packages::Extensions ".deb .udeb";
630 bindirectory "incoming/"
633 BinOverride "override.sid.all3";
634 SrcOverride "override.sid.all3.src";
635 FileList "%(filelist)s";
639 class BuildQueue(object):
640 def __init__(self, *args, **kwargs):
644 return '<BuildQueue %s>' % self.queue_name
646 def write_metadata(self, starttime, force=False):
647 # Do we write out metafiles?
648 if not (force or self.generate_metadata):
651 session = DBConn().session().object_session(self)
653 fl_fd = fl_name = ac_fd = ac_name = None
655 arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
656 startdir = os.getcwd()
659 # Grab files we want to include
660 newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
661 # Write file list with newer files
662 (fl_fd, fl_name) = mkstemp()
664 os.write(fl_fd, '%s\n' % n.fullpath)
669 # Write minimal apt.conf
670 # TODO: Remove hardcoding from template
671 (ac_fd, ac_name) = mkstemp()
672 os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
674 'cachedir': cnf["Dir::Cache"],
675 'overridedir': cnf["Dir::Override"],
679 # Run apt-ftparchive generate
680 os.chdir(os.path.dirname(ac_name))
681 os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
683 # Run apt-ftparchive release
684 # TODO: Eww - fix this
685 bname = os.path.basename(self.path)
689 # We have to remove the Release file otherwise it'll be included in the
692 os.unlink(os.path.join(bname, 'Release'))
696 os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
698 # Crude hack with open and append, but this whole section is and should be redone.
699 if self.notautomatic:
700 release=open("Release", "a")
701 release.write("NotAutomatic: yes")
706 keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
707 if cnf.has_key("Dinstall::SigningPubKeyring"):
708 keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
710 os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
712 # Move the files if we got this far
713 os.rename('Release', os.path.join(bname, 'Release'))
715 os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
717 # Clean up any left behind files
744 def clean_and_update(self, starttime, Logger, dryrun=False):
745 """WARNING: This routine commits for you"""
746 session = DBConn().session().object_session(self)
748 if self.generate_metadata and not dryrun:
749 self.write_metadata(starttime)
751 # Grab files older than our execution time
752 older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
758 Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
760 Logger.log(["I: Removing %s from the queue" % o.fullpath])
761 os.unlink(o.fullpath)
764 # If it wasn't there, don't worry
765 if e.errno == ENOENT:
768 # TODO: Replace with proper logging call
769 Logger.log(["E: Could not remove %s" % o.fullpath])
776 for f in os.listdir(self.path):
777 if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
781 r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
782 except NoResultFound:
783 fp = os.path.join(self.path, f)
785 Logger.log(["I: Would remove unused link %s" % fp])
787 Logger.log(["I: Removing unused link %s" % fp])
791 Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
793 def add_file_from_pool(self, poolfile):
794 """Copies a file into the pool. Assumes that the PoolFile object is
795 attached to the same SQLAlchemy session as the Queue object is.
797 The caller is responsible for committing after calling this function."""
798 poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
800 # Check if we have a file of this name or this ID already
801 for f in self.queuefiles:
802 if f.fileid is not None and f.fileid == poolfile.file_id or \
803 f.poolfile.filename == poolfile_basename:
804 # In this case, update the BuildQueueFile entry so we
805 # don't remove it too early
806 f.lastused = datetime.now()
807 DBConn().session().object_session(poolfile).add(f)
810 # Prepare BuildQueueFile object
811 qf = BuildQueueFile()
812 qf.build_queue_id = self.queue_id
813 qf.lastused = datetime.now()
814 qf.filename = poolfile_basename
816 targetpath = poolfile.fullpath
817 queuepath = os.path.join(self.path, poolfile_basename)
821 # We need to copy instead of symlink
823 utils.copy(targetpath, queuepath)
824 # NULL in the fileid field implies a copy
827 os.symlink(targetpath, queuepath)
828 qf.fileid = poolfile.file_id
832 # Get the same session as the PoolFile is using and add the qf to it
833 DBConn().session().object_session(poolfile).add(qf)
838 __all__.append('BuildQueue')
841 def get_build_queue(queuename, session=None):
843 Returns BuildQueue object for given C{queue name}, creating it if it does not
846 @type queuename: string
847 @param queuename: The name of the queue
849 @type session: Session
850 @param session: Optional SQLA session object (a temporary one will be
851 generated if not supplied)
854 @return: BuildQueue object for the given queue
857 q = session.query(BuildQueue).filter_by(queue_name=queuename)
861 except NoResultFound:
864 __all__.append('get_build_queue')
866 ################################################################################
868 class BuildQueueFile(object):
869 def __init__(self, *args, **kwargs):
873 return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
877 return os.path.join(self.buildqueue.path, self.filename)
880 __all__.append('BuildQueueFile')
882 ################################################################################
884 class ChangePendingBinary(object):
885 def __init__(self, *args, **kwargs):
889 return '<ChangePendingBinary %s>' % self.change_pending_binary_id
891 __all__.append('ChangePendingBinary')
893 ################################################################################
895 class ChangePendingFile(object):
896 def __init__(self, *args, **kwargs):
900 return '<ChangePendingFile %s>' % self.change_pending_file_id
902 __all__.append('ChangePendingFile')
904 ################################################################################
906 class ChangePendingSource(object):
907 def __init__(self, *args, **kwargs):
911 return '<ChangePendingSource %s>' % self.change_pending_source_id
913 __all__.append('ChangePendingSource')
915 ################################################################################
917 class Component(object):
918 def __init__(self, *args, **kwargs):
921 def __eq__(self, val):
922 if isinstance(val, str):
923 return (self.component_name == val)
924 # This signals to use the normal comparison operator
925 return NotImplemented
927 def __ne__(self, val):
928 if isinstance(val, str):
929 return (self.component_name != val)
930 # This signals to use the normal comparison operator
931 return NotImplemented
934 return '<Component %s>' % self.component_name
937 __all__.append('Component')
940 def get_component(component, session=None):
942 Returns database id for given C{component}.
944 @type component: string
945 @param component: The name of the override type
948 @return: the database id for the given component
951 component = component.lower()
953 q = session.query(Component).filter_by(component_name=component)
957 except NoResultFound:
960 __all__.append('get_component')
962 ################################################################################
964 class DBConfig(object):
965 def __init__(self, *args, **kwargs):
969 return '<DBConfig %s>' % self.name
971 __all__.append('DBConfig')
973 ################################################################################
976 def get_or_set_contents_file_id(filename, session=None):
978 Returns database id for given filename.
980 If no matching file is found, a row is inserted.
982 @type filename: string
983 @param filename: The filename
984 @type session: SQLAlchemy
985 @param session: Optional SQL session object (a temporary one will be
986 generated if not supplied). If not passed, a commit will be performed at
the end of the function, otherwise the caller is responsible for committing.
990 @return: the database id for the given component
993 q = session.query(ContentFilename).filter_by(filename=filename)
996 ret = q.one().cafilename_id
997 except NoResultFound:
998 cf = ContentFilename()
999 cf.filename = filename
1001 session.commit_or_flush()
1002 ret = cf.cafilename_id
1006 __all__.append('get_or_set_contents_file_id')
1009 def get_contents(suite, overridetype, section=None, session=None):
1011 Returns contents for a suite / overridetype combination, limiting
1012 to a section if not None.
1015 @param suite: Suite object
1017 @type overridetype: OverrideType
1018 @param overridetype: OverrideType object
1020 @type section: Section
1021 @param section: Optional section object to limit results to
1023 @type session: SQLAlchemy
1024 @param session: Optional SQL session object (a temporary one will be
1025 generated if not supplied)
1027 @rtype: ResultsProxy
1028 @return: ResultsProxy object set up to return tuples of (filename, section,
1032 # find me all of the contents for a given suite
1033 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
1037 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
1038 JOIN content_file_names n ON (c.filename=n.id)
1039 JOIN binaries b ON (b.id=c.binary_pkg)
1040 JOIN override o ON (o.package=b.package)
1041 JOIN section s ON (s.id=o.section)
1042 WHERE o.suite = :suiteid AND o.type = :overridetypeid
1043 AND b.type=:overridetypename"""
1045 vals = {'suiteid': suite.suite_id,
1046 'overridetypeid': overridetype.overridetype_id,
1047 'overridetypename': overridetype.overridetype}
1049 if section is not None:
1050 contents_q += " AND s.id = :sectionid"
1051 vals['sectionid'] = section.section_id
1053 contents_q += " ORDER BY fn"
1055 return session.execute(contents_q, vals)
1057 __all__.append('get_contents')
1059 ################################################################################
1061 class ContentFilepath(object):
1062 def __init__(self, *args, **kwargs):
1066 return '<ContentFilepath %s>' % self.filepath
1068 __all__.append('ContentFilepath')
1071 def get_or_set_contents_path_id(filepath, session=None):
1073 Returns database id for given path.
1075 If no matching file is found, a row is inserted.
1077 @type filepath: string
1078 @param filepath: The filepath
1080 @type session: SQLAlchemy
1081 @param session: Optional SQL session object (a temporary one will be
1082 generated if not supplied). If not passed, a commit will be performed at
the end of the function, otherwise the caller is responsible for committing.
1086 @return: the database id for the given path
1089 q = session.query(ContentFilepath).filter_by(filepath=filepath)
1092 ret = q.one().cafilepath_id
1093 except NoResultFound:
1094 cf = ContentFilepath()
1095 cf.filepath = filepath
1097 session.commit_or_flush()
1098 ret = cf.cafilepath_id
1102 __all__.append('get_or_set_contents_path_id')
1104 ################################################################################
1106 class ContentAssociation(object):
1107 def __init__(self, *args, **kwargs):
1111 return '<ContentAssociation %s>' % self.ca_id
1113 __all__.append('ContentAssociation')
1115 def insert_content_paths(binary_id, fullpaths, session=None):
1117 Make sure given path is associated with given binary id
1119 @type binary_id: int
1120 @param binary_id: the id of the binary
1121 @type fullpaths: list
1122 @param fullpaths: the list of paths of the file being associated with the binary
1123 @type session: SQLAlchemy session
1124 @param session: Optional SQLAlchemy session. If this is passed, the caller
1125 is responsible for ensuring a transaction has begun and committing the
1126 results or rolling back based on the result code. If not passed, a commit
1127 will be performed at the end of the function, otherwise the caller is
responsible for committing.
1130 @return: True upon success
1133 privatetrans = False
1135 session = DBConn().session()
1140 def generate_path_dicts():
1141 for fullpath in fullpaths:
1142 if fullpath.startswith( './' ):
1143 fullpath = fullpath[2:]
1145 yield {'filename':fullpath, 'id': binary_id }
1147 for d in generate_path_dicts():
1148 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
1157 traceback.print_exc()
1159 # Only rollback if we set up the session ourself
1166 __all__.append('insert_content_paths')
1168 ################################################################################
1170 class DSCFile(object):
1171 def __init__(self, *args, **kwargs):
1175 return '<DSCFile %s>' % self.dscfile_id
1177 __all__.append('DSCFile')
1180 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
1182 Returns a list of DSCFiles which may be empty
1184 @type dscfile_id: int (optional)
1185 @param dscfile_id: the dscfile_id of the DSCFiles to find
1187 @type source_id: int (optional)
1188 @param source_id: the source id related to the DSCFiles to find
1190 @type poolfile_id: int (optional)
1191 @param poolfile_id: the poolfile id related to the DSCFiles to find
1194 @return: Possibly empty list of DSCFiles
1197 q = session.query(DSCFile)
1199 if dscfile_id is not None:
1200 q = q.filter_by(dscfile_id=dscfile_id)
1202 if source_id is not None:
1203 q = q.filter_by(source_id=source_id)
1205 if poolfile_id is not None:
1206 q = q.filter_by(poolfile_id=poolfile_id)
1210 __all__.append('get_dscfiles')
1212 ################################################################################
1214 class PoolFile(ORMObject):
1215 def __init__(self, filename = None, location = None, filesize = -1, \
1217 self.filename = filename
1218 self.location = location
1219 self.filesize = filesize
1220 self.md5sum = md5sum
1224 return os.path.join(self.location.path, self.filename)
1226 def is_valid(self, filesize = -1, md5sum = None):\
1227 return self.filesize == filesize and self.md5sum == md5sum
1229 def properties(self):
1230 return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
1231 'sha256sum', 'location', 'source', 'last_used']
1233 def not_null_constraints(self):
1234 return ['filename', 'md5sum', 'location']
1236 __all__.append('PoolFile')
1239 def check_poolfile(filename, filesize, md5sum, location_id, session=None):
1242 (ValidFileFound [boolean], PoolFile object or None)
1244 @type filename: string
1245 @param filename: the filename of the file to check against the DB
1248 @param filesize: the size of the file to check against the DB
1250 @type md5sum: string
1251 @param md5sum: the md5sum of the file to check against the DB
1253 @type location_id: int
1254 @param location_id: the id of the location to look in
1257 @return: Tuple of length 2.
1258 - If valid pool file found: (C{True}, C{PoolFile object})
1259 - If valid pool file not found:
1260 - (C{False}, C{None}) if no file found
1261 - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
1264 poolfile = session.query(Location).get(location_id). \
1265 files.filter_by(filename=filename).first()
1267 if poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum):
1270 return (valid, poolfile)
1272 __all__.append('check_poolfile')
# TODO: the implementation can trivially be inlined at the place where the
# function is called
# Fetch a PoolFile by primary key; Query.get() returns None for unknown ids.
def get_poolfile_by_id(file_id, session=None):
Returns a PoolFile objects or None for the given id
@param file_id: the id of the file to look for
@rtype: PoolFile or None
@return: either the PoolFile object or None
return session.query(PoolFile).get(file_id)
__all__.append('get_poolfile_by_id')
# Find pool files whose stored path ends with the given basename
# (matched as '%/<filename>' via SQL LIKE).
def get_poolfile_like_name(filename, session=None):
Returns an array of PoolFile objects which are like the given name
@type filename: string
@param filename: the filename of the file to check against the DB
@return: array of PoolFile objects
# TODO: There must be a way of properly using bind parameters with %FOO%
q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
__all__.append('get_poolfile_like_name')
# Create a PoolFile row from an upload's checksum dict and add it to the
# session.  datadict must provide 'size', 'md5sum', 'sha1sum', 'sha256sum'.
def add_poolfile(filename, datadict, location_id, session=None):
Add a new file to the pool
@type filename: string
@param filename: filename
@type datadict: dict
@param datadict: dict with needed data
@type location_id: int
@param location_id: database id of the location
@return: the PoolFile object created
poolfile = PoolFile()
poolfile.filename = filename
poolfile.filesize = datadict["size"]
poolfile.md5sum = datadict["md5sum"]
poolfile.sha1sum = datadict["sha1sum"]
poolfile.sha256sum = datadict["sha256sum"]
poolfile.location_id = location_id
session.add(poolfile)
# Flush to get a file id (NB: This is not a commit)
__all__.append('add_poolfile')
1344 ################################################################################
# ORM class for an OpenPGP key fingerprint (table 'fingerprint').
class Fingerprint(ORMObject):
def __init__(self, fingerprint = None):
self.fingerprint = fingerprint
def properties(self):
return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
def not_null_constraints(self):
return ['fingerprint']
__all__.append('Fingerprint')
# Read-only lookup: return the Fingerprint row for 'fpr', or None when the
# fingerprint is not in the database (NoResultFound is caught below).
def get_fingerprint(fpr, session=None):
Returns Fingerprint object for given fpr.
@param fpr: The fpr to find / add
@type session: SQLAlchemy
@param session: Optional SQL session object (a temporary one will be
generated if not supplied).
@return: the Fingerprint object for the given fpr or None
q = session.query(Fingerprint).filter_by(fingerprint=fpr)
except NoResultFound:
__all__.append('get_fingerprint')
# Like get_fingerprint(), but inserts a new row when 'fpr' is unknown.
def get_or_set_fingerprint(fpr, session=None):
Returns Fingerprint object for given fpr.
If no matching fpr is found, a row is inserted.
@param fpr: The fpr to find / add
@type session: SQLAlchemy
@param session: Optional SQL session object (a temporary one will be
generated if not supplied). If not passed, a commit will be performed at
the end of the function, otherwise the caller is responsible for commiting.
A flush will be performed either way.
@return: the Fingerprint object for the given fpr
q = session.query(Fingerprint).filter_by(fingerprint=fpr)
except NoResultFound:
fingerprint = Fingerprint()
fingerprint.fingerprint = fpr
session.add(fingerprint)
# Commits if this function opened the session, otherwise just flushes.
session.commit_or_flush()
__all__.append('get_or_set_fingerprint')

################################################################################
# Helper routine for Keyring class
# Build a display name from an LDAP entry's cn/mn/sn attributes, skipping
# empty values and the "-" placeholder.
def get_ldap_name(entry):
for k in ["cn", "mn", "sn"]:
if ret and ret[0] != "" and ret[0] != "-":
return " ".join(name)

################################################################################
# Wrapper around a GPG keyring: parses 'gpg --with-colons' output and maps
# keys and uids onto database entries.
class Keyring(object):
# Command template; '%s' is filled with the keyring path in load_keys().
gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
" --with-colons --fingerprint --fingerprint"
def __init__(self, *args, **kwargs):
return '<Keyring %s>' % self.keyring_name
1447 def de_escape_gpg_str(self, txt):
1448 esclist = re.split(r'(\\x..)', txt)
1449 for x in range(1,len(esclist),2):
1450 esclist[x] = "%c" % (int(esclist[x][2:],16))
1451 return "".join(esclist)
def parse_address(self, uid):
"""parses uid and returns a tuple of real name and email address"""
(name, address) = email.Utils.parseaddr(uid)
# Strip any parenthesised comment from the real-name part, then decode
# gpg's \xNN escapes.
name = re.sub(r"\s*[(].*[)]", "", name)
name = self.de_escape_gpg_str(name)
return (name, address)
# Parse the colon-delimited gpg listing of 'keyring' into self.keys and
# self.fpr_lookup.  Requires keyring_id to have been set from the DB first.
def load_keys(self, keyring):
if not self.keyring_id:
raise Exception('Must be initialized with database information')
k = os.popen(self.gpg_invocation % keyring, "r")
for line in k.xreadlines():
field = line.split(":")
if field[0] == "pub":
# New primary key; field 9 carries the uid string.
(name, addr) = self.parse_address(field[9])
self.keys[key]["email"] = addr
self.keys[key]["name"] = name
self.keys[key]["fingerprints"] = []
elif key and field[0] == "sub" and len(field) >= 12:
# Subkey record: remember whether it has signing capability ('s').
signingkey = ("s" in field[11])
elif key and field[0] == "uid":
(name, addr) = self.parse_address(field[9])
# Prefer the first uid that actually carries an email address.
if "email" not in self.keys[key] and "@" in addr:
self.keys[key]["email"] = addr
self.keys[key]["name"] = name
elif signingkey and field[0] == "fpr":
# Fingerprint belonging to a signing-capable key seen above.
self.keys[key]["fingerprints"].append(field[9])
self.fpr_lookup[field[9]] = key
# Fetch uid/name/fingerprint data from the configured LDAP server and map
# it onto the keys previously loaded from the keyring.
def import_users_from_ldap(self, session):
LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
l = ldap.open(LDAPServer)
# Anonymous bind suffices for the attributes queried here.
l.simple_bind_s("","")
Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
"(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
["uid", "keyfingerprint", "cn", "mn", "sn"])
ldap_fin_uid_id = {}
uid = entry["uid"][0]
name = get_ldap_name(entry)
fingerprints = entry["keyFingerPrint"]
for f in fingerprints:
key = self.fpr_lookup.get(f, None)
if key not in self.keys:
self.keys[key]["uid"] = uid
keyid = get_or_set_uid(uid, session).uid_id
byuid[keyid] = (uid, name)
byname[uid] = (keyid, name)
return (byname, byuid)
# Synthesise archive user ids from key email addresses using 'format'
# (a %-template); keys without a usable email get an invalid-uid marker.
def generate_users_from_keyring(self, format, session):
for x in self.keys.keys():
if "email" not in self.keys[x]:
self.keys[x]["uid"] = format % "invalid-uid"
uid = format % self.keys[x]["email"]
keyid = get_or_set_uid(uid, session).uid_id
byuid[keyid] = (uid, self.keys[x]["name"])
byname[uid] = (keyid, self.keys[x]["name"])
self.keys[x]["uid"] = uid
uid = format % "invalid-uid"
keyid = get_or_set_uid(uid, session).uid_id
byuid[keyid] = (uid, "ungeneratable user id")
byname[uid] = (keyid, "ungeneratable user id")
return (byname, byuid)
__all__.append('Keyring')
# Return the existing Keyring row named 'keyring', or None if absent.
def get_keyring(keyring, session=None):
If C{keyring} does not have an entry in the C{keyrings} table yet, return None
If C{keyring} already has an entry, simply return the existing Keyring
@type keyring: string
@param keyring: the keyring name
@return: the Keyring object for this keyring
q = session.query(Keyring).filter_by(keyring_name=keyring)
except NoResultFound:
__all__.append('get_keyring')

################################################################################
# ORM class mapping keyrings to ACLs (table 'keyring_acl_map').
class KeyringACLMap(object):
def __init__(self, *args, **kwargs):
return '<KeyringACLMap %s>' % self.keyring_acl_map_id
__all__.append('KeyringACLMap')

################################################################################
# ORM class for an uploaded .changes file (table 'changes').
class DBChange(object):
def __init__(self, *args, **kwargs):
return '<DBChange %s>' % self.changesname
# Detach this changes file from any policy queue and drop its file links.
def clean_from_queue(self):
session = DBConn().session().object_session(self)
# Remove changes_pool_files entries
# Remove changes_pending_files references
# Clear out of queue
self.in_queue = None
self.approved_for_id = None
__all__.append('DBChange')
# Look up a DBChange by its .changes filename.
def get_dbchange(filename, session=None):
returns DBChange object for given C{filename}.
@type filename: string
@param filename: the name of the file
@type session: Session
@param session: Optional SQLA session object (a temporary one will be
generated if not supplied)
@return: DBChange object for the given filename (C{None} if not present)
q = session.query(DBChange).filter_by(changesname=filename)
except NoResultFound:
__all__.append('get_dbchange')

################################################################################
# ORM class for an archive location (table 'location').
class Location(ORMObject):
def __init__(self, path = None):
# the column 'type' should go away, see comment at mapper
self.archive_type = 'pool'
def properties(self):
return ['path', 'archive_type', 'component', 'files_count']
def not_null_constraints(self):
return ['path', 'archive_type']
__all__.append('Location')
# Look up a Location by path, optionally restricted to a component and/or
# archive; returns None when no match exists.
def get_location(location, component=None, archive=None, session=None):
Returns Location object for the given combination of location, component
@type location: string
@param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}
@type component: string
@param component: the component name (if None, no restriction applied)
@type archive: string
@param archive: the archive name (if None, no restriction applied)
@rtype: Location / None
@return: Either a Location object or None if one can't be found
q = session.query(Location).filter_by(path=location)
if archive is not None:
q = q.join(Archive).filter_by(archive_name=archive)
if component is not None:
q = q.join(Component).filter_by(component_name=component)
except NoResultFound:
__all__.append('get_location')

################################################################################
# ORM class for a package maintainer (table 'maintainer').
class Maintainer(ORMObject):
def __init__(self, name = None):
def properties(self):
return ['name', 'maintainer_id']
def not_null_constraints(self):
# Split the stored "Name <email>" string via fix_maintainer(); falls back
# to empty fields when no name is stored.
def get_split_maintainer(self):
if not hasattr(self, 'name') or self.name is None:
return ('', '', '', '')
return fix_maintainer(self.name.strip())
__all__.append('Maintainer')
# Fetch the Maintainer row for 'name', inserting it when missing.
def get_or_set_maintainer(name, session=None):
Returns Maintainer object for given maintainer name.
If no matching maintainer name is found, a row is inserted.
@param name: The maintainer name to add
@type session: SQLAlchemy
@param session: Optional SQL session object (a temporary one will be
generated if not supplied). If not passed, a commit will be performed at
the end of the function, otherwise the caller is responsible for commiting.
A flush will be performed either way.
@return: the Maintainer object for the given maintainer
q = session.query(Maintainer).filter_by(name=name)
except NoResultFound:
maintainer = Maintainer()
maintainer.name = name
session.add(maintainer)
# Commits if this function opened the session, otherwise just flushes.
session.commit_or_flush()
__all__.append('get_or_set_maintainer')
# Fetch a Maintainer by primary key; returns None for unknown ids.
def get_maintainer(maintainer_id, session=None):
Return the name of the maintainer behind C{maintainer_id} or None if that
maintainer_id is invalid.
@type maintainer_id: int
@param maintainer_id: the id of the maintainer
@return: the Maintainer with this C{maintainer_id}
return session.query(Maintainer).get(maintainer_id)
__all__.append('get_maintainer')

################################################################################
# ORM class for an ftpmaster comment on a NEW-queue package
# (table 'new_comments').
class NewComment(object):
def __init__(self, *args, **kwargs):
return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
__all__.append('NewComment')
# True when at least one NEW comment exists for this package/version pair.
def has_new_comment(package, version, session=None):
Returns true if the given combination of C{package}, C{version} has a comment.
@type package: string
@param package: name of the package
@type version: string
@param version: package version
@type session: Session
@param session: Optional SQLA session object (a temporary one will be
generated if not supplied)
q = session.query(NewComment)
q = q.filter_by(package=package)
q = q.filter_by(version=version)
return bool(q.count() > 0)
__all__.append('has_new_comment')
# List NEW comments, optionally filtered by package, version and/or id.
def get_new_comments(package=None, version=None, comment_id=None, session=None):
Returns (possibly empty) list of NewComment objects for the given
@type package: string (optional)
@param package: name of the package
@type version: string (optional)
@param version: package version
@type comment_id: int (optional)
@param comment_id: An id of a comment
@type session: Session
@param session: Optional SQLA session object (a temporary one will be
generated if not supplied)
@return: A (possibly empty) list of NewComment objects will be returned
q = session.query(NewComment)
if package is not None: q = q.filter_by(package=package)
if version is not None: q = q.filter_by(version=version)
if comment_id is not None: q = q.filter_by(comment_id=comment_id)
__all__.append('get_new_comments')

################################################################################
# ORM class for a package override entry (table 'override').
class Override(object):
def __init__(self, *args, **kwargs):
return '<Override %s (%s)>' % (self.package, self.suite_id)
__all__.append('Override')
# Fetch override entries for 'package'; suite, component and overridetype
# each accept a single name or a list of names.
def get_override(package, suite=None, component=None, overridetype=None, session=None):
Returns Override object for the given parameters
@type package: string
@param package: The name of the package
@type suite: string, list or None
@param suite: The name of the suite (or suites if a list) to limit to. If
None, don't limit. Defaults to None.
@type component: string, list or None
@param component: The name of the component (or components if a list) to
limit to. If None, don't limit. Defaults to None.
@type overridetype: string, list or None
@param overridetype: The name of the overridetype (or overridetypes if a list) to
limit to. If None, don't limit. Defaults to None.
@type session: Session
@param session: Optional SQLA session object (a temporary one will be
generated if not supplied)
@return: A (possibly empty) list of Override objects will be returned
q = session.query(Override)
q = q.filter_by(package=package)
# Scalar arguments are normalised to single-element lists below.
if suite is not None:
if not isinstance(suite, list): suite = [suite]
q = q.join(Suite).filter(Suite.suite_name.in_(suite))
if component is not None:
if not isinstance(component, list): component = [component]
q = q.join(Component).filter(Component.component_name.in_(component))
if overridetype is not None:
if not isinstance(overridetype, list): overridetype = [overridetype]
q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))
__all__.append('get_override')

################################################################################
# ORM class for an override type, e.g. deb/udeb/dsc (table 'override_type').
class OverrideType(object):
def __init__(self, *args, **kwargs):
return '<OverrideType %s>' % self.overridetype
__all__.append('OverrideType')
# Look up an OverrideType by name; returns None when unknown.
def get_override_type(override_type, session=None):
Returns OverrideType object for given C{override type}.
@type override_type: string
@param override_type: The name of the override type
@type session: Session
@param session: Optional SQLA session object (a temporary one will be
generated if not supplied)
@return: the database id for the given override type
q = session.query(OverrideType).filter_by(overridetype=override_type)
except NoResultFound:
__all__.append('get_override_type')

################################################################################
class DebContents(object):
    """ORM class for one path contained in a deb package (contents table)."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Fixed typo in repr tag: was '<DebConetnts ...>'.
        return '<DebContents %s: %s>' % (self.package.package, self.file)
1933 __all__.append('DebContents')
class UdebContents(object):
    """ORM class for one path contained in a udeb package (contents table)."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Fixed typo in repr tag: was '<UdebConetnts ...>'.
        return '<UdebContents %s: %s>' % (self.package.package, self.file)
1943 __all__.append('UdebContents')
# ORM class for contents of a not-yet-accepted binary package
# (table 'pending_bin_contents').
class PendingBinContents(object):
def __init__(self, *args, **kwargs):
return '<PendingBinContents %s>' % self.contents_id
__all__.append('PendingBinContents')
# Record the paths shipped by a not-yet-accepted binary package so its
# contents are queryable before acceptance.
def insert_pending_content_paths(package,
Make sure given paths are temporarily associated with given
@param package: the package to associate with should have been read in from the binary control file
@type fullpaths: list
@param fullpaths: the list of paths of the file being associated with the binary
@type session: SQLAlchemy session
@param session: Optional SQLAlchemy session. If this is passed, the caller
is responsible for ensuring a transaction has begun and committing the
results or rolling back based on the result code. If not passed, a commit
will be performed at the end of the function
@return: True upon success, False if there is a problem
privatetrans = False
session = DBConn().session()
arch = get_architecture(package['Architecture'], session)
arch_id = arch.arch_id
# Remove any already existing recorded files for this package
q = session.query(PendingBinContents)
q = q.filter_by(package=package['Package'])
q = q.filter_by(version=package['Version'])
q = q.filter_by(architecture=arch_id)
for fullpath in fullpaths:
# Normalise "./path" entries to plain relative paths.
if fullpath.startswith( "./" ):
fullpath = fullpath[2:]
pca = PendingBinContents()
pca.package = package['Package']
pca.version = package['Version']
pca.architecture = arch_id
pca.type = 8 # gross
pca.type = 7 # also gross
# Only commit if we set up the session ourself
# NOTE(review): Python 2-only except syntax below.
except Exception, e:
traceback.print_exc()
# Only rollback if we set up the session ourself
__all__.append('insert_pending_content_paths')

################################################################################
# ORM class for a policy queue, e.g. NEW (table 'policy_queue').
class PolicyQueue(object):
def __init__(self, *args, **kwargs):
return '<PolicyQueue %s>' % self.queue_name
__all__.append('PolicyQueue')
# Look up a PolicyQueue by name; returns None when unknown.
def get_policy_queue(queuename, session=None):
Returns PolicyQueue object for given C{queue name}
@type queuename: string
@param queuename: The name of the queue
@type session: Session
@param session: Optional SQLA session object (a temporary one will be
generated if not supplied)
@return: PolicyQueue object for the given queue
q = session.query(PolicyQueue).filter_by(queue_name=queuename)
except NoResultFound:
__all__.append('get_policy_queue')
# Look up a PolicyQueue by its filesystem path; returns None when unknown.
def get_policy_queue_from_path(pathname, session=None):
Returns PolicyQueue object for given C{path name}
@type pathname: string
@param pathname: The path
@type session: Session
@param session: Optional SQLA session object (a temporary one will be
generated if not supplied)
@return: PolicyQueue object for the given queue
q = session.query(PolicyQueue).filter_by(path=pathname)
except NoResultFound:
__all__.append('get_policy_queue_from_path')

################################################################################
# ORM class for a package priority (table 'priority').
class Priority(object):
def __init__(self, *args, **kwargs):
# Allow comparing a Priority directly against its name string.
def __eq__(self, val):
if isinstance(val, str):
return (self.priority == val)
# This signals to use the normal comparison operator
return NotImplemented
def __ne__(self, val):
if isinstance(val, str):
return (self.priority != val)
# This signals to use the normal comparison operator
return NotImplemented
return '<Priority %s (%s)>' % (self.priority, self.priority_id)
__all__.append('Priority')
# Look up a Priority by name; returns None when unknown.
def get_priority(priority, session=None):
Returns Priority object for given C{priority name}.
@type priority: string
@param priority: The name of the priority
@type session: Session
@param session: Optional SQLA session object (a temporary one will be
generated if not supplied)
@return: Priority object for the given priority
q = session.query(Priority).filter_by(priority=priority)
except NoResultFound:
__all__.append('get_priority')
# Build a {priority name: id} dict over the whole priority table.
def get_priorities(session=None):
Returns dictionary of priority names -> id mappings
@type session: Session
@param session: Optional SQL session object (a temporary one will be
generated if not supplied)
@return: dictionary of priority names -> id mappings
q = session.query(Priority)
ret[x.priority] = x.priority_id
__all__.append('get_priorities')

################################################################################
# ORM class for an archive section (table 'section').
class Section(object):
def __init__(self, *args, **kwargs):
# Allow comparing a Section directly against its name string.
def __eq__(self, val):
if isinstance(val, str):
return (self.section == val)
# This signals to use the normal comparison operator
return NotImplemented
def __ne__(self, val):
if isinstance(val, str):
return (self.section != val)
# This signals to use the normal comparison operator
return NotImplemented
return '<Section %s>' % self.section
__all__.append('Section')
# Look up a Section by name; returns None when unknown.
def get_section(section, session=None):
Returns Section object for given C{section name}.
@type section: string
@param section: The name of the section
@type session: Session
@param session: Optional SQLA session object (a temporary one will be
generated if not supplied)
@return: Section object for the given section name
q = session.query(Section).filter_by(section=section)
except NoResultFound:
__all__.append('get_section')
# Build a {section name: id} dict over the whole section table.
def get_sections(session=None):
Returns dictionary of section names -> id mappings
@type session: Session
@param session: Optional SQL session object (a temporary one will be
generated if not supplied)
@return: dictionary of section names -> id mappings
q = session.query(Section)
ret[x.section] = x.section_id
__all__.append('get_sections')

################################################################################
# ORM class for a source package (table 'source').
class DBSource(ORMObject):
def __init__(self, source = None, version = None, maintainer = None, \
changedby = None, poolfile = None, install_date = None):
self.source = source
self.version = version
self.maintainer = maintainer
self.changedby = changedby
self.poolfile = poolfile
self.install_date = install_date
def properties(self):
return ['source', 'source_id', 'maintainer', 'changedby', \
'fingerprint', 'poolfile', 'version', 'suites_count', \
def not_null_constraints(self):
# NOTE(review): 'install_date' is listed twice below; one entry is
# redundant.
return ['source', 'version', 'install_date', 'maintainer', \
'changedby', 'poolfile', 'install_date']
__all__.append('DBSource')
# Check that the named source package exists in the archive at the expected
# version, also accepting the version with any binNMU suffix stripped.
# NOTE(review): mutable default argument (suites = ["any"]); harmless here
# as the visible code never mutates it, but fragile.
def source_exists(source, source_version, suites = ["any"], session=None):
Ensure that source exists somewhere in the archive for the binary
upload being processed.
1. exact match => 1.0-3
2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
@type source: string
@param source: source name
@type source_version: string
@param source_version: expected source version
@param suites: list of suites to check in, default I{any}
@type session: Session
@param session: Optional SQLA session object (a temporary one will be
generated if not supplied)
@return: returns 1 if a source with expected version is found, otherwise 0
from daklib.regexes import re_bin_only_nmu
# Strip a "+bN" binNMU suffix to also match the underlying source version.
orig_source_version = re_bin_only_nmu.sub('', source_version)
for suite in suites:
q = session.query(DBSource).filter_by(source=source). \
filter(DBSource.version.in_([source_version, orig_source_version]))
# source must exist in suite X, or in some other suite that's
# mapped to X, recursively... silent-maps are counted too,
# unreleased-maps aren't.
maps = cnf.ValueList("SuiteMappings")[:]
maps = [ m.split() for m in maps ]
maps = [ (x[1], x[2]) for x in maps
if x[0] == "map" or x[0] == "silent-map" ]
if x[1] in s and x[0] not in s:
q = q.filter(DBSource.suites.any(Suite.suite_name.in_(s)))
# No source found so return not ok
__all__.append('source_exists')
# List every Suite that contains a source package with the given name.
def get_suites_source_in(source, session=None):
Returns list of Suite objects which given C{source} name is in
@param source: DBSource package name to search for
@return: list of Suite objects for the given source
return session.query(Suite).filter(Suite.sources.any(source=source)).all()
__all__.append('get_suites_source_in')
# List DBSource rows by name, optionally filtered by version and by the
# dm_upload_allowed flag.
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
Returns list of DBSource objects for given C{source} name and other parameters
@param source: DBSource package name to search for
@type version: str or None
@param version: DBSource version name to search for or None if not applicable
@type dm_upload_allowed: bool
@param dm_upload_allowed: If None, no effect. If True or False, only
return packages with that dm_upload_allowed setting
@type session: Session
@param session: Optional SQL session object (a temporary one will be
generated if not supplied)
@return: list of DBSource objects for the given name (may be empty)
q = session.query(DBSource).filter_by(source=source)
if version is not None:
q = q.filter_by(version=version)
if dm_upload_allowed is not None:
q = q.filter_by(dm_upload_allowed=dm_upload_allowed)
__all__.append('get_sources_from_name')
# FIXME: This function fails badly if it finds more than 1 source package and
# its implementation is trivial enough to be inlined.
# Return the single DBSource for 'source' in 'suite', or None when absent.
def get_source_in_suite(source, suite, session=None):
Returns a DBSource object for a combination of C{source} and C{suite}.
- B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
- B{suite} - a suite name, eg. I{unstable}
@type source: string
@param source: source package name
@param suite: the suite name
@return: the version for I{source} in I{suite}
q = get_suite(suite, session).get_sources(source)
except NoResultFound:
__all__.append('get_source_in_suite')

################################################################################
# Create the DBSource row (plus pool files, dsc_files and src_uploaders
# entries) for the .dsc of upload 'u'.  Returns the new source object, its
# component, its location id and the list of PoolFiles touched.
def add_dsc_to_db(u, filename, session=None):
entry = u.pkg.files[filename]
source.source = u.pkg.dsc["source"]
source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
source.install_date = datetime.now().date()
dsc_component = entry["component"]
dsc_location_id = entry["location id"]
source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")
# Set up a new poolfile if necessary
if not entry.has_key("files id") or not entry["files id"]:
filename = entry["pool name"] + filename
poolfile = add_poolfile(filename, entry, dsc_location_id, session)
pfs.append(poolfile)
entry["files id"] = poolfile.file_id
source.poolfile_id = entry["files id"]
# Associate the source with every suite listed in the changes file.
suite_names = u.pkg.changes["distribution"].keys()
source.suites = session.query(Suite). \
filter(Suite.suite_name.in_(suite_names)).all()
# Add the source files to the DB (files and dsc_files)
dscfile.source_id = source.source_id
dscfile.poolfile_id = entry["files id"]
session.add(dscfile)
for dsc_file, dentry in u.pkg.dsc_files.items():
df.source_id = source.source_id
# If the .orig tarball is already in the pool, it's
# files id is stored in dsc_files by check_dsc().
files_id = dentry.get("files id", None)
# Find the entry in the files hash
# TODO: Bail out here properly
for f, e in u.pkg.files.items():
if files_id is None:
filename = dfentry["pool name"] + dsc_file
(found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
# FIXME: needs to check for -1/-2 and or handle exception
if found and obj is not None:
files_id = obj.file_id
# If still not found, add it
if files_id is None:
# HACK: Force sha1sum etc into dentry
dentry["sha1sum"] = dfentry["sha1sum"]
dentry["sha256sum"] = dfentry["sha256sum"]
poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
pfs.append(poolfile)
files_id = poolfile.file_id
poolfile = get_poolfile_by_id(files_id, session)
if poolfile is None:
utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
pfs.append(poolfile)
df.poolfile_id = files_id
# Add the src_uploaders to the DB
uploader_ids = [source.maintainer_id]
if u.pkg.dsc.has_key("uploaders"):
# Split on "," between addresses; ">\t" trick protects commas in names.
for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)
for up_id in uploader_ids:
if added_ids.has_key(up_id):
utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))
su.maintainer_id = up_id
su.source_id = source.source_id
return source, dsc_component, dsc_location_id, pfs
__all__.append('add_dsc_to_db')
# Create the DBBinary row (and pool file, suite associations) for one
# uploaded .deb or .udeb of upload 'u'.
def add_deb_to_db(u, filename, session=None):
Contrary to what you might expect, this routine deals with both
debs and udebs. That info is in 'dbtype', whilst 'type' is
'deb' for both of them
entry = u.pkg.files[filename]
bin.package = entry["package"]
bin.version = entry["version"]
bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
bin.arch_id = get_architecture(entry["architecture"], session).arch_id
bin.binarytype = entry["dbtype"]
filename = entry["pool name"] + filename
fullpath = os.path.join(cnf["Dir::Pool"], filename)
if not entry.get("location id", None):
entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id
if entry.get("files id", None):
# NOTE(review): this reads bin.poolfile_id before it is assigned on the
# next line, and the result is unused — looks suspicious; verify intent.
poolfile = get_poolfile_by_id(bin.poolfile_id)
bin.poolfile_id = entry["files id"]
poolfile = add_poolfile(filename, entry, entry["location id"], session)
bin.poolfile_id = entry["files id"] = poolfile.file_id
# A binary must map to exactly one source package.
bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
if len(bin_sources) != 1:
raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
(bin.package, bin.version, entry["architecture"],
filename, bin.binarytype, u.pkg.changes["fingerprint"])
bin.source_id = bin_sources[0].source_id
# Add and flush object so it has an ID
# Add BinAssociations
for suite_name in u.pkg.changes["distribution"].keys():
ba = BinAssociation()
ba.binary_id = bin.binary_id
ba.suite_id = get_suite(suite_name).suite_id
# Deal with contents - disabled for now
#contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
# print "REJECT\nCould not determine contents of package %s" % bin.package
# session.rollback()
# raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
__all__.append('add_deb_to_db')

################################################################################
class SourceACL(object):
    """
    ACL entry controlling source uploads; attributes (source_acl_id, ...)
    are attached by the SQLAlchemy mapper in DBConn.
    """

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SourceACL %s>' % self.source_acl_id
# Export SourceACL.
2577 __all__.append('SourceACL')
2579 ################################################################################
class SrcFormat(object):
    """
    A source package format (e.g. '1.0', '3.0 (quilt)'); attributes
    (src_format_id, format_name) are attached by the mapper in DBConn.
    """

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SrcFormat %s>' % (self.format_name)
# Export SrcFormat.
2588 __all__.append('SrcFormat')
2590 ################################################################################
class SrcUploader(object):
    """
    An uploader (maintainer) entry attached to a source package; attributes
    (uploader_id, source, maintainer, ...) come from the mapper in DBConn.
    """

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SrcUploader %s>' % self.uploader_id
# Export SrcUploader.
2599 __all__.append('SrcUploader')
2601 ################################################################################
# (display name, Suite attribute) pairs rendered by Suite.details().
# NOTE(review): the excerpt's embedded numbering jumps 2606 -> 2608 here; the
# dropped entry is restored as ('Label', 'label') to match the standard
# Release-file field set -- confirm against the suite table schema.
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Label', 'label'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
# Why the heck don't we have any UNIQUE constraints in table suite?
# TODO: Add UNIQUE constraints for appropriate columns.
class Suite(ORMObject):
    """
    A single suite (e.g. I{unstable}, I{testing}).

    Compares equal to a plain string carrying the suite name, so callers
    can write C{suite == 'unstable'}.
    """

    def __init__(self, suite_name = None, version = None):
        self.suite_name = suite_name
        self.version = version

    def properties(self):
        # Attributes exposed by ORMObject's generic representation support.
        return ['suite_name', 'version']

    def not_null_constraints(self):
        # Attributes the validator checks for NOT NULL before a flush.
        return ['suite_name', 'version']

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.suite_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.suite_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def details(self):
        """
        Return a printable 'Field: value' line for every SUITE_FIELDS entry
        that is set on this suite, joined with newlines.

        @rtype: string
        @return: newline-separated suite details
        """
        ret = []
        for disp, field in SUITE_FIELDS:
            val = getattr(self, field, None)
            if val is not None:
                ret.append("%s: %s" % (disp, val))

        return "\n".join(ret)

    def get_architectures(self, skipsrc=False, skipall=False):
        """
        Returns list of Architecture objects

        @type skipsrc: boolean
        @param skipsrc: Whether to skip returning the 'source' architecture entry
        (Default False)

        @type skipall: boolean
        @param skipall: Whether to skip returning the 'all' architecture entry
        (Default False)

        @rtype: list
        @return: list of Architecture objects for the given name (may be empty)
        """
        q = object_session(self).query(Architecture).with_parent(self)
        if skipsrc:
            q = q.filter(Architecture.arch_string != 'source')
        if skipall:
            q = q.filter(Architecture.arch_string != 'all')
        return q.order_by(Architecture.arch_string).all()

    def get_sources(self, source):
        """
        Returns a query object representing DBSource that is part of C{suite}.

          - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}

        @type source: string
        @param source: source package name

        @rtype: sqlalchemy.orm.query.Query
        @return: a query of DBSource
        """
        session = object_session(self)
        return session.query(DBSource).filter_by(source = source). \
            with_parent(self)

__all__.append('Suite')
@session_wrapper
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Suite
    @return: Suite object for the requested suite name (None if not present)
    """

    q = session.query(Suite).filter_by(suite_name=suite)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_suite')
2721 ################################################################################
# TODO: should be removed because the implementation is too trivial
@session_wrapper
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name

    @type suite: str
    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry
    (Default False)

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry
    (Default False)

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Architecture objects for the given name (may be empty)
    """

    # Delegates entirely to Suite.get_architectures().
    return get_suite(suite, session).get_architectures(skipsrc, skipall)

__all__.append('get_suite_architectures')
2752 ################################################################################
class SuiteSrcFormat(object):
    """
    Association between a suite and a source format it accepts; attributes
    (suite_id, src_format_id, ...) are attached by the mapper in DBConn.
    """

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
# Export SuiteSrcFormat.
2761 __all__.append('SuiteSrcFormat')
@session_wrapper
def get_suite_src_formats(suite, session=None):
    """
    Returns list of allowed SrcFormat for C{suite}.

    @type suite: str
    @param suite: Suite name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: the list of allowed source formats for I{suite}
    """

    q = session.query(SrcFormat)
    q = q.join(SuiteSrcFormat)
    q = q.join(Suite).filter_by(suite_name=suite)
    q = q.order_by('format_name')

    return q.all()

__all__.append('get_suite_src_formats')
2788 ################################################################################
# NOTE(review): the class header itself is missing from this excerpt (the
# embedded numbering jumps 2788 -> 2791); a plain `object` base is used here
# because the Uid mapper in DBConn does not install the validator extension
# that the ORMObject classes use -- confirm against upstream.
class Uid(object):
    """
    An OpenPGP key uid: the uid string plus a human-readable name.

    Compares equal to a plain string carrying the uid value.
    """

    def __init__(self, uid = None, name = None):
        self.uid = uid
        self.name = name

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.uid == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.uid != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __repr__(self):
        return '<Uid %s (%s)>' % (self.uid, self.name)
# Export Uid.
2810 __all__.append('Uid')
@session_wrapper
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.

    @rtype: Uid
    @return: the uid object for the given uidname
    """

    q = session.query(Uid).filter_by(uid=uidname)

    try:
        ret = q.one()
    except NoResultFound:
        # Not present yet: insert a new row and hand it back.
        uid = Uid()
        uid.uid = uidname
        session.add(uid)
        session.commit_or_flush()
        ret = uid

    return ret

__all__.append('get_or_set_uid')
@session_wrapper
def get_uid_from_fingerprint(fpr, session=None):
    """
    Returns the Uid attached to the given fingerprint, or None if no
    matching row exists.

    @type fpr: string
    @param fpr: the fingerprint to look up

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: Uid
    @return: the matching Uid object (None if not present)
    """
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_uid_from_fingerprint')
2858 ################################################################################
class UploadBlock(object):
    """
    A block preventing uploads of a source package; attributes (source,
    upload_block_id, fingerprint, uid, ...) come from the mapper in DBConn.
    """

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
# Export UploadBlock.
2867 __all__.append('UploadBlock')
2869 ################################################################################
2871 class DBConn(object):
# NOTE(review): this excerpt is elided -- the embedded original line numbers
# jump repeatedly, so docstring delimiters, tuple closers and many table/view
# names are missing from view.  Code is left untouched; comments below
# describe only what is visible.
2873     database module init.
# Shared-state initialiser: every instance rebinds its __dict__ to the class
# __shared_state (defined in an elided line above), so all DBConn() objects
# share one connection; the 'initialised' flag makes setup run only once.
2877     def __init__(self, *args, **kwargs):
2878         self.__dict__ = self.__shared_state
2880         if not getattr(self, 'initialised', False):
2881             self.initialised = True
# has_key() is Python-2-only; 'debug' in kwargs would be the modern spelling.
2882             self.debug = kwargs.has_key('debug')
# Reflect database tables into SQLAlchemy Table objects, exposed as
# self.tbl_<name> and self.view_<name>.  Both name tuples below are truncated
# by the elision (opening parens without visible closers, entries missing).
2885     def __setuptables(self):
2886         tables_with_primary = (
2897 'changes_pending_binaries',
2898 'changes_pending_files',
2899 'changes_pending_source',
2909 'pending_bin_contents',
2921 # The following tables have primary keys but sqlalchemy
2922 # version 0.5 fails to reflect them correctly with database
2923 # versions before upgrade #41.
2925 #'build_queue_files',
2928         tables_no_primary = (
2930 'changes_pending_files_map',
2931 'changes_pending_source_files',
2932 'changes_pool_files',
2935 'suite_architectures',
2936 'suite_src_formats',
2937 'suite_build_queue_copy',
2939 # see the comment above
2941 'build_queue_files',
# NOTE(review): the entries below appear to belong to a `views = (` tuple
# whose assignment line falls in the elided span (they are consumed by the
# `for view_name in views:` loop further down) -- confirm against upstream.
2945 'almost_obsolete_all_associations',
2946 'almost_obsolete_src_associations',
2947 'any_associations_source',
2948 'bin_assoc_by_arch',
2949 'bin_associations_binaries',
2950 'binaries_suite_arch',
2951 'binfiles_suite_component_arch',
2954 'newest_all_associations',
2955 'newest_any_associations',
2957 'newest_src_association',
2958 'obsolete_all_associations',
2959 'obsolete_any_associations',
2960 'obsolete_any_by_all_associations',
2961 'obsolete_src_associations',
2963 'src_associations_bin',
2964 'src_associations_src',
2965 'suite_arch_by_name',
2968 # Sqlalchemy version 0.5 fails to reflect the SERIAL type
2969 # correctly and that is why we have to use a workaround. It can
2970 # be removed as soon as we switch to version 0.6.
2971         for table_name in tables_with_primary:
2972             table = Table(table_name, self.db_meta, \
2973                 Column('id', Integer, primary_key = True), \
2974                 autoload=True, useexisting=True)
2975             setattr(self, 'tbl_%s' % table_name, table)
2977         for table_name in tables_no_primary:
2978             table = Table(table_name, self.db_meta, autoload=True)
2979             setattr(self, 'tbl_%s' % table_name, table)
2981         for view_name in views:
2982             view = Table(view_name, self.db_meta, autoload=True)
2983             setattr(self, 'view_%s' % view_name, view)
# Wire the ORM classes defined in this module to the reflected tables.
# Mappers carrying `extension = validator` get NOT-NULL validation on flush.
2985     def __setupmappers(self):
2986         mapper(Architecture, self.tbl_architecture,
2987            properties = dict(arch_id = self.tbl_architecture.c.id,
2988                suites = relation(Suite, secondary=self.tbl_suite_architectures,
2989                    order_by='suite_name',
2990                    backref=backref('architectures', order_by='arch_string'))),
2991            extension = validator)
2993         mapper(Archive, self.tbl_archive,
2994                properties = dict(archive_id = self.tbl_archive.c.id,
2995                                  archive_name = self.tbl_archive.c.name))
2997         mapper(BinAssociation, self.tbl_bin_associations,
2998                properties = dict(ba_id = self.tbl_bin_associations.c.id,
2999                                  suite_id = self.tbl_bin_associations.c.suite,
3000                                  suite = relation(Suite),
3001                                  binary_id = self.tbl_bin_associations.c.bin,
3002                                  binary = relation(DBBinary)))
3004         mapper(PendingBinContents, self.tbl_pending_bin_contents,
3005                properties = dict(contents_id =self.tbl_pending_bin_contents.c.id,
3006                                  filename = self.tbl_pending_bin_contents.c.filename,
3007                                  package = self.tbl_pending_bin_contents.c.package,
3008                                  version = self.tbl_pending_bin_contents.c.version,
3009                                  arch = self.tbl_pending_bin_contents.c.arch,
3010                                  otype = self.tbl_pending_bin_contents.c.type))
3012         mapper(DebContents, self.tbl_deb_contents,
3013                properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
3014                                  package=self.tbl_deb_contents.c.package,
3015                                  suite=self.tbl_deb_contents.c.suite,
3016                                  arch=self.tbl_deb_contents.c.arch,
3017                                  section=self.tbl_deb_contents.c.section,
3018                                  filename=self.tbl_deb_contents.c.filename))
3020         mapper(UdebContents, self.tbl_udeb_contents,
3021                properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
3022                                  package=self.tbl_udeb_contents.c.package,
3023                                  suite=self.tbl_udeb_contents.c.suite,
3024                                  arch=self.tbl_udeb_contents.c.arch,
3025                                  section=self.tbl_udeb_contents.c.section,
3026                                  filename=self.tbl_udeb_contents.c.filename))
3028         mapper(BuildQueue, self.tbl_build_queue,
3029                properties = dict(queue_id = self.tbl_build_queue.c.id))
3031         mapper(BuildQueueFile, self.tbl_build_queue_files,
3032                properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
3033                                  poolfile = relation(PoolFile, backref='buildqueueinstances')))
3035         mapper(DBBinary, self.tbl_binaries,
3036                properties = dict(binary_id = self.tbl_binaries.c.id,
3037                                  package = self.tbl_binaries.c.package,
3038                                  version = self.tbl_binaries.c.version,
3039                                  maintainer_id = self.tbl_binaries.c.maintainer,
3040                                  maintainer = relation(Maintainer),
3041                                  source_id = self.tbl_binaries.c.source,
3042                                  source = relation(DBSource),
3043                                  arch_id = self.tbl_binaries.c.architecture,
3044                                  architecture = relation(Architecture),
3045                                  poolfile_id = self.tbl_binaries.c.file,
3046                                  poolfile = relation(PoolFile),
3047                                  binarytype = self.tbl_binaries.c.type,
3048                                  fingerprint_id = self.tbl_binaries.c.sig_fpr,
3049                                  fingerprint = relation(Fingerprint),
3050                                  install_date = self.tbl_binaries.c.install_date,
3051                                  binassociations = relation(BinAssociation,
3052                                                             primaryjoin=(self.tbl_binaries.c.id==self.tbl_bin_associations.c.bin))))
3054         mapper(BinaryACL, self.tbl_binary_acl,
3055                properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))
3057         mapper(BinaryACLMap, self.tbl_binary_acl_map,
3058                properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
3059                                  fingerprint = relation(Fingerprint, backref="binary_acl_map"),
3060                                  architecture = relation(Architecture)))
3062         mapper(Component, self.tbl_component,
3063                properties = dict(component_id = self.tbl_component.c.id,
3064                                  component_name = self.tbl_component.c.name))
3066         mapper(DBConfig, self.tbl_config,
3067                properties = dict(config_id = self.tbl_config.c.id))
3069         mapper(DSCFile, self.tbl_dsc_files,
3070                properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
3071                                  source_id = self.tbl_dsc_files.c.source,
3072                                  source = relation(DBSource),
3073                                  poolfile_id = self.tbl_dsc_files.c.file,
3074                                  poolfile = relation(PoolFile)))
3076         mapper(PoolFile, self.tbl_files,
3077                properties = dict(file_id = self.tbl_files.c.id,
3078                                  filesize = self.tbl_files.c.size,
3079                                  location_id = self.tbl_files.c.location,
3080                                  location = relation(Location,
3081                                      # using lazy='dynamic' in the back
3082                                      # reference because we have A LOT of
3083                                      # files in one location
3084                                      backref=backref('files', lazy='dynamic'))),
3085                extension = validator)
3087         mapper(Fingerprint, self.tbl_fingerprint,
3088                properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
3089                                  uid_id = self.tbl_fingerprint.c.uid,
3090                                  uid = relation(Uid),
3091                                  keyring_id = self.tbl_fingerprint.c.keyring,
3092                                  keyring = relation(Keyring),
3093                                  source_acl = relation(SourceACL),
3094                                  binary_acl = relation(BinaryACL)),
3095                extension = validator)
3097         mapper(Keyring, self.tbl_keyrings,
3098                properties = dict(keyring_name = self.tbl_keyrings.c.name,
3099                                  keyring_id = self.tbl_keyrings.c.id))
3101         mapper(DBChange, self.tbl_changes,
3102                properties = dict(change_id = self.tbl_changes.c.id,
3103                                  poolfiles = relation(PoolFile,
3104                                                       secondary=self.tbl_changes_pool_files,
3105                                                       backref="changeslinks"),
3106                                  seen = self.tbl_changes.c.seen,
3107                                  source = self.tbl_changes.c.source,
3108                                  binaries = self.tbl_changes.c.binaries,
3109                                  architecture = self.tbl_changes.c.architecture,
3110                                  distribution = self.tbl_changes.c.distribution,
3111                                  urgency = self.tbl_changes.c.urgency,
3112                                  maintainer = self.tbl_changes.c.maintainer,
3113                                  changedby = self.tbl_changes.c.changedby,
3114                                  date = self.tbl_changes.c.date,
3115                                  version = self.tbl_changes.c.version,
3116                                  files = relation(ChangePendingFile,
3117                                                   secondary=self.tbl_changes_pending_files_map,
3118                                                   backref="changesfile"),
3119                                  in_queue_id = self.tbl_changes.c.in_queue,
3120                                  in_queue = relation(PolicyQueue,
3121                                                      primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
3122                                  approved_for_id = self.tbl_changes.c.approved_for))
3124         mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
3125                properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))
3127         mapper(ChangePendingFile, self.tbl_changes_pending_files,
3128                properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
3129                                  filename = self.tbl_changes_pending_files.c.filename,
3130                                  size = self.tbl_changes_pending_files.c.size,
3131                                  md5sum = self.tbl_changes_pending_files.c.md5sum,
3132                                  sha1sum = self.tbl_changes_pending_files.c.sha1sum,
3133                                  sha256sum = self.tbl_changes_pending_files.c.sha256sum))
3135         mapper(ChangePendingSource, self.tbl_changes_pending_source,
3136                properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
3137                                  change = relation(DBChange),
3138                                  maintainer = relation(Maintainer,
3139                                                        primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
3140                                  changedby = relation(Maintainer,
3141                                                       primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
3142                                  fingerprint = relation(Fingerprint),
3143                                  source_files = relation(ChangePendingFile,
3144                                                          secondary=self.tbl_changes_pending_source_files,
3145                                                          backref="pending_sources")))
3148         mapper(KeyringACLMap, self.tbl_keyring_acl_map,
3149                properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
3150                                  keyring = relation(Keyring, backref="keyring_acl_map"),
3151                                  architecture = relation(Architecture)))
3153         mapper(Location, self.tbl_location,
3154                properties = dict(location_id = self.tbl_location.c.id,
3155                                  component_id = self.tbl_location.c.component,
3156                                  component = relation(Component),
3157                                  archive_id = self.tbl_location.c.archive,
3158                                  archive = relation(Archive),
3159                                  # FIXME: the 'type' column is old cruft and
3160                                  # should be removed in the future.
3161                                  archive_type = self.tbl_location.c.type),
3162                extension = validator)
3164         mapper(Maintainer, self.tbl_maintainer,
3165                properties = dict(maintainer_id = self.tbl_maintainer.c.id,
3166                    maintains_sources = relation(DBSource, backref='maintainer',
3167                        primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
3168                    changed_sources = relation(DBSource, backref='changedby',
3169                        primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
3170                extension = validator)
3172         mapper(NewComment, self.tbl_new_comments,
3173                properties = dict(comment_id = self.tbl_new_comments.c.id))
3175         mapper(Override, self.tbl_override,
3176                properties = dict(suite_id = self.tbl_override.c.suite,
3177                                  suite = relation(Suite),
3178                                  package = self.tbl_override.c.package,
3179                                  component_id = self.tbl_override.c.component,
3180                                  component = relation(Component),
3181                                  priority_id = self.tbl_override.c.priority,
3182                                  priority = relation(Priority),
3183                                  section_id = self.tbl_override.c.section,
3184                                  section = relation(Section),
3185                                  overridetype_id = self.tbl_override.c.type,
3186                                  overridetype = relation(OverrideType)))
3188         mapper(OverrideType, self.tbl_override_type,
3189                properties = dict(overridetype = self.tbl_override_type.c.type,
3190                                  overridetype_id = self.tbl_override_type.c.id))
3192         mapper(PolicyQueue, self.tbl_policy_queue,
3193                properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))
3195         mapper(Priority, self.tbl_priority,
3196                properties = dict(priority_id = self.tbl_priority.c.id))
3198         mapper(Section, self.tbl_section,
3199                properties = dict(section_id = self.tbl_section.c.id,
3200                                  section=self.tbl_section.c.section))
3202         mapper(DBSource, self.tbl_source,
3203                properties = dict(source_id = self.tbl_source.c.id,
3204                                  version = self.tbl_source.c.version,
3205                                  maintainer_id = self.tbl_source.c.maintainer,
3206                                  poolfile_id = self.tbl_source.c.file,
3207                                  poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
3208                                  fingerprint_id = self.tbl_source.c.sig_fpr,
3209                                  fingerprint = relation(Fingerprint),
3210                                  changedby_id = self.tbl_source.c.changedby,
3211                                  srcfiles = relation(DSCFile,
3212                                                      primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
3213                                  suites = relation(Suite, secondary=self.tbl_src_associations,
3215                                  srcuploaders = relation(SrcUploader)),
3216                extension = validator)
3218         mapper(SourceACL, self.tbl_source_acl,
3219                properties = dict(source_acl_id = self.tbl_source_acl.c.id))
3221         mapper(SrcFormat, self.tbl_src_format,
3222                properties = dict(src_format_id = self.tbl_src_format.c.id,
3223                                  format_name = self.tbl_src_format.c.format_name))
3225         mapper(SrcUploader, self.tbl_src_uploaders,
3226                properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
3227                                  source_id = self.tbl_src_uploaders.c.source,
3228                                  source = relation(DBSource,
3229                                                    primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
3230                                  maintainer_id = self.tbl_src_uploaders.c.maintainer,
3231                                  maintainer = relation(Maintainer,
3232                                                        primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))
3234         mapper(Suite, self.tbl_suite,
3235                properties = dict(suite_id = self.tbl_suite.c.id,
3236                                  policy_queue = relation(PolicyQueue),
3237                                  copy_queues = relation(BuildQueue,
3238                                      secondary=self.tbl_suite_build_queue_copy)),
3239                extension = validator)
3241         mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
3242                properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
3243                                  suite = relation(Suite, backref='suitesrcformats'),
3244                                  src_format_id = self.tbl_suite_src_formats.c.src_format,
3245                                  src_format = relation(SrcFormat)))
3247         mapper(Uid, self.tbl_uid,
3248                properties = dict(uid_id = self.tbl_uid.c.id,
3249                                  fingerprint = relation(Fingerprint)))
3251         mapper(UploadBlock, self.tbl_upload_blocks,
3252                properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
3253                                  fingerprint = relation(Fingerprint, backref="uploadblocks"),
3254                                  uid = relation(Uid, backref="uploadblocks")))
3256     ## Connection functions
# Build the connection string from the dak Config ("DB::Host", "DB::Port",
# "DB::Name") -- a host gives a TCP URL, otherwise the socket form
# postgres:///name is used -- then create the engine, metadata and
# sessionmaker, and reflect tables / set up mappers.
# NOTE(review): the 'postgres://' scheme name is deprecated in newer
# SQLAlchemy releases ('postgresql://'); fine for the pinned 0.5 here.
3257     def __createconn(self):
3258         from config import Config
3262             connstr = "postgres://%s" % cnf["DB::Host"]
3263             if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3264                 connstr += ":%s" % cnf["DB::Port"]
3265             connstr += "/%s" % cnf["DB::Name"]
3268             connstr = "postgres:///%s" % cnf["DB::Name"]
3269             if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3270                 connstr += "?port=%s" % cnf["DB::Port"]
3272         self.db_pg   = create_engine(connstr, echo=self.debug)
3273         self.db_meta = MetaData()
3274         self.db_meta.bind = self.db_pg
# sessionmaker() call continues on elided lines (its remaining keyword
# arguments are not visible in this excerpt).
3275         self.db_smaker = sessionmaker(bind=self.db_pg,
3279         self.__setuptables()
3280         self.__setupmappers()
# Belongs to the session() accessor whose def line is elided just above:
# hands out a new Session from the shared sessionmaker.
3283         return self.db_smaker()
3285 __all__.append('DBConn')