5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
47 import simplejson as json
49 from datetime import datetime, timedelta
50 from errno import ENOENT
51 from tempfile import mkstemp, mkdtemp
53 from inspect import getargspec
56 from sqlalchemy import create_engine, Table, MetaData, Column, Integer
57 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
58 backref, MapperExtension, EXT_CONTINUE
59 from sqlalchemy import types as sqltypes
61 # Don't remove this, we re-export the exceptions to scripts which import us
62 from sqlalchemy.exc import *
63 from sqlalchemy.orm.exc import NoResultFound
65 # Only import Config until Queue stuff is changed to store its config
67 from config import Config
68 from textutils import fix_maintainer
69 from dak_exceptions import DBUpdateError, NoSourceFieldError
71 # suppress some deprecation warnings in squeeze related to sqlalchemy
73 warnings.filterwarnings('ignore', \
74 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
76 # TODO: sqlalchemy needs some extra configuration to correctly reflect
77 # the ind_deb_contents_* indexes - we ignore the warnings at the moment
78 warnings.filterwarnings("ignore", 'Predicate of partial index', SAWarning)
81 ################################################################################
83 # Patch in support for the debversion field type so that it works during
87 # that is for sqlalchemy 0.6
88 UserDefinedType = sqltypes.UserDefinedType
90 # this one for sqlalchemy 0.5
91 UserDefinedType = sqltypes.TypeEngine
# Custom SQLAlchemy column type so reflection understands PostgreSQL's
# 'debversion' type used by the dak database.
# NOTE(review): several lines are elided from this chunk — the method bodies
# and the try/else around the version-specific registration are missing, so
# the code below is incomplete as shown.
class DebVersion(UserDefinedType):
    def get_col_spec(self):
    def bind_processor(self, dialect):
    # ' = None' is needed for sqlalchemy 0.5:
    def result_processor(self, dialect, coltype = None):

sa_major_version = sqlalchemy.__version__[0:3]
if sa_major_version in ["0.5", "0.6"]:
    # Teach the reflection machinery about the custom column type.
    from sqlalchemy.databases import postgres
    postgres.ischema_names['debversion'] = DebVersion
# NOTE(review): the raise below is presumably inside an 'else:' branch not
# visible in this chunk.
raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
111 ################################################################################
113 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
115 ################################################################################
def session_wrapper(fn):
    """
    Wrapper around common ".., session=None):" handling. If the wrapped
    function is called without passing 'session', we create a local one
    and destroy it when the function ends.

    Also attaches a commit_or_flush method to the session; if we created a
    local session, this is a synonym for session.commit(), otherwise it is a
    synonym for session.flush().
    """
    # NOTE(review): several lines are elided from this chunk (the elif/else
    # branches and, presumably, a try/finally around the call), so the
    # control flow below is incomplete as shown.
    def wrapped(*args, **kwargs):
        private_transaction = False

        # Find the session object
        session = kwargs.get('session')

        if len(args) <= len(getargspec(fn)[0]) - 1:
            # No session specified as last argument or in kwargs
            private_transaction = True
            session = kwargs['session'] = DBConn().session()
        # Session is last argument in args
            session = args[-1] = DBConn().session()
            private_transaction = True

        if private_transaction:
            session.commit_or_flush = session.commit
            session.commit_or_flush = session.flush

            return fn(*args, **kwargs)

        if private_transaction:
            # We created a session; close it.

    # Preserve the wrapped function's metadata (Python 2 style).
    wrapped.__doc__ = fn.__doc__
    wrapped.func_name = fn.func_name

__all__.append('session_wrapper')
166 ################################################################################
class ORMObject(object):
    """
    ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
    derived classes must implement the properties() method.
    """
    # NOTE(review): a number of lines are elided from this chunk — several
    # 'def' headers (json, classname, __repr__, __str__, validate), else
    # branches and decorators — so the bodies below are incomplete as shown.

    def properties(self):
        """
        This method should be implemented by all derived classes and returns a
        list of the important properties. The properties 'created' and
        'modified' will be added automatically. A suffix '_count' should be
        added to properties that are lists or query objects. The most important
        property name should be returned as the first element in the list
        because it is used by repr().
        """

    # json(): serialize the object — its 'def' line is not visible here.
        """
        Returns a JSON representation of the object based on the properties
        returned from the properties() method.
        """
        # add created and modified
        all_properties = self.properties() + ['created', 'modified']
        for property in all_properties:
            # check for list or query
            if property[-6:] == '_count':
                real_property = property[:-6]
                if not hasattr(self, real_property):
                value = getattr(self, real_property)
                if hasattr(value, '__len__'):
                elif hasattr(value, 'count'):
                    # query objects expose count() instead of __len__
                    value = value.count()
                    raise KeyError('Do not understand property %s.' % property)
            if not hasattr(self, property):
            value = getattr(self, property)
            elif isinstance(value, ORMObject):
                # use repr() for ORMObject types
            # we want a string for all other types because json cannot
            data[property] = value
        return json.dumps(data)

        """
        Returns the name of the class.
        """
        return type(self).__name__

        """
        Returns a short string representation of the object using the first
        element from the properties() method.
        """
        primary_property = self.properties()[0]
        value = getattr(self, primary_property)
        return '<%s %s>' % (self.classname(), str(value))

        """
        Returns a human readable form of the object using the properties()
        """
        return '<%s %s>' % (self.classname(), self.json())

    def not_null_constraints(self):
        """
        Returns a list of properties that must be not NULL. Derived classes
        should override this method if needed.
        """

    # Template used by validate() when a NOT NULL constraint is violated.
    validation_message = \
        "Validation failed because property '%s' must not be empty in object\n%s"

        """
        This function validates the not NULL constraints as returned by
        not_null_constraints(). It raises the DBUpdateError exception if
        """
        for property in self.not_null_constraints():
            # TODO: It is a bit awkward that the mapper configuration allow
            # directly setting the numeric _id columns. We should get rid of it
            if hasattr(self, property + '_id') and \
                getattr(self, property + '_id') is not None:
            if not hasattr(self, property) or getattr(self, property) is None:
                raise DBUpdateError(self.validation_message % \
                    (property, str(self)))

    def get(cls, primary_key, session = None):
        """
        This is a support function that allows getting an object by its primary
        key.

        Architecture.get(3[, session])

        instead of the more verbose

        session.query(Architecture).get(3)
        """
        return session.query(cls).get(primary_key)

__all__.append('ORMObject')
292 ################################################################################
class Validator(MapperExtension):
    """
    This class calls the validate() method for each instance for the
    'before_update' and 'before_insert' events. A global object validator is
    used for configuring the individual mappers.
    """
    # NOTE(review): method bodies (presumably calling instance.validate() and
    # returning EXT_CONTINUE) are not visible in this chunk.

    def before_update(self, mapper, connection, instance):

    def before_insert(self, mapper, connection, instance):

# Shared extension instance used when configuring the individual mappers.
validator = Validator()
311 ################################################################################
class Architecture(ORMObject):
    """ORM class for a single row of the 'architecture' table.

    Instances compare equal to plain strings holding the architecture
    name, so callers can write e.g. ``arch == 'amd64'``.
    """

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Support direct comparison against a plain architecture name.
        if not isinstance(val, str):
            # Defer to the normal comparison machinery for anything else.
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        if not isinstance(val, str):
            # Defer to the normal comparison machinery for anything else.
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # 'arch_string' comes first because ORMObject.repr() shows it.
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']

__all__.append('Architecture')
def get_architecture(architecture, session=None):
    """
    Returns database id for given C{architecture}.

    @type architecture: string
    @param architecture: The name of the architecture

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Architecture object for the given arch (None if not present)
    """
    # NOTE(review): the try:/return lines are elided from this chunk.
    q = session.query(Architecture).filter_by(arch_string=architecture)

    except NoResultFound:

__all__.append('get_architecture')
# TODO: should be removed because the implementation is too trivial
def get_architecture_suites(architecture, session=None):
    """
    Returns list of Suite objects for given C{architecture} name

    @type architecture: str
    @param architecture: Architecture name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: list of Suite objects for the given name (may be empty)
    """
    # Delegates lookup to get_architecture() and uses the 'suites' relation.
    return get_architecture(architecture, session).suites

__all__.append('get_architecture_suites')
384 ################################################################################
class Archive(object):
    # Trivial mapped class for the 'archive' table.
    # NOTE(review): the __init__ body and the __repr__ 'def' line are elided
    # from this chunk.
    def __init__(self, *args, **kwargs):

        return '<Archive %s>' % self.archive_name

__all__.append('Archive')
def get_archive(archive, session=None):
    """
    returns database id for given C{archive}.

    @type archive: string
    @param archive: the name of the archive

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Archive object for the given name (None if not present)
    """
    # Archive names are matched case-insensitively: stored lower-case.
    archive = archive.lower()

    # NOTE(review): the try:/return lines are elided from this chunk.
    q = session.query(Archive).filter_by(archive_name=archive)

    except NoResultFound:

__all__.append('get_archive')
422 ################################################################################
class BinAssociation(object):
    # Trivial mapped class for the 'bin_associations' table (binary <-> suite).
    # NOTE(review): the __init__ body and the __repr__ 'def' line are elided
    # from this chunk.
    def __init__(self, *args, **kwargs):

        return '<BinAssociation %s (%s, %s)>' % (self.ba_id, self.binary, self.suite)

__all__.append('BinAssociation')
433 ################################################################################
class BinContents(object):
    # Trivial mapped class for the 'bin_contents' table (binary <-> filename).
    # NOTE(review): the __init__ body and the __repr__ 'def' line are elided
    # from this chunk.
    def __init__(self, *args, **kwargs):

        return '<BinContents (%s, %s)>' % (self.binary, self.filename)

__all__.append('BinContents')
444 ################################################################################
class DBBinary(ORMObject):
    """ORM class for a row of the 'binaries' table."""
    # NOTE(review): lines are elided from this chunk — the tail of the
    # __init__ parameter list (binarytype and the assignment of 'source')
    # and the end of not_null_constraints() are missing.
    def __init__(self, package = None, source = None, version = None, \
        maintainer = None, architecture = None, poolfile = None, \
        self.package = package
        self.version = version
        self.maintainer = maintainer
        self.architecture = architecture
        self.poolfile = poolfile
        self.binarytype = binarytype

    def properties(self):
        # 'package' first: it is what ORMObject.repr() displays.
        return ['package', 'version', 'maintainer', 'source', 'architecture', \
            'poolfile', 'binarytype', 'fingerprint', 'install_date', \
            'suites_count', 'binary_id']

    def not_null_constraints(self):
        return ['package', 'version', 'maintainer', 'source', 'poolfile', \

__all__.append('DBBinary')
def get_suites_binary_in(package, session=None):
    """
    Returns list of Suite objects which given C{package} name is in

    @param package: DBBinary package name to search for

    @return: list of Suite objects for the given package
    """
    # any() produces an EXISTS subquery over the suite<->binary relation.
    return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()

__all__.append('get_suites_binary_in')
def get_binaries_from_source_id(source_id, session=None):
    """
    Returns list of DBBinary objects for given C{source_id}

    @param source_id: source_id to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: list of DBBinary objects for the given name (may be empty)
    """
    return session.query(DBBinary).filter_by(source_id=source_id).all()

__all__.append('get_binaries_from_source_id')
def get_binary_from_name_suite(package, suitename, session=None):
    ### For dak examine-package
    ### XXX: Doesn't use object API yet
    # NOTE(review): parts of the SQL (several join conditions) are elided
    # from this chunk.  Also note the query is built with %-interpolation
    # rather than bind parameters, so 'package'/'suitename' must come from
    # trusted input only.
    sql = """SELECT DISTINCT(b.package), b.version, c.name, su.suite_name
        FROM binaries b, files fi, location l, component c, bin_associations ba, suite su
        WHERE b.package='%(package)s'
        AND fi.location = l.id
        AND l.component = c.id
        AND su.suite_name %(suitename)s
        ORDER BY b.version DESC"""

    return session.execute(sql % {'package': package, 'suitename': suitename})

__all__.append('get_binary_from_name_suite')
def get_binary_components(package, suitename, arch, session=None):
    # Check for packages that have moved from one component to another
    # NOTE(review): the tail of the query string appears elided from this
    # chunk.  Bind parameters (:package etc.) are used, which is the safe
    # pattern for untrusted input.
    query = """SELECT c.name FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f
        WHERE b.package=:package AND s.suite_name=:suitename
        AND (a.arch_string = :arch OR a.arch_string = 'all')
        AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
        AND f.location = l.id
        AND l.component = c.id

    vals = {'package': package, 'suitename': suitename, 'arch': arch}

    return session.execute(query, vals)

__all__.append('get_binary_components')
542 ################################################################################
class BinaryACL(object):
    # Trivial mapped class for the 'binary_acl' table.
    # NOTE(review): the __init__ body and the __repr__ 'def' line are elided
    # from this chunk.
    def __init__(self, *args, **kwargs):

        return '<BinaryACL %s>' % self.binary_acl_id

__all__.append('BinaryACL')
553 ################################################################################
class BinaryACLMap(object):
    # Trivial mapped class for the 'binary_acl_map' table.
    # NOTE(review): the __init__ body and the __repr__ 'def' line are elided
    # from this chunk.
    def __init__(self, *args, **kwargs):

        return '<BinaryACLMap %s>' % self.binary_acl_map_id

__all__.append('BinaryACLMap')
564 ################################################################################
569 ArchiveDir "%(archivepath)s";
570 OverrideDir "%(overridedir)s";
571 CacheDir "%(cachedir)s";
576 Packages::Compress ". bzip2 gzip";
577 Sources::Compress ". bzip2 gzip";
582 bindirectory "incoming"
587 BinOverride "override.sid.all3";
588 BinCacheDB "packages-accepted.db";
590 FileList "%(filelist)s";
593 Packages::Extensions ".deb .udeb";
596 bindirectory "incoming/"
599 BinOverride "override.sid.all3";
600 SrcOverride "override.sid.all3.src";
601 FileList "%(filelist)s";
class BuildQueue(object):
    """Represents a build queue directory (e.g. buildd incoming) and the
    apt-ftparchive metadata generated for it."""
    # NOTE(review): many lines are elided throughout this class in this
    # chunk (loop headers, try/except blocks, decorators, cleanup code), so
    # the method bodies below are incomplete as shown.
    def __init__(self, *args, **kwargs):

        return '<BuildQueue %s>' % self.queue_name

    def write_metadata(self, starttime, force=False):
        """Regenerate Packages/Sources/Release metadata for this queue
        directory using apt-ftparchive, and sign the Release file."""
        # Do we write out metafiles?
        if not (force or self.generate_metadata):

        session = DBConn().session().object_session(self)

        fl_fd = fl_name = ac_fd = ac_name = None
        # All architectures except 'source'; used for the Release header.
        arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
        startdir = os.getcwd()

        # Grab files we want to include
        newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
        # Write file list with newer files
        (fl_fd, fl_name) = mkstemp()
        os.write(fl_fd, '%s\n' % n.fullpath)

        # Write minimal apt.conf
        # TODO: Remove hardcoding from template
        (ac_fd, ac_name) = mkstemp()
        os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
            'cachedir': cnf["Dir::Cache"],
            'overridedir': cnf["Dir::Override"],

        # Run apt-ftparchive generate
        os.chdir(os.path.dirname(ac_name))
        os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))

        # Run apt-ftparchive release
        # TODO: Eww - fix this
        bname = os.path.basename(self.path)

        # We have to remove the Release file otherwise it'll be included in the
        os.unlink(os.path.join(bname, 'Release'))

        os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))

        # Crude hack with open and append, but this whole section is and should be redone.
        if self.notautomatic:
            release=open("Release", "a")
            release.write("NotAutomatic: yes")

        keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
        if cnf.has_key("Dinstall::SigningPubKeyring"):
            keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]

        os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))

        # Move the files if we got this far
        os.rename('Release', os.path.join(bname, 'Release'))
        os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))

        # Clean up any left behind files

    def clean_and_update(self, starttime, Logger, dryrun=False):
        """WARNING: This routine commits for you"""
        session = DBConn().session().object_session(self)

        if self.generate_metadata and not dryrun:
            self.write_metadata(starttime)

        # Grab files older than our execution time
        older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()

            Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
            Logger.log(["I: Removing %s from the queue" % o.fullpath])
            os.unlink(o.fullpath)
            # If it wasn't there, don't worry
            if e.errno == ENOENT:
            # TODO: Replace with proper logging call
            Logger.log(["E: Could not remove %s" % o.fullpath])

        # Remove metadata/links in the directory that no longer have a DB row.
        for f in os.listdir(self.path):
            if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
            r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
            except NoResultFound:
                fp = os.path.join(self.path, f)
                Logger.log(["I: Would remove unused link %s" % fp])
                Logger.log(["I: Removing unused link %s" % fp])
                Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])

    def add_file_from_pool(self, poolfile):
        """Copies a file into the pool. Assumes that the PoolFile object is
        attached to the same SQLAlchemy session as the Queue object is.

        The caller is responsible for committing after calling this function."""
        poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]

        # Check if we have a file of this name or this ID already
        for f in self.queuefiles:
            if f.fileid is not None and f.fileid == poolfile.file_id or \
                f.poolfile.filename == poolfile_basename:
                # In this case, update the BuildQueueFile entry so we
                # don't remove it too early
                f.lastused = datetime.now()
                DBConn().session().object_session(poolfile).add(f)

        # Prepare BuildQueueFile object
        qf = BuildQueueFile()
        qf.build_queue_id = self.queue_id
        qf.lastused = datetime.now()
        qf.filename = poolfile_basename

        targetpath = poolfile.fullpath
        queuepath = os.path.join(self.path, poolfile_basename)

            # We need to copy instead of symlink
            utils.copy(targetpath, queuepath)
            # NULL in the fileid field implies a copy
            os.symlink(targetpath, queuepath)
            qf.fileid = poolfile.file_id

        # Get the same session as the PoolFile is using and add the qf to it
        DBConn().session().object_session(poolfile).add(qf)
__all__.append('BuildQueue')

def get_build_queue(queuename, session=None):
    """
    Returns BuildQueue object for given C{queue name}, creating it if it does not
    exist.

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: BuildQueue object for the given queue
    """
    # NOTE(review): the try:/return lines are elided from this chunk.
    q = session.query(BuildQueue).filter_by(queue_name=queuename)

    except NoResultFound:

__all__.append('get_build_queue')
832 ################################################################################
class BuildQueueFile(object):
    # Mapped class for the 'build_queue_files' table.
    # NOTE(review): the __init__ body, the __repr__ 'def' line and the
    # @property/def lines for 'fullpath' are elided from this chunk.
    def __init__(self, *args, **kwargs):

        return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)

        # fullpath: absolute path of this file inside its queue directory.
        return os.path.join(self.buildqueue.path, self.filename)

__all__.append('BuildQueueFile')
848 ################################################################################
class ChangePendingBinary(object):
    # Trivial mapped class for the 'changes_pending_binaries' table.
    # NOTE(review): the __init__ body and the __repr__ 'def' line are elided
    # from this chunk.
    def __init__(self, *args, **kwargs):

        return '<ChangePendingBinary %s>' % self.change_pending_binary_id

__all__.append('ChangePendingBinary')
859 ################################################################################
class ChangePendingFile(object):
    # Trivial mapped class for the 'changes_pending_files' table.
    # NOTE(review): the __init__ body and the __repr__ 'def' line are elided
    # from this chunk.
    def __init__(self, *args, **kwargs):

        return '<ChangePendingFile %s>' % self.change_pending_file_id

__all__.append('ChangePendingFile')
870 ################################################################################
class ChangePendingSource(object):
    # Trivial mapped class for the 'changes_pending_source' table.
    # NOTE(review): the __init__ body and the __repr__ 'def' line are elided
    # from this chunk.
    def __init__(self, *args, **kwargs):

        return '<ChangePendingSource %s>' % self.change_pending_source_id

__all__.append('ChangePendingSource')
881 ################################################################################
class Component(object):
    # Mapped class for the 'component' table (main/contrib/non-free).
    # Instances compare equal to plain strings holding the component name.
    # NOTE(review): the __init__ body and the __repr__ 'def' line are elided
    # from this chunk.
    def __init__(self, *args, **kwargs):

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.component_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.component_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

        return '<Component %s>' % self.component_name

__all__.append('Component')
def get_component(component, session=None):
    """
    Returns database id for given C{component}.

    @type component: string
    @param component: The name of the override type

    @return: the database id for the given component
    """
    # Component names are matched case-insensitively: stored lower-case.
    component = component.lower()

    # NOTE(review): the try:/return lines are elided from this chunk.
    q = session.query(Component).filter_by(component_name=component)

    except NoResultFound:

__all__.append('get_component')
928 ################################################################################
class DBConfig(object):
    # Trivial mapped class for the 'config' table.
    # NOTE(review): the __init__ body and the __repr__ 'def' line are elided
    # from this chunk.
    def __init__(self, *args, **kwargs):

        return '<DBConfig %s>' % self.name

__all__.append('DBConfig')
939 ################################################################################
def get_or_set_contents_file_id(filename, session=None):
    """
    Returns database id for given filename.

    If no matching file is found, a row is inserted.

    @type filename: string
    @param filename: The filename
    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.

    @return: the database id for the given component
    """
    # NOTE(review): the try:/session.add()/return lines are elided from this
    # chunk; the insert-on-miss flow below is incomplete as shown.
    q = session.query(ContentFilename).filter_by(filename=filename)

        ret = q.one().cafilename_id
    except NoResultFound:
        # No existing row: create one and flush/commit so the id is assigned.
        cf = ContentFilename()
        cf.filename = filename
        session.commit_or_flush()
        ret = cf.cafilename_id

__all__.append('get_or_set_contents_file_id')
def get_contents(suite, overridetype, section=None, session=None):
    """
    Returns contents for a suite / overridetype combination, limiting
    to a section if not None.

    @param suite: Suite object

    @type overridetype: OverrideType
    @param overridetype: OverrideType object

    @type section: Section
    @param section: Optional section object to limit results to

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: ResultsProxy object set up to return tuples of (filename, section,
    """
    # find me all of the contents for a given suite
    # NOTE(review): part of the SELECT column list is elided from this chunk.
    contents_q = """SELECT (p.path||'/'||n.file) AS fn,
        FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
        JOIN content_file_names n ON (c.filename=n.id)
        JOIN binaries b ON (b.id=c.binary_pkg)
        JOIN override o ON (o.package=b.package)
        JOIN section s ON (s.id=o.section)
        WHERE o.suite = :suiteid AND o.type = :overridetypeid
        AND b.type=:overridetypename"""

    vals = {'suiteid': suite.suite_id,
        'overridetypeid': overridetype.overridetype_id,
        'overridetypename': overridetype.overridetype}

    if section is not None:
        # Restrict to a single section via an extra bind parameter.
        contents_q += " AND s.id = :sectionid"
        vals['sectionid'] = section.section_id

    contents_q += " ORDER BY fn"

    return session.execute(contents_q, vals)

__all__.append('get_contents')
1025 ################################################################################
class ContentFilepath(object):
    # Trivial mapped class for the 'content_file_paths' table.
    # NOTE(review): the __init__ body and the __repr__ 'def' line are elided
    # from this chunk.
    def __init__(self, *args, **kwargs):

        return '<ContentFilepath %s>' % self.filepath

__all__.append('ContentFilepath')
def get_or_set_contents_path_id(filepath, session=None):
    """
    Returns database id for given path.

    If no matching file is found, a row is inserted.

    @type filepath: string
    @param filepath: The filepath

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.

    @return: the database id for the given path
    """
    # NOTE(review): the try:/session.add()/return lines are elided from this
    # chunk; the insert-on-miss flow below is incomplete as shown.
    q = session.query(ContentFilepath).filter_by(filepath=filepath)

        ret = q.one().cafilepath_id
    except NoResultFound:
        # No existing row: create one and flush/commit so the id is assigned.
        cf = ContentFilepath()
        cf.filepath = filepath
        session.commit_or_flush()
        ret = cf.cafilepath_id

__all__.append('get_or_set_contents_path_id')
1070 ################################################################################
class ContentAssociation(object):
    # Trivial mapped class for the 'content_associations' table.
    # NOTE(review): the __init__ body and the __repr__ 'def' line are elided
    # from this chunk.
    def __init__(self, *args, **kwargs):

        return '<ContentAssociation %s>' % self.ca_id

__all__.append('ContentAssociation')
def insert_content_paths(binary_id, fullpaths, session=None):
    """
    Make sure given path is associated with given binary id

    @type binary_id: int
    @param binary_id: the id of the binary
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session. If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code. If not passed, a commit
    will be performed at the end of the function, otherwise the caller is
    responsible for committing.

    @return: True upon success
    """
    # NOTE(review): the try/except/commit/return lines are elided from this
    # chunk; the transaction handling below is incomplete as shown.
    privatetrans = False
        session = DBConn().session()

    def generate_path_dicts():
        # Normalize './'-prefixed paths before insertion.
        for fullpath in fullpaths:
            if fullpath.startswith( './' ):
                fullpath = fullpath[2:]

            yield {'filename':fullpath, 'id': binary_id }

    for d in generate_path_dicts():
        session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",

        traceback.print_exc()

        # Only rollback if we set up the session ourself

__all__.append('insert_content_paths')
1134 ################################################################################
class DSCFile(object):
    # Trivial mapped class for the 'dsc_files' table.
    # NOTE(review): the __init__ body and the __repr__ 'def' line are elided
    # from this chunk.
    def __init__(self, *args, **kwargs):

        return '<DSCFile %s>' % self.dscfile_id

__all__.append('DSCFile')
def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
    """
    Returns a list of DSCFiles which may be empty

    @type dscfile_id: int (optional)
    @param dscfile_id: the dscfile_id of the DSCFiles to find

    @type source_id: int (optional)
    @param source_id: the source id related to the DSCFiles to find

    @type poolfile_id: int (optional)
    @param poolfile_id: the poolfile id related to the DSCFiles to find

    @return: Possibly empty list of DSCFiles
    """
    # Build the query incrementally: each non-None filter narrows the result.
    # NOTE(review): the final 'return q.all()' appears elided from this chunk.
    q = session.query(DSCFile)

    if dscfile_id is not None:
        q = q.filter_by(dscfile_id=dscfile_id)

    if source_id is not None:
        q = q.filter_by(source_id=source_id)

    if poolfile_id is not None:
        q = q.filter_by(poolfile_id=poolfile_id)

__all__.append('get_dscfiles')
1178 ################################################################################
class PoolFile(ORMObject):
    """ORM class for a row of the 'files' table (a file in the pool)."""
    # NOTE(review): lines are elided from this chunk — the tail of the
    # __init__ parameter list and the 'def'/@property lines for fullpath
    # are missing.
    def __init__(self, filename = None, location = None, filesize = -1, \
        self.filename = filename
        self.location = location
        self.filesize = filesize
        self.md5sum = md5sum

        # fullpath: absolute path of the file under its location.
        return os.path.join(self.location.path, self.filename)

    def is_valid(self, filesize = -1, md5sum = None):\
        return self.filesize == filesize and self.md5sum == md5sum

    def properties(self):
        # 'filename' first: it is what ORMObject.repr() displays.
        return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
            'sha256sum', 'location', 'source', 'binary', 'last_used']

    def not_null_constraints(self):
        return ['filename', 'md5sum', 'location']

__all__.append('PoolFile')
def check_poolfile(filename, filesize, md5sum, location_id, session=None):
    """
    Returns a tuple:
    (ValidFileFound [boolean], PoolFile object or None)

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @param filesize: the size of the file to check against the DB

    @type md5sum: string
    @param md5sum: the md5sum of the file to check against the DB

    @type location_id: int
    @param location_id: the id of the location to look in

    @return: Tuple of length 2.
        - If valid pool file found: (C{True}, C{PoolFile object})
        - If valid pool file not found:
            - (C{False}, C{None}) if no file found
            - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
    """
    # NOTE(review): the assignments of 'valid' are elided from this chunk.
    poolfile = session.query(Location).get(location_id). \
        files.filter_by(filename=filename).first()
    if poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum):

    return (valid, poolfile)

__all__.append('check_poolfile')
# TODO: the implementation can trivially be inlined at the place where the
# function is called
def get_poolfile_by_id(file_id, session=None):
    """
    Returns a PoolFile objects or None for the given id

    @param file_id: the id of the file to look for

    @rtype: PoolFile or None
    @return: either the PoolFile object or None
    """
    return session.query(PoolFile).get(file_id)

__all__.append('get_poolfile_by_id')
def get_poolfile_like_name(filename, session=None):
    """
    Returns an array of PoolFile objects which are like the given name

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @return: array of PoolFile objects
    """
    # TODO: There must be a way of properly using bind parameters with %FOO%
    # NOTE(review): the final 'return q.all()' appears elided from this chunk.
    q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))

__all__.append('get_poolfile_like_name')
# Create a new PoolFile row from a checksum dict produced during upload
# processing. `datadict` must provide the keys "size", "md5sum",
# "sha1sum" and "sha256sum" (read below).
1278 def add_poolfile(filename, datadict, location_id, session=None):
1280 Add a new file to the pool
1282 @type filename: string
1283 @param filename: filename
1285 @type datadict: dict
1286 @param datadict: dict with needed data
1288 @type location_id: int
1289 @param location_id: database id of the location
1292 @return: the PoolFile object created
1294 poolfile = PoolFile()
1295 poolfile.filename = filename
1296 poolfile.filesize = datadict["size"]
1297 poolfile.md5sum = datadict["md5sum"]
1298 poolfile.sha1sum = datadict["sha1sum"]
1299 poolfile.sha256sum = datadict["sha256sum"]
1300 poolfile.location_id = location_id
1302 session.add(poolfile)
1303 # Flush to get a file id (NB: This is not a commit)
1308 __all__.append('add_poolfile')
1310 ################################################################################
# OpenPGP fingerprint row; linked to keyring/uid by the mapper elsewhere.
1312 class Fingerprint(ORMObject):
1313 def __init__(self, fingerprint = None):
1314 self.fingerprint = fingerprint
# Attributes shown by ORMObject's generic repr helpers (list continues on
# an elided line in this excerpt).
1316 def properties(self):
1317 return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
1320 def not_null_constraints(self):
1321 return ['fingerprint']
1323 __all__.append('Fingerprint')
# Read-only lookup: returns the Fingerprint row or None.
1326 def get_fingerprint(fpr, session=None):
1328 Returns Fingerprint object for given fpr.
1331 @param fpr: The fpr to find / add
1333 @type session: SQLAlchemy
1334 @param session: Optional SQL session object (a temporary one will be
1335 generated if not supplied).
1338 @return: the Fingerprint object for the given fpr or None
1341 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1345 except NoResultFound:
1350 __all__.append('get_fingerprint')
# Get-or-create variant: inserts a new row when the fpr is unknown.
1353 def get_or_set_fingerprint(fpr, session=None):
1355 Returns Fingerprint object for given fpr.
1357 If no matching fpr is found, a row is inserted.
1360 @param fpr: The fpr to find / add
1362 @type session: SQLAlchemy
1363 @param session: Optional SQL session object (a temporary one will be
1364 generated if not supplied). If not passed, a commit will be performed at
1365 the end of the function, otherwise the caller is responsible for committing.
1366 A flush will be performed either way.
1369 @return: the Fingerprint object for the given fpr
1372 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1376 except NoResultFound:
1377 fingerprint = Fingerprint()
1378 fingerprint.fingerprint = fpr
1379 session.add(fingerprint)
1380 session.commit_or_flush()
1385 __all__.append('get_or_set_fingerprint')
1387 ################################################################################
1389 # Helper routine for Keyring class
# Build a display name from the cn/mn/sn attributes of an LDAP entry,
# skipping empty values and placeholder "-" entries.
1390 def get_ldap_name(entry):
1392 for k in ["cn", "mn", "sn"]:
1394 if ret and ret[0] != "" and ret[0] != "-":
1396 return " ".join(name)
1398 ################################################################################
# In-memory representation of a GPG keyring plus its database row.
# load_keys() shells out to gpg and parses its --with-colons output.
1400 class Keyring(object):
1401 gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
1402 " --with-colons --fingerprint --fingerprint"
1407 def __init__(self, *args, **kwargs):
1411 return '<Keyring %s>' % self.keyring_name
# Undo gpg's \xNN escaping in user-id strings.
1413 def de_escape_gpg_str(self, txt):
1414 esclist = re.split(r'(\\x..)', txt)
1415 for x in range(1,len(esclist),2):
1416 esclist[x] = "%c" % (int(esclist[x][2:],16))
1417 return "".join(esclist)
1419 def parse_address(self, uid):
1420 """parses uid and returns a tuple of real name and email address"""
1422 (name, address) = email.Utils.parseaddr(uid)
1423 name = re.sub(r"\s*[(].*[)]", "", name)
1424 name = self.de_escape_gpg_str(name)
1427 return (name, address)
# Populate self.keys / self.fpr_lookup from the on-disk keyring.
1429 def load_keys(self, keyring):
1430 if not self.keyring_id:
1431 raise Exception('Must be initialized with database information')
1433 k = os.popen(self.gpg_invocation % keyring, "r")
1437 for line in k.xreadlines():
1438 field = line.split(":")
1439 if field[0] == "pub":
1442 (name, addr) = self.parse_address(field[9])
1444 self.keys[key]["email"] = addr
1445 self.keys[key]["name"] = name
1446 self.keys[key]["fingerprints"] = []
1448 elif key and field[0] == "sub" and len(field) >= 12:
1449 signingkey = ("s" in field[11])
1450 elif key and field[0] == "uid":
1451 (name, addr) = self.parse_address(field[9])
1452 if "email" not in self.keys[key] and "@" in addr:
1453 self.keys[key]["email"] = addr
1454 self.keys[key]["name"] = name
1455 elif signingkey and field[0] == "fpr":
1456 self.keys[key]["fingerprints"].append(field[9])
1457 self.fpr_lookup[field[9]] = key
# Cross-reference loaded keys against the Debian LDAP directory and
# record the LDAP uid on each matching key.
1459 def import_users_from_ldap(self, session):
1463 LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
1464 LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
1466 l = ldap.open(LDAPServer)
1467 l.simple_bind_s("","")
1468 Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
1469 "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
1470 ["uid", "keyfingerprint", "cn", "mn", "sn"])
1472 ldap_fin_uid_id = {}
1479 uid = entry["uid"][0]
1480 name = get_ldap_name(entry)
1481 fingerprints = entry["keyFingerPrint"]
1483 for f in fingerprints:
# NOTE(review): fpr_lookup.get() may return None here; the `not in
# self.keys` guard (with its elided body) presumably skips unknown
# fingerprints - confirm against the full file.
1484 key = self.fpr_lookup.get(f, None)
1485 if key not in self.keys:
1487 self.keys[key]["uid"] = uid
1491 keyid = get_or_set_uid(uid, session).uid_id
1492 byuid[keyid] = (uid, name)
1493 byname[uid] = (keyid, name)
1495 return (byname, byuid)
# Derive uids from key email addresses using the `format` template
# (e.g. "%s"), falling back to an "invalid-uid" placeholder.
1497 def generate_users_from_keyring(self, format, session):
1501 for x in self.keys.keys():
1502 if "email" not in self.keys[x]:
1504 self.keys[x]["uid"] = format % "invalid-uid"
1506 uid = format % self.keys[x]["email"]
1507 keyid = get_or_set_uid(uid, session).uid_id
1508 byuid[keyid] = (uid, self.keys[x]["name"])
1509 byname[uid] = (keyid, self.keys[x]["name"])
1510 self.keys[x]["uid"] = uid
1513 uid = format % "invalid-uid"
1514 keyid = get_or_set_uid(uid, session).uid_id
1515 byuid[keyid] = (uid, "ungeneratable user id")
1516 byname[uid] = (keyid, "ungeneratable user id")
1518 return (byname, byuid)
1520 __all__.append('Keyring')
# Look up the database row for a keyring by name; None when unknown.
1523 def get_keyring(keyring, session=None):
1525 If C{keyring} does not have an entry in the C{keyrings} table yet, return None
1526 If C{keyring} already has an entry, simply return the existing Keyring
1528 @type keyring: string
1529 @param keyring: the keyring name
1532 @return: the Keyring object for this keyring
1535 q = session.query(Keyring).filter_by(keyring_name=keyring)
1539 except NoResultFound:
1542 __all__.append('get_keyring')
1544 ################################################################################
# Mapping table row: which keyring may upload to which ACL set.
1546 class KeyringACLMap(object):
1547 def __init__(self, *args, **kwargs):
1551 return '<KeyringACLMap %s>' % self.keyring_acl_map_id
1553 __all__.append('KeyringACLMap')
1555 ################################################################################
# A .changes upload recorded in the database.
1557 class DBChange(object):
1558 def __init__(self, *args, **kwargs):
1562 return '<DBChange %s>' % self.changesname
# Detach this upload from any policy queue and drop its file references.
1564 def clean_from_queue(self):
1565 session = DBConn().session().object_session(self)
1567 # Remove changes_pool_files entries
1570 # Remove changes_pending_files references
1573 # Clear out of queue
1574 self.in_queue = None
1575 self.approved_for_id = None
1577 __all__.append('DBChange')
# Look up a DBChange row by its .changes filename.
1580 def get_dbchange(filename, session=None):
1582 returns DBChange object for given C{filename}.
1584 @type filename: string
1585 @param filename: the name of the file
1587 @type session: Session
1588 @param session: Optional SQLA session object (a temporary one will be
1589 generated if not supplied)
1592 @return: DBChange object for the given filename (C{None} if not present)
1595 q = session.query(DBChange).filter_by(changesname=filename)
1599 except NoResultFound:
1602 __all__.append('get_dbchange')
1604 ################################################################################
# A storage location (archive path + component) for pool files.
1606 class Location(ORMObject):
1607 def __init__(self, path = None):
1609 # the column 'type' should go away, see comment at mapper
1610 self.archive_type = 'pool'
1612 def properties(self):
1613 return ['path', 'archive_type', 'component', 'files_count']
1615 def not_null_constraints(self):
1616 return ['path', 'archive_type']
1618 __all__.append('Location')
# Look up a Location, optionally narrowed by component and/or archive.
1621 def get_location(location, component=None, archive=None, session=None):
1623 Returns Location object for the given combination of location, component
1626 @type location: string
1627 @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}
1629 @type component: string
1630 @param component: the component name (if None, no restriction applied)
1632 @type archive: string
1633 @param archive: the archive name (if None, no restriction applied)
1635 @rtype: Location / None
1636 @return: Either a Location object or None if one can't be found
1639 q = session.query(Location).filter_by(path=location)
1641 if archive is not None:
1642 q = q.join(Archive).filter_by(archive_name=archive)
1644 if component is not None:
1645 q = q.join(Component).filter_by(component_name=component)
1649 except NoResultFound:
1652 __all__.append('get_location')
1654 ################################################################################
# A package maintainer ("Name <email>") row.
1656 class Maintainer(ORMObject):
1657 def __init__(self, name = None):
1660 def properties(self):
1661 return ['name', 'maintainer_id']
1663 def not_null_constraints(self):
# Split the stored name into the 4-tuple produced by fix_maintainer();
# empty strings when no name is set.
1666 def get_split_maintainer(self):
1667 if not hasattr(self, 'name') or self.name is None:
1668 return ('', '', '', '')
1670 return fix_maintainer(self.name.strip())
1672 __all__.append('Maintainer')
# Get-or-create: inserts a Maintainer row when the name is unknown.
1675 def get_or_set_maintainer(name, session=None):
1677 Returns Maintainer object for given maintainer name.
1679 If no matching maintainer name is found, a row is inserted.
1682 @param name: The maintainer name to add
1684 @type session: SQLAlchemy
1685 @param session: Optional SQL session object (a temporary one will be
1686 generated if not supplied). If not passed, a commit will be performed at
1687 the end of the function, otherwise the caller is responsible for committing.
1688 A flush will be performed either way.
1691 @return: the Maintainer object for the given maintainer
1694 q = session.query(Maintainer).filter_by(name=name)
1697 except NoResultFound:
1698 maintainer = Maintainer()
1699 maintainer.name = name
1700 session.add(maintainer)
1701 session.commit_or_flush()
1706 __all__.append('get_or_set_maintainer')
# Primary-key lookup; returns the Maintainer object (not just the name).
1709 def get_maintainer(maintainer_id, session=None):
1711 Return the name of the maintainer behind C{maintainer_id} or None if that
1712 maintainer_id is invalid.
1714 @type maintainer_id: int
1715 @param maintainer_id: the id of the maintainer
1718 @return: the Maintainer with this C{maintainer_id}
1721 return session.query(Maintainer).get(maintainer_id)
1723 __all__.append('get_maintainer')
1725 ################################################################################
# A reviewer comment attached to a (package, version) in the NEW queue.
1727 class NewComment(object):
1728 def __init__(self, *args, **kwargs):
1732 return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
1734 __all__.append('NewComment')
# Existence check: does any comment exist for this package/version?
1737 def has_new_comment(package, version, session=None):
1739 Returns true if the given combination of C{package}, C{version} has a comment.
1741 @type package: string
1742 @param package: name of the package
1744 @type version: string
1745 @param version: package version
1747 @type session: Session
1748 @param session: Optional SQLA session object (a temporary one will be
1749 generated if not supplied)
1755 q = session.query(NewComment)
1756 q = q.filter_by(package=package)
1757 q = q.filter_by(version=version)
1759 return bool(q.count() > 0)
1761 __all__.append('has_new_comment')
# Filtered fetch; each filter is applied only when its argument is given.
1764 def get_new_comments(package=None, version=None, comment_id=None, session=None):
1766 Returns (possibly empty) list of NewComment objects for the given
1769 @type package: string (optional)
1770 @param package: name of the package
1772 @type version: string (optional)
1773 @param version: package version
1775 @type comment_id: int (optional)
1776 @param comment_id: An id of a comment
1778 @type session: Session
1779 @param session: Optional SQLA session object (a temporary one will be
1780 generated if not supplied)
1783 @return: A (possibly empty) list of NewComment objects will be returned
1786 q = session.query(NewComment)
1787 if package is not None: q = q.filter_by(package=package)
1788 if version is not None: q = q.filter_by(version=version)
1789 if comment_id is not None: q = q.filter_by(comment_id=comment_id)
1793 __all__.append('get_new_comments')
1795 ################################################################################
# An override entry (section/priority assignment) for a package in a suite.
1797 class Override(object):
1798 def __init__(self, *args, **kwargs):
1802 return '<Override %s (%s)>' % (self.package, self.suite_id)
1804 __all__.append('Override')
# Query overrides for a package, optionally narrowed by suite(s),
# component(s) and override type(s); scalars are normalized to lists.
1807 def get_override(package, suite=None, component=None, overridetype=None, session=None):
1809 Returns Override object for the given parameters
1811 @type package: string
1812 @param package: The name of the package
1814 @type suite: string, list or None
1815 @param suite: The name of the suite (or suites if a list) to limit to. If
1816 None, don't limit. Defaults to None.
1818 @type component: string, list or None
1819 @param component: The name of the component (or components if a list) to
1820 limit to. If None, don't limit. Defaults to None.
1822 @type overridetype: string, list or None
1823 @param overridetype: The name of the overridetype (or overridetypes if a list) to
1824 limit to. If None, don't limit. Defaults to None.
1826 @type session: Session
1827 @param session: Optional SQLA session object (a temporary one will be
1828 generated if not supplied)
1831 @return: A (possibly empty) list of Override objects will be returned
1834 q = session.query(Override)
1835 q = q.filter_by(package=package)
1837 if suite is not None:
1838 if not isinstance(suite, list): suite = [suite]
1839 q = q.join(Suite).filter(Suite.suite_name.in_(suite))
1841 if component is not None:
1842 if not isinstance(component, list): component = [component]
1843 q = q.join(Component).filter(Component.component_name.in_(component))
1845 if overridetype is not None:
1846 if not isinstance(overridetype, list): overridetype = [overridetype]
1847 q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))
1851 __all__.append('get_override')
1854 ################################################################################
# One of the override types (deb / udeb / dsc).
1856 class OverrideType(object):
1857 def __init__(self, *args, **kwargs):
1861 return '<OverrideType %s>' % self.overridetype
1863 __all__.append('OverrideType')
# Look up an OverrideType row by name.
1866 def get_override_type(override_type, session=None):
1868 Returns OverrideType object for given C{override type}.
1870 @type override_type: string
1871 @param override_type: The name of the override type
1873 @type session: Session
1874 @param session: Optional SQLA session object (a temporary one will be
1875 generated if not supplied)
1878 @return: the OverrideType object for the given override type (C{None} if not found)
1881 q = session.query(OverrideType).filter_by(overridetype=override_type)
1885 except NoResultFound:
1888 __all__.append('get_override_type')
1890 ################################################################################
class DebContents(object):
    """One row of the deb contents table: a file shipped by a binary package.

    Attributes (set by the SQLAlchemy mapper): C{package} (related binary
    package object) and C{file} (the shipped path).
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Fixed typo: previously rendered as '<DebConetnts ...>'.
        return '<DebContents %s: %s>' % (self.package.package, self.file)
1899 __all__.append('DebContents')
class UdebContents(object):
    """One row of the udeb contents table: a file shipped by a udeb package.

    Attributes (set by the SQLAlchemy mapper): C{package} (related binary
    package object) and C{file} (the shipped path).
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Fixed typo: previously rendered as '<UdebConetnts ...>'.
        return '<UdebContents %s: %s>' % (self.package.package, self.file)
1909 __all__.append('UdebContents')
# Temporary contents record for a binary still sitting in a queue.
1911 class PendingBinContents(object):
1912 def __init__(self, *args, **kwargs):
1916 return '<PendingBinContents %s>' % self.contents_id
1918 __all__.append('PendingBinContents')
# Replace the recorded contents paths for (package, version, arch) with the
# given list; commits/rolls back only when it created the session itself.
1920 def insert_pending_content_paths(package,
1925 Make sure given paths are temporarily associated with given
1929 @param package: the package to associate with should have been read in from the binary control file
1930 @type fullpaths: list
1931 @param fullpaths: the list of paths of the file being associated with the binary
1932 @type session: SQLAlchemy session
1933 @param session: Optional SQLAlchemy session. If this is passed, the caller
1934 is responsible for ensuring a transaction has begun and committing the
1935 results or rolling back based on the result code. If not passed, a commit
1936 will be performed at the end of the function
1938 @return: True upon success, False if there is a problem
1941 privatetrans = False
1944 session = DBConn().session()
1948 arch = get_architecture(package['Architecture'], session)
1949 arch_id = arch.arch_id
1951 # Remove any already existing recorded files for this package
1952 q = session.query(PendingBinContents)
1953 q = q.filter_by(package=package['Package'])
1954 q = q.filter_by(version=package['Version'])
1955 q = q.filter_by(architecture=arch_id)
1958 for fullpath in fullpaths:
# Normalize "./path" to "path".
1960 if fullpath.startswith( "./" ):
1961 fullpath = fullpath[2:]
1963 pca = PendingBinContents()
1964 pca.package = package['Package']
1965 pca.version = package['Version']
1967 pca.architecture = arch_id
# NOTE(review): the condition selecting between type 8 and 7 is elided in
# this excerpt - presumably udeb vs deb; confirm against the full file.
1970 pca.type = 8 # gross
1972 pca.type = 7 # also gross
1975 # Only commit if we set up the session ourself
1983 except Exception, e:
1984 traceback.print_exc()
1986 # Only rollback if we set up the session ourself
1993 __all__.append('insert_pending_content_paths')
1995 ################################################################################
# A policy queue (e.g. NEW, byhand) with a name and on-disk path.
1997 class PolicyQueue(object):
1998 def __init__(self, *args, **kwargs):
2002 return '<PolicyQueue %s>' % self.queue_name
2004 __all__.append('PolicyQueue')
# Look up a policy queue by its name.
2007 def get_policy_queue(queuename, session=None):
2009 Returns PolicyQueue object for given C{queue name}
2011 @type queuename: string
2012 @param queuename: The name of the queue
2014 @type session: Session
2015 @param session: Optional SQLA session object (a temporary one will be
2016 generated if not supplied)
2019 @return: PolicyQueue object for the given queue
2022 q = session.query(PolicyQueue).filter_by(queue_name=queuename)
2026 except NoResultFound:
2029 __all__.append('get_policy_queue')
# Look up a policy queue by its filesystem path.
2032 def get_policy_queue_from_path(pathname, session=None):
2034 Returns PolicyQueue object for given C{path name}
2036 @type pathname: string
2037 @param pathname: The path of the queue
2039 @type session: Session
2040 @param session: Optional SQLA session object (a temporary one will be
2041 generated if not supplied)
2044 @return: PolicyQueue object for the given queue
2047 q = session.query(PolicyQueue).filter_by(path=pathname)
2051 except NoResultFound:
2054 __all__.append('get_policy_queue_from_path')
2056 ################################################################################
# A package priority (required/important/standard/optional/extra).
# Compares equal to its plain string name for convenience.
2058 class Priority(object):
2059 def __init__(self, *args, **kwargs):
2062 def __eq__(self, val):
2063 if isinstance(val, str):
2064 return (self.priority == val)
2065 # This signals to use the normal comparison operator
2066 return NotImplemented
2068 def __ne__(self, val):
2069 if isinstance(val, str):
2070 return (self.priority != val)
2071 # This signals to use the normal comparison operator
2072 return NotImplemented
2075 return '<Priority %s (%s)>' % (self.priority, self.priority_id)
2077 __all__.append('Priority')
# Look up a Priority row by name.
2080 def get_priority(priority, session=None):
2082 Returns Priority object for given C{priority name}.
2084 @type priority: string
2085 @param priority: The name of the priority
2087 @type session: Session
2088 @param session: Optional SQLA session object (a temporary one will be
2089 generated if not supplied)
2092 @return: Priority object for the given priority
2095 q = session.query(Priority).filter_by(priority=priority)
2099 except NoResultFound:
2102 __all__.append('get_priority')
# Build a {priority name: priority_id} dict over all priorities.
2105 def get_priorities(session=None):
2107 Returns dictionary of priority names -> id mappings
2109 @type session: Session
2110 @param session: Optional SQL session object (a temporary one will be
2111 generated if not supplied)
2114 @return: dictionary of priority names -> id mappings
2118 q = session.query(Priority)
2120 ret[x.priority] = x.priority_id
2124 __all__.append('get_priorities')
2126 ################################################################################
# An archive section (admin, devel, libs, ...). Compares equal to its
# plain string name for convenience.
2128 class Section(object):
2129 def __init__(self, *args, **kwargs):
2132 def __eq__(self, val):
2133 if isinstance(val, str):
2134 return (self.section == val)
2135 # This signals to use the normal comparison operator
2136 return NotImplemented
2138 def __ne__(self, val):
2139 if isinstance(val, str):
2140 return (self.section != val)
2141 # This signals to use the normal comparison operator
2142 return NotImplemented
2145 return '<Section %s>' % self.section
2147 __all__.append('Section')
# Look up a Section row by name.
2150 def get_section(section, session=None):
2152 Returns Section object for given C{section name}.
2154 @type section: string
2155 @param section: The name of the section
2157 @type session: Session
2158 @param session: Optional SQLA session object (a temporary one will be
2159 generated if not supplied)
2162 @return: Section object for the given section name
2165 q = session.query(Section).filter_by(section=section)
2169 except NoResultFound:
2172 __all__.append('get_section')
# Build a {section name: section_id} dict over all sections.
2175 def get_sections(session=None):
2177 Returns dictionary of section names -> id mappings
2179 @type session: Session
2180 @param session: Optional SQL session object (a temporary one will be
2181 generated if not supplied)
2184 @return: dictionary of section names -> id mappings
2188 q = session.query(Section)
2190 ret[x.section] = x.section_id
2194 __all__.append('get_sections')
2196 ################################################################################
class DBSource(ORMObject):
    """A source package row (table C{source}).

    Relations such as maintainer, changedby, fingerprint, poolfile and
    suites are wired up by the SQLAlchemy mapper configured elsewhere in
    this module.
    """
    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date

    def properties(self):
        # Attributes shown by ORMObject's generic repr/str helpers.
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \
            'install_date', 'binaries_count']

    def not_null_constraints(self):
        # Columns that must never be NULL. Fix: 'install_date' was
        # previously listed twice; the duplicate has been removed.
        return ['source', 'version', 'install_date', 'maintainer', \
            'changedby', 'poolfile']
2217 __all__.append('DBSource')
# Verify that the source for a binary-only upload exists in one of the
# given suites (following SuiteMappings), accepting bin-NMU versions.
# NOTE(review): the default `suites = ["any"]` is a shared mutable list;
# it is not visibly mutated here, but a tuple would be safer - confirm.
2220 def source_exists(source, source_version, suites = ["any"], session=None):
2222 Ensure that source exists somewhere in the archive for the binary
2223 upload being processed.
2224 1. exact match => 1.0-3
2225 2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
2227 @type source: string
2228 @param source: source name
2230 @type source_version: string
2231 @param source_version: expected source version
2234 @param suites: list of suites to check in, default I{any}
2236 @type session: Session
2237 @param session: Optional SQLA session object (a temporary one will be
2238 generated if not supplied)
2241 @return: returns 1 if a source with expected version is found, otherwise 0
# Strip a "+bN" bin-NMU suffix to also match the base source version.
2248 from daklib.regexes import re_bin_only_nmu
2249 orig_source_version = re_bin_only_nmu.sub('', source_version)
2251 for suite in suites:
2252 q = session.query(DBSource).filter_by(source=source). \
2253 filter(DBSource.version.in_([source_version, orig_source_version]))
2255 # source must exist in suite X, or in some other suite that's
2256 # mapped to X, recursively... silent-maps are counted too,
2257 # unreleased-maps aren't.
2258 maps = cnf.ValueList("SuiteMappings")[:]
2260 maps = [ m.split() for m in maps ]
2261 maps = [ (x[1], x[2]) for x in maps
2262 if x[0] == "map" or x[0] == "silent-map" ]
2265 if x[1] in s and x[0] not in s:
2268 q = q.filter(DBSource.suites.any(Suite.suite_name.in_(s)))
2273 # No source found so return not ok
2278 __all__.append('source_exists')
# List the suites that currently contain the given source package.
2281 def get_suites_source_in(source, session=None):
2283 Returns list of Suite objects which given C{source} name is in
2286 @param source: DBSource package name to search for
2289 @return: list of Suite objects for the given source
2292 return session.query(Suite).filter(Suite.sources.any(source=source)).all()
2294 __all__.append('get_suites_source_in')
# Query DBSource rows by name, optionally narrowed by version and the
# dm_upload_allowed flag.
2297 def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
2299 Returns list of DBSource objects for given C{source} name and other parameters
2302 @param source: DBSource package name to search for
2304 @type version: str or None
2305 @param version: DBSource version name to search for or None if not applicable
2307 @type dm_upload_allowed: bool
2308 @param dm_upload_allowed: If None, no effect. If True or False, only
2309 return packages with that dm_upload_allowed setting
2311 @type session: Session
2312 @param session: Optional SQL session object (a temporary one will be
2313 generated if not supplied)
2316 @return: list of DBSource objects for the given name (may be empty)
2319 q = session.query(DBSource).filter_by(source=source)
2321 if version is not None:
2322 q = q.filter_by(version=version)
2324 if dm_upload_allowed is not None:
2325 q = q.filter_by(dm_upload_allowed=dm_upload_allowed)
2329 __all__.append('get_sources_from_name')
2331 # FIXME: This function fails badly if it finds more than 1 source package and
2332 # its implementation is trivial enough to be inlined.
# Fetch the single DBSource for (source, suite) via Suite.get_sources().
2334 def get_source_in_suite(source, suite, session=None):
2336 Returns a DBSource object for a combination of C{source} and C{suite}.
2338 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2339 - B{suite} - a suite name, eg. I{unstable}
2341 @type source: string
2342 @param source: source package name
2345 @param suite: the suite name
2348 @return: the version for I{source} in I{suite}
2352 q = get_suite(suite, session).get_sources(source)
2355 except NoResultFound:
2358 __all__.append('get_source_in_suite')
2360 ################################################################################
# Record an uploaded .dsc in the database: creates the DBSource row, its
# pool files, dsc_files entries and src_uploaders. `u` is an Upload object
# (u.pkg carries the parsed .changes/.dsc); returns the new source plus
# component/location info and the list of PoolFiles created.
2363 def add_dsc_to_db(u, filename, session=None):
2364 entry = u.pkg.files[filename]
2368 source.source = u.pkg.dsc["source"]
2369 source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
2370 source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
2371 source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
2372 source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
2373 source.install_date = datetime.now().date()
2375 dsc_component = entry["component"]
2376 dsc_location_id = entry["location id"]
2378 source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")
2380 # Set up a new poolfile if necessary
2381 if not entry.has_key("files id") or not entry["files id"]:
2382 filename = entry["pool name"] + filename
2383 poolfile = add_poolfile(filename, entry, dsc_location_id, session)
2385 pfs.append(poolfile)
2386 entry["files id"] = poolfile.file_id
2388 source.poolfile_id = entry["files id"]
# Attach the source to every suite named in the .changes Distribution.
2391 suite_names = u.pkg.changes["distribution"].keys()
2392 source.suites = session.query(Suite). \
2393 filter(Suite.suite_name.in_(suite_names)).all()
2395 # Add the source files to the DB (files and dsc_files)
2397 dscfile.source_id = source.source_id
2398 dscfile.poolfile_id = entry["files id"]
2399 session.add(dscfile)
2401 for dsc_file, dentry in u.pkg.dsc_files.items():
2403 df.source_id = source.source_id
2405 # If the .orig tarball is already in the pool, it's
2406 # files id is stored in dsc_files by check_dsc().
2407 files_id = dentry.get("files id", None)
2409 # Find the entry in the files hash
2410 # TODO: Bail out here properly
2412 for f, e in u.pkg.files.items():
2417 if files_id is None:
2418 filename = dfentry["pool name"] + dsc_file
2420 (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
2421 # FIXME: needs to check for -1/-2 and or handle exception
2422 if found and obj is not None:
2423 files_id = obj.file_id
2426 # If still not found, add it
2427 if files_id is None:
2428 # HACK: Force sha1sum etc into dentry
2429 dentry["sha1sum"] = dfentry["sha1sum"]
2430 dentry["sha256sum"] = dfentry["sha256sum"]
2431 poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
2432 pfs.append(poolfile)
2433 files_id = poolfile.file_id
2435 poolfile = get_poolfile_by_id(files_id, session)
2436 if poolfile is None:
2437 utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
2438 pfs.append(poolfile)
2440 df.poolfile_id = files_id
2443 # Add the src_uploaders to the DB
# Maintainer is always an uploader; Uploaders field entries are split on
# the ">, " separator (rewritten to tabs first).
2444 uploader_ids = [source.maintainer_id]
2445 if u.pkg.dsc.has_key("uploaders"):
2446 for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
2448 uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)
# Deduplicate uploader ids, warning on repeats.
2451 for up_id in uploader_ids:
2452 if added_ids.has_key(up_id):
2454 utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))
2460 su.maintainer_id = up_id
2461 su.source_id = source.source_id
2466 return source, dsc_component, dsc_location_id, pfs
2468 __all__.append('add_dsc_to_db')
# Record an uploaded .deb/.udeb in the database: creates the binary row,
# its pool file, links it to its source and to the target suites.
2471 def add_deb_to_db(u, filename, session=None):
2473 Contrary to what you might expect, this routine deals with both
2474 debs and udebs. That info is in 'dbtype', whilst 'type' is
2475 'deb' for both of them
2478 entry = u.pkg.files[filename]
2481 bin.package = entry["package"]
2482 bin.version = entry["version"]
2483 bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
2484 bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
2485 bin.arch_id = get_architecture(entry["architecture"], session).arch_id
2486 bin.binarytype = entry["dbtype"]
2489 filename = entry["pool name"] + filename
2490 fullpath = os.path.join(cnf["Dir::Pool"], filename)
2491 if not entry.get("location id", None):
2492 entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id
2494 if entry.get("files id", None):
# NOTE(review): bin.poolfile_id is read here before it is assigned on the
# next line - looks like it should be entry["files id"]; confirm against
# the full file before changing.
2495 poolfile = get_poolfile_by_id(bin.poolfile_id)
2496 bin.poolfile_id = entry["files id"]
2498 poolfile = add_poolfile(filename, entry, entry["location id"], session)
2499 bin.poolfile_id = entry["files id"] = poolfile.file_id
# The binary must map to exactly one source package.
2502 bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
2503 if len(bin_sources) != 1:
2504 raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
2505 (bin.package, bin.version, entry["architecture"],
2506 filename, bin.binarytype, u.pkg.changes["fingerprint"])
2508 bin.source_id = bin_sources[0].source_id
2510 # Add and flush object so it has an ID
# Attach the binary to every suite named in the .changes Distribution.
2513 suite_names = u.pkg.changes["distribution"].keys()
2514 bin.suites = session.query(Suite). \
2515 filter(Suite.suite_name.in_(suite_names)).all()
2519 # Deal with contents - disabled for now
2520 #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
2522 # print "REJECT\nCould not determine contents of package %s" % bin.package
2523 # session.rollback()
2524 # raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
2528 __all__.append('add_deb_to_db')
2530 ################################################################################
# ACL row controlling who may upload a given source package.
2532 class SourceACL(object):
2533 def __init__(self, *args, **kwargs):
2537 return '<SourceACL %s>' % self.source_acl_id
2539 __all__.append('SourceACL')
2541 ################################################################################
# A supported source package format (e.g. "1.0", "3.0 (quilt)").
2543 class SrcFormat(object):
2544 def __init__(self, *args, **kwargs):
2548 return '<SrcFormat %s>' % (self.format_name)
2550 __all__.append('SrcFormat')
2552 ################################################################################
# Link table row: a maintainer listed as uploader of a source package.
2554 class SrcUploader(object):
2555 def __init__(self, *args, **kwargs):
2559 return '<SrcUploader %s>' % self.uploader_id
2561 __all__.append('SrcUploader')
2563 ################################################################################
# (Display label, Suite attribute name) pairs iterated below (see the
# SUITE_FIELDS loop inside class Suite) to render a textual dump of a
# suite row.
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
2581 # Why the heck don't we have any UNIQUE constraints in table suite?
2582 # TODO: Add UNIQUE constraints for appropriate columns.
class Suite(ORMObject):
    """
    A distribution suite (row of table 'suite').

    Instances compare equal/unequal against plain strings by suite_name,
    so callers may write e.g. C{suite == 'unstable'}.
    """

    def __init__(self, suite_name = None, version = None):
        self.suite_name = suite_name
        self.version = version

    def properties(self):
        # Attributes (including synthetic counts) used by ORMObject's
        # generic representation.
        return ['suite_name', 'version', 'sources_count', 'binaries_count']

    def not_null_constraints(self):
        # Attributes that must not be None when this object is validated.
        return ['suite_name', 'version']

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.suite_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.suite_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

        # NOTE(review): a 'def details(self):' header and the 'ret = []'
        # initialisation appear to be elided before the loop below in this
        # view.  The loop renders one "Label: value" line per SUITE_FIELDS
        # entry set on this suite.
        for disp, field in SUITE_FIELDS:
            val = getattr(self, field, None)
            ret.append("%s: %s" % (disp, val))
        return "\n".join(ret)

    def get_architectures(self, skipsrc=False, skipall=False):
        """
        Returns list of Architecture objects

        @type skipsrc: boolean
        @param skipsrc: Whether to skip returning the 'source' architecture entry

        @type skipall: boolean
        @param skipall: Whether to skip returning the 'all' architecture entry

        @return: list of Architecture objects for the given name (may be empty)
        """
        q = object_session(self).query(Architecture).with_parent(self)
        # NOTE(review): the 'if skipsrc:' / 'if skipall:' guards for the two
        # filters below appear to be elided in this view -- confirm.
        q = q.filter(Architecture.arch_string != 'source')
        q = q.filter(Architecture.arch_string != 'all')
        return q.order_by(Architecture.arch_string).all()

    def get_sources(self, source):
        """
        Returns a query object representing DBSource that is part of C{suite}.

          - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}

        @type source: string
        @param source: source package name

        @rtype: sqlalchemy.orm.query.Query
        @return: a query of DBSource
        """
        session = object_session(self)
        # NOTE(review): the continuation line of this query (restricting the
        # result to this suite) appears to be elided in this view.
        return session.query(DBSource).filter_by(source = source). \
2656 __all__.append('Suite')
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Suite object for the requested suite name (None if not present)
    """
    q = session.query(Suite).filter_by(suite_name=suite)
    # NOTE(review): the 'try:' / 'return q.one()' lookup and the
    # return-None fallback appear to be elided around the handler below
    # in this view.
    except NoResultFound:
2681 __all__.append('get_suite')
2683 ################################################################################
2685 # TODO: should be removed because the implementation is too trivial
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name

    @type suite: string
    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: list of Architecture objects for the given name (may be empty)
    """
    # Thin wrapper: delegates entirely to Suite.get_architectures().
    return get_suite(suite, session).get_architectures(skipsrc, skipall)
2712 __all__.append('get_suite_architectures')
2714 ################################################################################
class SuiteSrcFormat(object):
    # Association row: a source format allowed in a suite (table
    # suite_src_formats).
    def __init__(self, *args, **kwargs):
        # NOTE(review): a 'pass' body and a 'def __repr__(self):' header
        # appear to be elided before the following line in this view.
        return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
2723 __all__.append('SuiteSrcFormat')
def get_suite_src_formats(suite, session=None):
    """
    Returns list of allowed SrcFormat for C{suite}.

    @type suite: string
    @param suite: Suite name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: the list of allowed source formats for I{suite}
    """
    # Join src_format -> suite_src_formats -> suite and restrict to the
    # requested suite name.
    q = session.query(SrcFormat)
    q = q.join(SuiteSrcFormat)
    q = q.join(Suite).filter_by(suite_name=suite)
    q = q.order_by('format_name')
    # NOTE(review): the final 'return q.all()' appears to be elided in
    # this view.
2748 __all__.append('get_suite_src_formats')
2750 ################################################################################
class Uid(ORMObject):
    # An OpenPGP uid (row of table 'uid').  Equality/inequality against
    # plain strings compares the uid field.
    def __init__(self, uid = None, name = None):
        # NOTE(review): the attribute assignments of __init__ appear to be
        # elided in this view (presumably self.uid / self.name -- confirm).

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.uid == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.uid != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        # Attributes used by ORMObject's generic representation.
        return ['uid', 'name', 'fingerprint']

    def not_null_constraints(self):
        # NOTE(review): the return statement appears to be elided in this
        # view (presumably ['uid'] -- confirm).
2775 __all__.append('Uid')
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.

    @return: the uid object for the given uidname
    """
    q = session.query(Uid).filter_by(uid=uidname)
    # NOTE(review): the 'try:' / 'uid = q.one()' lookup and the
    # insert-on-miss branch appear to be elided around the handler below
    # in this view.
    except NoResultFound:
        # Persist the newly inserted row (commit when we own the session,
        # flush when the caller does).
        session.commit_or_flush()
2809 __all__.append('get_or_set_uid')
def get_uid_from_fingerprint(fpr, session=None):
    # Look up the Uid that owns the given fingerprint string by joining
    # through table fingerprint.
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)
    # NOTE(review): the 'try:' / result return and the None fallback
    # appear to be elided around the handler below in this view.
    except NoResultFound:
2821 __all__.append('get_uid_from_fingerprint')
2823 ################################################################################
class UploadBlock(object):
    # A block preventing uploads of a source package by a fingerprint/uid
    # (row of table upload_blocks).
    def __init__(self, *args, **kwargs):
        # NOTE(review): a 'pass' body and a 'def __repr__(self):' header
        # appear to be elided before the following line in this view.
        return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
2832 __all__.append('UploadBlock')
2834 ################################################################################
class DBConn(object):
    """
    database module init.
    """
    # NOTE(review): many lines of this class are elided in the current view
    # (the Borg '__shared_state = {}' assignment, tuple-closing parentheses
    # and the 'views = (' header in __setuptables, parts of __createconn and
    # the 'def session(self):' header).  Code is kept verbatim; only
    # comments are added.

    def __init__(self, *args, **kwargs):
        # Borg pattern: every DBConn() instance aliases the same shared
        # __dict__, so engine/metadata/mappers are set up only once per
        # process.
        self.__dict__ = self.__shared_state

        if not getattr(self, 'initialised', False):
            self.initialised = True
            # Echo SQL statements when a 'debug' keyword argument is given.
            self.debug = kwargs.has_key('debug')

    def __setuptables(self):
        # Reflect the database schema into SQLAlchemy Table objects bound
        # as self.tbl_* / self.view_* attributes.
        # Tables in this tuple get an explicit 'id' primary-key column
        # because sqlalchemy 0.5 cannot reflect the SERIAL primary keys
        # itself (see the workaround comment before the loop below).
        tables_with_primary = (
            'changes_pending_binaries',
            'changes_pending_files',
            'changes_pending_source',
            'pending_bin_contents',
        # The following tables have primary keys but sqlalchemy
        # version 0.5 fails to reflect them correctly with database
        # versions before upgrade #41.
        #'build_queue_files',

        # Association / mapping tables reflected without an explicit
        # primary key.
        tables_no_primary = (
            'changes_pending_files_map',
            'changes_pending_source_files',
            'changes_pool_files',
            'suite_architectures',
            'suite_src_formats',
            'suite_build_queue_copy',
            # see the comment above
            'build_queue_files',

        # Read-only database views, reflected below as self.view_*.
            'almost_obsolete_all_associations',
            'almost_obsolete_src_associations',
            'any_associations_source',
            'bin_assoc_by_arch',
            'bin_associations_binaries',
            'binaries_suite_arch',
            'binfiles_suite_component_arch',
            'newest_all_associations',
            'newest_any_associations',
            'newest_src_association',
            'obsolete_all_associations',
            'obsolete_any_associations',
            'obsolete_any_by_all_associations',
            'obsolete_src_associations',
            'src_associations_bin',
            'src_associations_src',
            'suite_arch_by_name',

        # Sqlalchemy version 0.5 fails to reflect the SERIAL type
        # correctly and that is why we have to use a workaround. It can
        # be removed as soon as we switch to version 0.6.
        for table_name in tables_with_primary:
            table = Table(table_name, self.db_meta, \
                Column('id', Integer, primary_key = True), \
                autoload=True, useexisting=True)
            setattr(self, 'tbl_%s' % table_name, table)

        for table_name in tables_no_primary:
            table = Table(table_name, self.db_meta, autoload=True)
            setattr(self, 'tbl_%s' % table_name, table)

        for view_name in views:
            view = Table(view_name, self.db_meta, autoload=True)
            setattr(self, 'view_%s' % view_name, view)

    def __setupmappers(self):
        # Wire each ORM class to its reflected table.  The 'validator'
        # MapperExtension (defined elsewhere in this module) is attached to
        # mappers whose classes take part in not-null validation.
        mapper(Architecture, self.tbl_architecture,
            properties = dict(arch_id = self.tbl_architecture.c.id,
               suites = relation(Suite, secondary=self.tbl_suite_architectures,
                   order_by='suite_name',
                   backref=backref('architectures', order_by='arch_string'))),
            extension = validator)

        mapper(Archive, self.tbl_archive,
               properties = dict(archive_id = self.tbl_archive.c.id,
                                 archive_name = self.tbl_archive.c.name))

        mapper(BinAssociation, self.tbl_bin_associations,
               properties = dict(ba_id = self.tbl_bin_associations.c.id,
                                 suite_id = self.tbl_bin_associations.c.suite,
                                 suite = relation(Suite),
                                 binary_id = self.tbl_bin_associations.c.bin,
                                 binary = relation(DBBinary)))

        mapper(PendingBinContents, self.tbl_pending_bin_contents,
               properties = dict(contents_id =self.tbl_pending_bin_contents.c.id,
                                 filename = self.tbl_pending_bin_contents.c.filename,
                                 package = self.tbl_pending_bin_contents.c.package,
                                 version = self.tbl_pending_bin_contents.c.version,
                                 arch = self.tbl_pending_bin_contents.c.arch,
                                 otype = self.tbl_pending_bin_contents.c.type))

        mapper(DebContents, self.tbl_deb_contents,
               properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
                                 package=self.tbl_deb_contents.c.package,
                                 suite=self.tbl_deb_contents.c.suite,
                                 arch=self.tbl_deb_contents.c.arch,
                                 section=self.tbl_deb_contents.c.section,
                                 filename=self.tbl_deb_contents.c.filename))

        mapper(UdebContents, self.tbl_udeb_contents,
               properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
                                 package=self.tbl_udeb_contents.c.package,
                                 suite=self.tbl_udeb_contents.c.suite,
                                 arch=self.tbl_udeb_contents.c.arch,
                                 section=self.tbl_udeb_contents.c.section,
                                 filename=self.tbl_udeb_contents.c.filename))

        mapper(BuildQueue, self.tbl_build_queue,
               properties = dict(queue_id = self.tbl_build_queue.c.id))

        mapper(BuildQueueFile, self.tbl_build_queue_files,
               properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
                                 poolfile = relation(PoolFile, backref='buildqueueinstances')))

        mapper(DBBinary, self.tbl_binaries,
               properties = dict(binary_id = self.tbl_binaries.c.id,
                                 package = self.tbl_binaries.c.package,
                                 version = self.tbl_binaries.c.version,
                                 maintainer_id = self.tbl_binaries.c.maintainer,
                                 maintainer = relation(Maintainer),
                                 source_id = self.tbl_binaries.c.source,
                                 source = relation(DBSource, backref='binaries'),
                                 arch_id = self.tbl_binaries.c.architecture,
                                 architecture = relation(Architecture),
                                 poolfile_id = self.tbl_binaries.c.file,
                                 # one-to-one: a pool file backs at most one binary
                                 poolfile = relation(PoolFile, backref=backref('binary', uselist = False)),
                                 binarytype = self.tbl_binaries.c.type,
                                 fingerprint_id = self.tbl_binaries.c.sig_fpr,
                                 fingerprint = relation(Fingerprint),
                                 install_date = self.tbl_binaries.c.install_date,
                                 suites = relation(Suite, secondary=self.tbl_bin_associations,
                                     backref=backref('binaries', lazy='dynamic')),
                                 binassociations = relation(BinAssociation,
                                                            primaryjoin=(self.tbl_binaries.c.id==self.tbl_bin_associations.c.bin))),
                extension = validator)

        mapper(BinaryACL, self.tbl_binary_acl,
               properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))

        mapper(BinaryACLMap, self.tbl_binary_acl_map,
               properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
                                 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
                                 architecture = relation(Architecture)))

        mapper(Component, self.tbl_component,
               properties = dict(component_id = self.tbl_component.c.id,
                                 component_name = self.tbl_component.c.name))

        mapper(DBConfig, self.tbl_config,
               properties = dict(config_id = self.tbl_config.c.id))

        mapper(DSCFile, self.tbl_dsc_files,
               properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
                                 source_id = self.tbl_dsc_files.c.source,
                                 source = relation(DBSource),
                                 poolfile_id = self.tbl_dsc_files.c.file,
                                 poolfile = relation(PoolFile)))

        mapper(PoolFile, self.tbl_files,
               properties = dict(file_id = self.tbl_files.c.id,
                                 filesize = self.tbl_files.c.size,
                                 location_id = self.tbl_files.c.location,
                                 location = relation(Location,
                                     # using lazy='dynamic' in the back
                                     # reference because we have A LOT of
                                     # files in one location
                                     backref=backref('files', lazy='dynamic'))),
                extension = validator)

        mapper(Fingerprint, self.tbl_fingerprint,
               properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
                                 uid_id = self.tbl_fingerprint.c.uid,
                                 uid = relation(Uid),
                                 keyring_id = self.tbl_fingerprint.c.keyring,
                                 keyring = relation(Keyring),
                                 source_acl = relation(SourceACL),
                                 binary_acl = relation(BinaryACL)),
               extension = validator)

        mapper(Keyring, self.tbl_keyrings,
               properties = dict(keyring_name = self.tbl_keyrings.c.name,
                                 keyring_id = self.tbl_keyrings.c.id))

        mapper(DBChange, self.tbl_changes,
               properties = dict(change_id = self.tbl_changes.c.id,
                                 poolfiles = relation(PoolFile,
                                                      secondary=self.tbl_changes_pool_files,
                                                      backref="changeslinks"),
                                 seen = self.tbl_changes.c.seen,
                                 source = self.tbl_changes.c.source,
                                 binaries = self.tbl_changes.c.binaries,
                                 architecture = self.tbl_changes.c.architecture,
                                 distribution = self.tbl_changes.c.distribution,
                                 urgency = self.tbl_changes.c.urgency,
                                 maintainer = self.tbl_changes.c.maintainer,
                                 changedby = self.tbl_changes.c.changedby,
                                 date = self.tbl_changes.c.date,
                                 version = self.tbl_changes.c.version,
                                 files = relation(ChangePendingFile,
                                                  secondary=self.tbl_changes_pending_files_map,
                                                  backref="changesfile"),
                                 in_queue_id = self.tbl_changes.c.in_queue,
                                 in_queue = relation(PolicyQueue,
                                                     primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
                                 approved_for_id = self.tbl_changes.c.approved_for))

        mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
               properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))

        mapper(ChangePendingFile, self.tbl_changes_pending_files,
               properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
                                 filename = self.tbl_changes_pending_files.c.filename,
                                 size = self.tbl_changes_pending_files.c.size,
                                 md5sum = self.tbl_changes_pending_files.c.md5sum,
                                 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
                                 sha256sum = self.tbl_changes_pending_files.c.sha256sum))

        mapper(ChangePendingSource, self.tbl_changes_pending_source,
               properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
                                 change = relation(DBChange),
                                 # two relations into the same table need
                                 # explicit primaryjoins
                                 maintainer = relation(Maintainer,
                                                       primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
                                 changedby = relation(Maintainer,
                                                      primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
                                 fingerprint = relation(Fingerprint),
                                 source_files = relation(ChangePendingFile,
                                                         secondary=self.tbl_changes_pending_source_files,
                                                         backref="pending_sources")))

        mapper(KeyringACLMap, self.tbl_keyring_acl_map,
               properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
                                 keyring = relation(Keyring, backref="keyring_acl_map"),
                                 architecture = relation(Architecture)))

        mapper(Location, self.tbl_location,
               properties = dict(location_id = self.tbl_location.c.id,
                                 component_id = self.tbl_location.c.component,
                                 component = relation(Component),
                                 archive_id = self.tbl_location.c.archive,
                                 archive = relation(Archive),
                                 # FIXME: the 'type' column is old cruft and
                                 # should be removed in the future.
                                 archive_type = self.tbl_location.c.type),
               extension = validator)

        mapper(Maintainer, self.tbl_maintainer,
               properties = dict(maintainer_id = self.tbl_maintainer.c.id,
                                 maintains_sources = relation(DBSource, backref='maintainer',
                                     primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
                                 changed_sources = relation(DBSource, backref='changedby',
                                     primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
                extension = validator)

        mapper(NewComment, self.tbl_new_comments,
               properties = dict(comment_id = self.tbl_new_comments.c.id))

        mapper(Override, self.tbl_override,
               properties = dict(suite_id = self.tbl_override.c.suite,
                                 suite = relation(Suite),
                                 package = self.tbl_override.c.package,
                                 component_id = self.tbl_override.c.component,
                                 component = relation(Component),
                                 priority_id = self.tbl_override.c.priority,
                                 priority = relation(Priority),
                                 section_id = self.tbl_override.c.section,
                                 section = relation(Section),
                                 overridetype_id = self.tbl_override.c.type,
                                 overridetype = relation(OverrideType)))

        mapper(OverrideType, self.tbl_override_type,
               properties = dict(overridetype = self.tbl_override_type.c.type,
                                 overridetype_id = self.tbl_override_type.c.id))

        mapper(PolicyQueue, self.tbl_policy_queue,
               properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))

        mapper(Priority, self.tbl_priority,
               properties = dict(priority_id = self.tbl_priority.c.id))

        mapper(Section, self.tbl_section,
               properties = dict(section_id = self.tbl_section.c.id,
                                 section=self.tbl_section.c.section))

        mapper(DBSource, self.tbl_source,
               properties = dict(source_id = self.tbl_source.c.id,
                                 version = self.tbl_source.c.version,
                                 maintainer_id = self.tbl_source.c.maintainer,
                                 poolfile_id = self.tbl_source.c.file,
                                 # one-to-one: a pool file backs at most one source
                                 poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
                                 fingerprint_id = self.tbl_source.c.sig_fpr,
                                 fingerprint = relation(Fingerprint),
                                 changedby_id = self.tbl_source.c.changedby,
                                 srcfiles = relation(DSCFile,
                                                     primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
                                 suites = relation(Suite, secondary=self.tbl_src_associations,
                                     backref=backref('sources', lazy='dynamic')),
                                 srcuploaders = relation(SrcUploader)),
               extension = validator)

        mapper(SourceACL, self.tbl_source_acl,
               properties = dict(source_acl_id = self.tbl_source_acl.c.id))

        mapper(SrcFormat, self.tbl_src_format,
               properties = dict(src_format_id = self.tbl_src_format.c.id,
                                 format_name = self.tbl_src_format.c.format_name))

        mapper(SrcUploader, self.tbl_src_uploaders,
               properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
                                 source_id = self.tbl_src_uploaders.c.source,
                                 source = relation(DBSource,
                                                   primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
                                 maintainer_id = self.tbl_src_uploaders.c.maintainer,
                                 maintainer = relation(Maintainer,
                                                       primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))

        mapper(Suite, self.tbl_suite,
               properties = dict(suite_id = self.tbl_suite.c.id,
                                 policy_queue = relation(PolicyQueue),
                                 copy_queues = relation(BuildQueue,
                                     secondary=self.tbl_suite_build_queue_copy)),
                extension = validator)

        mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
               properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
                                 suite = relation(Suite, backref='suitesrcformats'),
                                 src_format_id = self.tbl_suite_src_formats.c.src_format,
                                 src_format = relation(SrcFormat)))

        mapper(Uid, self.tbl_uid,
               properties = dict(uid_id = self.tbl_uid.c.id,
                                 fingerprint = relation(Fingerprint)),
               extension = validator)

        mapper(UploadBlock, self.tbl_upload_blocks,
               properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
                                 fingerprint = relation(Fingerprint, backref="uploadblocks"),
                                 uid = relation(Uid, backref="uploadblocks")))

    ## Connection functions
    def __createconn(self):
        # Build a postgres connection string from the DB::* configuration
        # values, create the engine / metadata / session factory and run
        # table reflection and mapper setup.
        from config import Config
        # TCP connection when DB::Host is configured; NOTE(review): the
        # 'cnf = Config()' assignment and the branch condition are elided
        # in this view.
        connstr = "postgres://%s" % cnf["DB::Host"]
        if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
            connstr += ":%s" % cnf["DB::Port"]
        connstr += "/%s" % cnf["DB::Name"]
        # Unix-socket connection otherwise; a non-default port is passed as
        # a query parameter here.
        connstr = "postgres:///%s" % cnf["DB::Name"]
        if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
            connstr += "?port=%s" % cnf["DB::Port"]

        self.db_pg = create_engine(connstr, echo=self.debug)
        self.db_meta = MetaData()
        self.db_meta.bind = self.db_pg
        # NOTE(review): the remaining sessionmaker keyword arguments are
        # elided in this view.
        self.db_smaker = sessionmaker(bind=self.db_pg,

        self.__setuptables()
        self.__setupmappers()

        # NOTE(review): the following line belongs to a 'def session(self):'
        # accessor whose header is elided in this view; it hands out a new
        # session from the shared factory.
        return self.db_smaker()
3254 __all__.append('DBConn')